2 SGBlog -- Git-backed CGI/inetd blogging/phlogging engine
3 Copyright (C) 2020 Sergey Matveev <stargrave@stargrave.org>
5 This program is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Affero General Public License as
7 published by the Free Software Foundation, version 3 of the License.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU Affero General Public License for more details.
14 You should have received a copy of the GNU Affero General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>.
40 "github.com/hjson/hjson-go"
41 "go.stargrave.org/sgblog"
42 "golang.org/x/crypto/blake2b"
43 "golang.org/x/tools/blog/atom"
44 "gopkg.in/src-d/go-git.v4"
45 "gopkg.in/src-d/go-git.v4/plumbing"
46 "gopkg.in/src-d/go-git.v4/plumbing/object"
const (
	// AtomPostsFeed is the URL path component of the posts Atom feed.
	AtomPostsFeed = "feed.atom"
	// AtomCommentsFeed is the URL path component of the comments Atom feed.
	AtomCommentsFeed = "comments.atom"
)
var (
	// defaultLinks accumulates the <link> elements shared by every
	// rendered page (CSS, webmaster, VCS and top links are appended
	// to it during start-up).
	defaultLinks = []string{}

	// renderableSchemes is the set of URL schemes that lineURLize is
	// allowed to turn into clickable hyperlinks.
	// NOTE(review): the original entries of this set were lost in
	// extraction; http/https are assumed — confirm against upstream.
	renderableSchemes = map[string]struct{}{
		"http":  {},
		"https": {},
	}
)
66 type TableEntry struct {
// makeA renders an HTML anchor pointing at href with the given link
// text. Neither argument is escaped here: callers pass already-escaped
// or trusted strings.
func makeA(href, text string) string {
	return `<a href="` + href + `">` + text + `</a>`
}
75 func etagString(etag hash.Hash) string {
76 return `"` + hex.EncodeToString(etag.Sum(nil)) + `"`
79 func urlParse(what string) *url.URL {
80 if u, err := url.ParseRequestURI(what); err == nil {
81 if _, exists := renderableSchemes[u.Scheme]; exists {
88 func lineURLize(urlPrefix, line string) string {
89 cols := strings.Split(html.EscapeString(line), " ")
90 for i, col := range cols {
91 if u := urlParse(col); u != nil {
92 cols[i] = makeA(col, col)
95 cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
96 urlPrefix+"/$1", "$1",
99 return strings.Join(cols, " ")
102 func startHeader(etag hash.Hash, gziped bool) string {
104 "Content-Type: text/html; charset=UTF-8",
105 "ETag: " + etagString(etag),
108 lines = append(lines, "Content-Encoding: gzip")
110 lines = append(lines, "")
111 lines = append(lines, "")
112 return strings.Join(lines, "\n")
115 func startHTML(title string, additional []string) string {
116 return fmt.Sprintf(`<html>
118 <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
119 <meta name="generator" content="SGBlog %s">
125 sgblog.Version, title,
126 strings.Join(append(defaultLinks, additional...), "\n "),
// makeErr reports err to the client as a plain-text CGI response and
// terminates the process.
// NOTE(review): everything after the header print was lost in
// extraction; echoing the error and exiting via log.Fatalln is
// assumed — confirm against upstream.
func makeErr(err error) {
	fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
	fmt.Print(err)
	log.Fatalln(err)
}
136 func checkETag(etag hash.Hash) {
137 ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
138 if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
139 fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
// bytes2uuid formats the first 16 bytes of b as a version-4-style
// RFC 4122 UUID string. Only the version nibble (raw[6]) is forced;
// NOTE(review): the variant bits in raw[8] are left untouched, so the
// result is not a strictly conforming v4 UUID — this matches the
// visible original behavior and must stay as-is to keep feed entry IDs
// stable.
func bytes2uuid(b []byte) string {
	raw := new([16]byte)
	copy(raw[:], b)
	raw[6] = (raw[6] & 0x0F) | uint8(4<<4) // version 4
	return fmt.Sprintf("%x-%x-%x-%x-%x", raw[0:4], raw[4:6], raw[6:8], raw[8:10], raw[10:])
}
152 cfgPath := os.Getenv("SGBLOG_CFG")
154 log.Fatalln("SGBLOG_CFG is not set")
156 cfgRaw, err := ioutil.ReadFile(cfgPath)
160 var cfgGeneral map[string]interface{}
161 if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
164 cfgRaw, err = json.Marshal(cfgGeneral)
169 if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
172 pathInfo, exists := os.LookupEnv("PATH_INFO")
176 queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
181 etagHash, err := blake2b.New256(nil)
185 etagHash.Write([]byte("SGBLOG"))
186 etagHash.Write([]byte(sgblog.Version))
187 etagHash.Write([]byte(cfg.GitPath))
188 etagHash.Write([]byte(cfg.Branch))
189 etagHash.Write([]byte(cfg.Title))
190 etagHash.Write([]byte(cfg.URLPrefix))
191 etagHash.Write([]byte(cfg.AtomBaseURL))
192 etagHash.Write([]byte(cfg.AtomId))
193 etagHash.Write([]byte(cfg.AtomAuthor))
195 etagHashForWeb := [][]byte{}
197 defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
198 etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
200 if cfg.Webmaster != "" {
201 defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
202 etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
204 if cfg.AboutURL != "" {
205 etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
207 for _, gitURL := range cfg.GitURLs {
208 defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
209 etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
211 if cfg.CommentsNotesRef != "" {
212 etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
214 if cfg.CommentsEmail != "" {
215 etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
218 defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
219 atomPostsURL := cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomPostsFeed
220 atomCommentsURL := cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomCommentsFeed
222 headHash, err := initRepo(cfg)
227 if notes, err := repo.Notes(); err == nil {
228 var notesRef *plumbing.Reference
229 var commentsRef *plumbing.Reference
230 notes.ForEach(func(ref *plumbing.Reference) error {
231 switch string(ref.Name()) {
232 case "refs/notes/commits":
234 case cfg.CommentsNotesRef:
240 if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
241 notesTree, _ = commentsCommit.Tree()
244 if commentsRef != nil {
245 if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
246 commentsTree, _ = commentsCommit.Tree()
251 var outBuf bytes.Buffer
254 var gzipWriter *gzip.Writer
255 acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
256 for _, encoding := range strings.Split(acceptEncoding, ", ") {
257 if encoding == "gzip" {
258 gzipWriter = gzip.NewWriter(&outBuf)
265 if offsetRaw, exists := queryValues["offset"]; exists {
266 offset, err = strconv.Atoi(offsetRaw[0])
271 repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
276 for i := 0; i < offset; i++ {
277 if _, err = repoLog.Next(); err != nil {
283 entries := make([]TableEntry, 0, PageEntries)
285 for _, data := range etagHashForWeb {
288 etagHash.Write([]byte("INDEX"))
289 for i := 0; i < PageEntries; i++ {
290 commit, err := repoLog.Next()
295 etagHash.Write(commit.Hash[:])
296 commentsRaw := getNote(commentsTree, commit.Hash)
297 etagHash.Write(commentsRaw)
298 entries = append(entries, TableEntry{commit, commentsRaw})
302 var table bytes.Buffer
304 "<table border=1>\n" +
308 `<th size="5%"><a title="Lines">L</a></th>` +
309 `<th size="5%"><a title="Comments">C</a></th>` +
310 "<th>Linked to</th></tr>\n")
312 var monthPrev time.Month
314 for _, entry := range entries {
315 yearCur, monthCur, dayCur := entry.commit.Author.When.Date()
316 if dayCur != dayPrev || monthCur != monthPrev || yearCur != yearPrev {
317 table.WriteString(fmt.Sprintf(
318 "<tr><td colspan=6><center><tt>%04d-%02d-%02d</tt></center></td></tr>\n",
319 yearCur, monthCur, dayCur,
321 yearPrev, monthPrev, dayPrev = yearCur, monthCur, dayCur
324 lines := msgSplit(entry.commit.Message)
325 domains := []string{}
326 for _, line := range lines[2:] {
327 if u := urlParse(line); u == nil {
330 domains = append(domains, makeA(line, u.Host))
333 var commentsValue string
334 if l := len(parseComments(entry.commentsRaw)); l > 0 {
335 commentsValue = strconv.Itoa(l)
337 commentsValue = " "
339 table.WriteString(fmt.Sprintf(
340 "<tr><td>%d</td><td><tt>%02d:%02d</tt></td>"+
342 "<td>%d</td><td>%s</td>"+
343 "<td>%s</td></tr>\n",
345 entry.commit.Author.When.Hour(),
346 entry.commit.Author.When.Minute(),
347 makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
350 strings.Join(domains, " "),
353 table.WriteString("</table>")
356 links := []string{`<link rel="alternate" title="Posts feed" href="` + atomPostsURL + `" type="application/atom+xml">`}
357 var refs bytes.Buffer
358 if commentsTree != nil {
359 links = append(links, `<link rel="alternate" title="Comments feed" href="`+atomCommentsURL+`" type="application/atom+xml">`)
362 if offsetPrev := offset - PageEntries; offsetPrev > 0 {
363 href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
365 href = cfg.URLPrefix + "/"
367 links = append(links, `<link rel="prev" href="`+href+`" title="prev">`)
368 refs.WriteString("\n" + makeA(href, "[prev]"))
371 href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
372 links = append(links, `<link rel="next" href="`+href+`" title="next">`)
373 refs.WriteString("\n" + makeA(href, "[next]"))
376 os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
377 out.Write([]byte(startHTML(
378 fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
381 if cfg.AboutURL != "" {
382 out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
384 out.Write(refs.Bytes())
385 out.Write(table.Bytes())
386 out.Write(refs.Bytes())
387 out.Write([]byte("\n"))
388 } else if pathInfo == "/"+AtomPostsFeed {
389 commit, err := repo.CommitObject(*headHash)
393 etagHash.Write([]byte("ATOM POSTS"))
394 etagHash.Write(commit.Hash[:])
399 Updated: atom.Time(commit.Author.When),
400 Link: []atom.Link{{Rel: "self", Href: atomPostsURL}},
401 Author: &atom.Person{Name: cfg.AtomAuthor},
403 repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
407 for i := 0; i < PageEntries; i++ {
408 commit, err = repoLog.Next()
412 lines := msgSplit(commit.Message)
413 feed.Entry = append(feed.Entry, &atom.Entry{
415 ID: "urn:uuid:" + bytes2uuid(commit.Hash[:]),
418 Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
420 Published: atom.Time(commit.Author.When),
421 Updated: atom.Time(commit.Author.When),
422 Summary: &atom.Text{Type: "text", Body: lines[0]},
425 Body: strings.Join(lines[2:], "\n"),
429 data, err := xml.MarshalIndent(&feed, "", " ")
435 } else if pathInfo == "/"+AtomCommentsFeed {
436 commit, err := repo.CommitObject(commentsRef.Hash())
440 etagHash.Write([]byte("ATOM COMMENTS"))
441 etagHash.Write(commit.Hash[:])
443 idHasher, err := blake2b.New256(nil)
447 idHasher.Write([]byte("ATOM COMMENTS"))
448 idHasher.Write([]byte(cfg.AtomId))
450 Title: cfg.Title + " comments",
451 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
452 Updated: atom.Time(commit.Author.When),
453 Link: []atom.Link{{Rel: "self", Href: atomCommentsURL}},
454 Author: &atom.Person{Name: cfg.AtomAuthor},
456 repoLog, err := repo.Log(&git.LogOptions{From: commentsRef.Hash()})
460 for i := 0; i < PageEntries; i++ {
461 commit, err = repoLog.Next()
465 fileStats, err := commit.Stats()
469 t, err := commit.Tree()
473 commentedHash := plumbing.NewHash(strings.ReplaceAll(
474 fileStats[0].Name, "/", "",
476 comments := parseComments(getNote(t, commentedHash))
477 commit, err = repo.CommitObject(commentedHash)
481 commentN := strconv.Itoa(len(comments) - 1)
482 lines := strings.Split(comments[len(comments)-1], "\n")
483 from := strings.TrimPrefix(lines[0], "From: ")
484 date := strings.TrimPrefix(lines[1], "Date: ")
486 idHasher.Write([]byte("COMMENT"))
487 idHasher.Write(commit.Hash[:])
488 idHasher.Write([]byte(commentN))
489 feed.Entry = append(feed.Entry, &atom.Entry{
490 Title: strings.Join([]string{
491 "Comment ", commentN,
492 " for \"", msgSplit(commit.Message)[0],
495 Author: &atom.Person{Name: from},
496 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
499 Href: strings.Join([]string{
500 cfg.AtomBaseURL, cfg.URLPrefix, "/",
501 commit.Hash.String(), "#comment", commentN,
504 Published: atom.TimeStr(date),
505 Updated: atom.TimeStr(date),
508 Body: strings.Join(lines[2:], "\n"),
512 data, err := xml.MarshalIndent(&feed, "", " ")
518 } else if sha1DigestRe.MatchString(pathInfo[1:]) {
519 commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1 : 1+sha1.Size*2]))
523 for _, data := range etagHashForWeb {
526 etagHash.Write([]byte("ENTRY"))
527 etagHash.Write(commit.Hash[:])
528 atomCommentsURL = strings.Join([]string{
529 cfg.AtomBaseURL, cfg.URLPrefix, "/",
530 commit.Hash.String(), "/", AtomCommentsFeed,
532 commentsRaw := getNote(commentsTree, commit.Hash)
533 etagHash.Write(commentsRaw)
534 if strings.HasSuffix(pathInfo, AtomCommentsFeed) {
535 etagHash.Write([]byte("ATOM COMMENTS"))
537 type Comment struct {
543 commentsRaw := parseComments(commentsRaw)
545 if len(commentsRaw) > PageEntries {
546 toSkip = len(commentsRaw) - PageEntries
548 comments := make([]Comment, 0, len(commentsRaw)-toSkip)
549 for i := len(commentsRaw) - 1; i >= toSkip; i-- {
550 lines := strings.Split(commentsRaw[i], "\n")
551 from := strings.TrimPrefix(lines[0], "From: ")
552 date := strings.TrimPrefix(lines[1], "Date: ")
553 comments = append(comments, Comment{
556 date: strings.Replace(date, " ", "T", 1),
560 idHasher, err := blake2b.New256(nil)
564 idHasher.Write([]byte("ATOM COMMENTS"))
565 idHasher.Write(commit.Hash[:])
567 Title: fmt.Sprintf("\"%s\" comments", msgSplit(commit.Message)[0]),
568 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
569 Link: []atom.Link{{Rel: "self", Href: atomCommentsURL}},
570 Author: &atom.Person{Name: cfg.AtomAuthor},
572 if len(comments) > 0 {
573 feed.Updated = atom.TimeStr(comments[0].date)
575 feed.Updated = atom.Time(commit.Author.When)
577 for _, comment := range comments {
579 idHasher.Write([]byte("COMMENT"))
580 idHasher.Write(commit.Hash[:])
581 idHasher.Write([]byte(comment.n))
582 feed.Entry = append(feed.Entry, &atom.Entry{
583 Title: strings.Join([]string{
584 "Comment", comment.n,
587 Author: &atom.Person{Name: comment.from},
588 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
591 Href: strings.Join([]string{
594 commit.Hash.String(),
595 "#comment", comment.n,
598 Published: atom.TimeStr(comment.date),
599 Updated: atom.TimeStr(comment.date),
602 Body: strings.Join(comment.body, "\n"),
606 data, err := xml.MarshalIndent(&feed, "", " ")
613 notesRaw := getNote(notesTree, commit.Hash)
614 etagHash.Write(notesRaw)
616 lines := msgSplit(commit.Message)
618 when := commit.Author.When.Format(sgblog.WhenFmt)
619 os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
620 links := []string{`<link rel="alternate" title="Comments feed" href="` + atomCommentsURL + `" type="application/atom+xml">`}
622 if len(commit.ParentHashes) > 0 {
623 parent = commit.ParentHashes[0].String()
624 links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="prev">`)
626 out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
627 if cfg.AboutURL != "" {
628 out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.AboutURL, "about"))))
630 out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.URLPrefix+"/", "index"))))
632 out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.URLPrefix+"/"+parent, "prev"))))
634 out.Write([]byte(fmt.Sprintf(
635 "[<tt><a title=\"When\">%s</a></tt>]\n"+
636 "[<tt><a title=\"What\">%s</a></tt>]\n"+
637 "<hr/>\n<h2>%s</h2>\n<pre>\n",
638 when, commit.Hash.String(), title,
640 for _, line := range lines[2:] {
641 out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
643 out.Write([]byte("</pre>\n<hr/>\n"))
644 if len(notesRaw) > 0 {
645 out.Write([]byte("Note:<pre>\n" + string(notesRaw) + "\n</pre>\n<hr/>\n"))
647 if cfg.CommentsEmail != "" {
648 out.Write([]byte("[" + makeA(
649 "mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
653 out.Write([]byte("<dl>\n"))
654 for i, comment := range parseComments(commentsRaw) {
655 out.Write([]byte(fmt.Sprintf(
656 "<dt><a name=\"comment%d\"><a href=\"#comment%d\">comment %d</a>:"+
657 "</dt>\n<dd><pre>\n",
660 lines = strings.Split(comment, "\n")
661 for _, line := range lines[:3] {
662 out.Write([]byte(line + "\n"))
664 for _, line := range lines[3:] {
665 out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
667 out.Write([]byte("</pre></dd>\n"))
669 out.Write([]byte("</dl>\n"))
671 makeErr(errors.New("unknown URL action"))
673 out.Write([]byte("</body></html>\n"))
674 if gzipWriter != nil {
677 os.Stdout.Write(outBuf.Bytes())
681 os.Stdout.WriteString("Content-Type: text/xml; charset=UTF-8\n")
682 os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
683 if gzipWriter != nil {
684 os.Stdout.WriteString("Content-Encoding: gzip\n")
687 os.Stdout.WriteString("\n")
688 os.Stdout.Write(outBuf.Bytes())