2 SGBlog -- Git-based CGI blogging engine
3 Copyright (C) 2020 Sergey Matveev <stargrave@stargrave.org>
5 This program is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Affero General Public License as
7 published by the Free Software Foundation, version 3 of the License.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU Affero General Public License for more details.
14 You should have received a copy of the GNU Affero General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>.
18 // Git-based CGI blogging engine
40 "github.com/hjson/hjson-go"
41 "go.cypherpunks.ru/netstring/v2"
42 "golang.org/x/crypto/blake2b"
43 "golang.org/x/tools/blog/atom"
44 "gopkg.in/src-d/go-git.v4"
45 "gopkg.in/src-d/go-git.v4/plumbing"
46 "gopkg.in/src-d/go-git.v4/plumbing/object"
// Timestamp layout used for all rendered dates (Go reference time).
51 WhenFmt = "2006-01-02 15:04:05Z07:00"
// PATH_INFO suffix under which the Atom feed is served.
52 AtomFeed = "feed.atom"
// Matches a full 40-hex-digit SHA-1, used both for routing single-entry
// URLs and for linkifying commit references inside entry bodies.
57 sha1DigestRe = regexp.MustCompilePOSIX("([0-9a-f]{40,40})")
// <link> elements emitted on every page; extended at startup from Cfg.
58 defaultLinks = []string{}
// Tree of the comments notes ref, nil when comments are disabled.
60 commentsTree *object.Tree
// URL schemes that are turned into clickable anchors.
62 renderableSchemes = map[string]struct{}{
// TableEntry is one row of the index page.
// NOTE(review): field lines were lost in this capture — presumably a
// *object.Commit plus its raw comments blob (see its use at the index
// loop: TableEntry{commit, commentsRaw}); confirm against full source.
70 type TableEntry struct {
// Git notes reference holding per-entry comments (part of Cfg;
// surrounding fields were lost in this capture).
91 CommentsNotesRef string
// makeA renders an HTML anchor: <a href="HREF">TEXT</a>.
// Neither argument is escaped; callers pass already-safe strings.
func makeA(href, text string) string {
	var b strings.Builder
	b.WriteString(`<a href="`)
	b.WriteString(href)
	b.WriteString(`">`)
	b.WriteString(text)
	b.WriteString(`</a>`)
	return b.String()
}
99 func etagString(etag hash.Hash) string {
100 return `"` + hex.EncodeToString(etag.Sum(nil)) + `"`
103 func urlParse(what string) *url.URL {
104 if u, err := url.ParseRequestURI(what); err == nil {
105 if _, exists := renderableSchemes[u.Scheme]; exists {
// msgSplit splits a commit message into lines, dropping the trailing
// empty element produced by the final "\n", and guarantees at least three
// elements: lines[0] is the title, lines[1] the separator, lines[2:] the
// body. Fix: an entirely empty message previously indexed lines[0] on an
// empty slice and panicked; it now yields {"", "", ""}.
func msgSplit(msg string) []string {
	lines := strings.Split(msg, "\n")
	lines = lines[:len(lines)-1]
	if len(lines) == 0 {
		return []string{"", "", ""}
	}
	if len(lines) < 3 {
		// Title-only message: synthesize the separator and empty body.
		lines = []string{lines[0], "", ""}
	}
	return lines
}
121 func getCommentsRaw(what plumbing.Hash) []byte {
122 if commentsTree == nil {
125 entry, err := commentsTree.FindEntry(what.String())
129 blob, err := repo.BlobObject(entry.Hash)
133 r, err := blob.Reader()
137 data, err := ioutil.ReadAll(r)
141 return bytes.TrimSuffix(data, []byte{'\n'})
144 func parseComments(data []byte) []string {
145 comments := []string{}
146 nsr := netstring.NewReader(bytes.NewReader(data))
148 if _, err := nsr.Next(); err != nil {
151 if comment, err := ioutil.ReadAll(nsr); err == nil {
152 comments = append(comments, string(comment))
158 func startHeader(etag hash.Hash, gziped bool) string {
160 "Content-Type: text/html; charset=UTF-8",
161 "ETag: " + etagString(etag),
164 lines = append(lines, "Content-Encoding: gzip")
166 lines = append(lines, "")
167 lines = append(lines, "")
168 return strings.Join(lines, "\n")
// startHTML renders the opening of the HTML document: <html>/<head> with
// charset and generator <meta> tags, the page <title>, and the combined
// defaultLinks plus page-specific `additional` <link> elements.
// NOTE(review): most of the Sprintf template was lost in this capture —
// the visible arguments imply a %s for the version, the title, and the
// joined links; confirm the full template against the original source.
171 func startHTML(title string, additional []string) string {
172 return fmt.Sprintf(`<html>
174 <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
175 <meta name="generator" content="SGBlog %s">
// Links are indented with four spaces to match the template's formatting.
182 strings.Join(append(defaultLinks, additional...), "\n    "),
// makeErr reports err to the CGI client as a plain-text response.
// NOTE(review): the function tail was lost in this capture — presumably
// the error itself is printed and execution is aborted after this header;
// confirm against the full source.
186 func makeErr(err error) {
187 fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
// checkETag implements the conditional-GET short circuit: when the
// client's If-None-Match header matches the current ETag, answer with
// "Status: 304" (Not Modified) instead of rendering the page.
192 func checkETag(etag hash.Hash) {
193 ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
194 if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
195 fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
// NOTE(review): the tail was lost in this capture — presumably the
// process exits here so no body is generated; confirm against source.
// --- main() interior [capture note: the enclosing "func main() {" line and
// many statements were lost in extraction; the comments below describe only
// what the visible lines establish and are hedged elsewhere] ---
// Configuration path comes from the environment; missing SGBLOG_CFG is fatal.
201 cfgPath := os.Getenv("SGBLOG_CFG")
203 log.Fatalln("SGBLOG_CFG is not set")
// PATH_INFO selects the action (index / Atom feed / single entry, branched
// on below); QUERY_STRING carries the pagination "offset" parameter.
205 pathInfo, exists := os.LookupEnv("PATH_INFO")
209 queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
// Config is Hjson: decode into a generic map, re-encode as strict JSON,
// then unmarshal into the typed cfg struct.
214 cfgRaw, err := ioutil.ReadFile(cfgPath)
218 var cfgGeneral map[string]interface{}
219 if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
222 cfgRaw, err = json.Marshal(cfgGeneral)
227 if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
// Seed a BLAKE2b-256 ETag hash with every config field that influences
// rendered output, so a config change invalidates client caches.
231 etagHash, err := blake2b.New256(nil)
235 etagHash.Write([]byte("SGBLOG"))
236 etagHash.Write([]byte(cfg.GitPath))
237 etagHash.Write([]byte(cfg.Branch))
238 etagHash.Write([]byte(cfg.Title))
239 etagHash.Write([]byte(cfg.BaseURL))
240 etagHash.Write([]byte(cfg.URLPrefix))
241 etagHash.Write([]byte(cfg.AtomId))
242 etagHash.Write([]byte(cfg.AtomAuthor))
// Web-only config (CSS, webmaster, git URLs, comments settings) is folded
// into the ETag later, per HTML branch, and also emitted as <link>s.
244 etagHashForWeb := [][]byte{}
246 defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
247 etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
249 if cfg.Webmaster != "" {
250 defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
251 etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
253 if cfg.AboutURL != "" {
254 etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
256 for _, gitURL := range cfg.GitURLs {
257 defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
258 etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
260 if cfg.CommentsNotesRef != "" {
261 etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
263 if cfg.CommentsEmail != "" {
264 etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
267 defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
268 atomURL := cfg.BaseURL + cfg.URLPrefix + "/" + AtomFeed
269 defaultLinks = append(defaultLinks, `<link rel="alternate" title="Atom feed" href="`+atomURL+`" type="application/atom+xml">`)
// Open the repository and resolve the configured branch head.
271 repo, err = git.PlainOpen(cfg.GitPath)
275 head, err := repo.Reference(plumbing.ReferenceName(cfg.Branch), false)
// When comments are enabled, locate the notes ref and cache its tree in
// the package-level commentsTree (best-effort: failures leave it nil).
280 if cfg.CommentsNotesRef != "" {
281 if notes, err := repo.Notes(); err == nil {
282 var comments *plumbing.Reference
283 notes.ForEach(func(ref *plumbing.Reference) error {
284 if string(ref.Name()) == cfg.CommentsNotesRef {
290 if commentsCommit, err := repo.CommitObject(comments.Hash()); err == nil {
291 commentsTree, _ = commentsCommit.Tree()
// Output is buffered; gzip is enabled when the client advertises it.
297 var outBuf bytes.Buffer
300 var gzipWriter *gzip.Writer
301 acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
302 for _, encoding := range strings.Split(acceptEncoding, ", ") {
303 if encoding == "gzip" {
304 gzipWriter = gzip.NewWriter(&outBuf)
// --- Index page branch: paginated table of the PageEntries most recent
// commits starting at ?offset=N. ---
311 if offsetRaw, exists := queryValues["offset"]; exists {
312 offset, err = strconv.Atoi(offsetRaw[0])
317 log, err := repo.Log(&git.LogOptions{From: head.Hash()})
// Skip `offset` commits to reach the requested page.
322 for i := 0; i < offset; i++ {
323 if _, err = log.Next(); err != nil {
329 entries := make([]TableEntry, 0, PageEntries)
// ETag covers web config, the "INDEX" tag, each commit hash and its
// raw comments, so any visible change invalidates the page.
331 for _, data := range etagHashForWeb {
334 etagHash.Write([]byte("INDEX"))
335 for i := 0; i < PageEntries; i++ {
336 commit, err := log.Next()
341 etagHash.Write(commit.Hash[:])
342 commentsRaw := getCommentsRaw(commit.Hash)
343 etagHash.Write(commentsRaw)
344 entries = append(entries, TableEntry{commit, commentsRaw})
// Render the index table: number, date, title link, line count (L),
// comment count (C), and domains the entry body links to.
348 var table bytes.Buffer
350 "<table border=1>\n<tr>" +
354 "<th size=\"5%\">L</th>" +
355 "<th size=\"5%\">C</th>" +
356 "<th>Linked to</th></tr>\n")
357 for _, entry := range entries {
359 lines := msgSplit(entry.commit.Message)
360 domains := []string{}
361 for _, line := range lines[2:] {
362 if u := urlParse(line); u == nil {
365 domains = append(domains, makeA(line, u.Host))
368 var commentsValue string
369 if l := len(parseComments(entry.commentsRaw)); l > 0 {
370 commentsValue = strconv.Itoa(l)
372 commentsValue = " "
374 table.WriteString(fmt.Sprintf(
375 "<tr><td>%d</td><td><tt>%s</tt></td>"+
377 "<td>%d</td><td>%s</td>"+
378 "<td>%s</td></tr>\n",
379 commentN, entry.commit.Author.When.Format(WhenFmt),
380 makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
383 strings.Join(domains, " "),
386 table.WriteString("</table>")
// Pagination links: "prev" (newer) only past the first page, "next"
// (older) unconditionally; mirrored as <link rel=…> header elements.
390 var refs bytes.Buffer
392 if offsetPrev := offset - PageEntries; offsetPrev > 0 {
393 href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
395 href = cfg.URLPrefix + "/"
397 links = append(links, `<link rel="prev" href="`+href+`" title="newer">`)
398 refs.WriteString(makeA(href, " [prev]"))
401 href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
402 links = append(links, `<link rel="next" href="`+href+`" title="older">`)
403 refs.WriteString(makeA(href, " [next]"))
// Headers go straight to stdout; body goes through `out` (plain or gzip).
406 os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
407 out.Write([]byte(startHTML(
408 fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
411 if cfg.AboutURL != "" {
412 out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
414 out.Write(refs.Bytes())
415 out.Write(table.Bytes())
416 out.Write(refs.Bytes())
417 out.Write([]byte("\n"))
// --- Atom feed branch: the PageEntries most recent commits as entries. ---
418 } else if pathInfo == "/"+AtomFeed {
419 commit, err := repo.CommitObject(head.Hash())
423 etagHash.Write([]byte("ATOM"))
424 etagHash.Write(commit.Hash[:])
429 Updated: atom.Time(commit.Author.When),
434 Author: &atom.Person{Name: cfg.AtomAuthor},
436 log, err := repo.Log(&git.LogOptions{From: head.Hash()})
440 for i := 0; i < PageEntries; i++ {
441 commit, err = log.Next()
// Derive a stable RFC 4122 version-4-shaped UUID from the first 16
// bytes of the commit hash for the entry's urn:uuid ID.
446 feedIdRaw := new([16]byte)
447 copy(feedIdRaw[:], commit.Hash[:])
448 feedIdRaw[6] = (feedIdRaw[6] & 0x0F) | uint8(4<<4) // version 4
449 feedId := fmt.Sprintf(
458 lines := msgSplit(commit.Message)
459 feed.Entry = append(feed.Entry, &atom.Entry{
461 ID: "urn:uuid:" + feedId,
464 Href: cfg.BaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
466 Published: atom.Time(commit.Author.When),
467 Updated: atom.Time(commit.Author.When),
474 Body: strings.Join(lines[2:], "\n"),
478 data, err := xml.MarshalIndent(&feed, "", "  ")
483 os.Stdout.WriteString("Content-Type: text/xml; charset=UTF-8\n")
484 os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
485 if gzipWriter != nil {
486 os.Stdout.WriteString("Content-Encoding: gzip\n")
489 os.Stdout.WriteString("\n")
490 os.Stdout.Write(outBuf.Bytes())
// --- Single-entry branch: PATH_INFO is "/<40-hex SHA-1>". ---
492 } else if sha1DigestRe.MatchString(pathInfo[1:]) {
493 commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
497 for _, data := range etagHashForWeb {
500 etagHash.Write([]byte("ENTRY"))
501 etagHash.Write(commit.Hash[:])
502 commentsRaw := getCommentsRaw(commit.Hash)
503 etagHash.Write(commentsRaw)
505 lines := msgSplit(commit.Message)
507 when := commit.Author.When.Format(WhenFmt)
508 os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
// First parent (if any) doubles as the "older" navigation target.
511 if len(commit.ParentHashes) > 0 {
512 parent = commit.ParentHashes[0].String()
513 links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="older">`)
515 out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
516 if cfg.AboutURL != "" {
517 out.Write([]byte(fmt.Sprintf("[%s] ", makeA(cfg.AboutURL, "about"))))
520 out.Write([]byte(fmt.Sprintf(
522 makeA(cfg.URLPrefix+"/"+parent, "older"),
525 out.Write([]byte(fmt.Sprintf(
526 "[<tt>%s</tt>] [<tt>%s</tt>]<hr/>\n<h2>%s</h2>\n<pre>\n",
527 when, commit.Hash.String(), title,
// Body rendering: HTML-escape each line, then linkify whole-word URLs
// and 40-hex commit references column by column.
529 for _, line := range lines[2:] {
530 line = html.EscapeString(line)
531 cols := strings.Split(line, " ")
532 for i, col := range cols {
533 if u := urlParse(col); u != nil {
534 cols[i] = makeA(col, col)
537 cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
538 cfg.URLPrefix+"/$1", "$1",
541 line = strings.Join(cols, " ")
542 out.Write([]byte(line + "\n"))
544 out.Write([]byte("</pre>\n<hr/>\n"))
// Comments: mailto link for submitting, then existing ones as a <dl>.
545 if cfg.CommentsEmail != "" {
546 out.Write([]byte("[" + makeA(
547 "mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
551 out.Write([]byte("<dl>\n"))
552 for i, comment := range parseComments(commentsRaw) {
553 out.Write([]byte(fmt.Sprintf(
554 "<dt><a name=\"comment%d\"><a href=\"#comment%d\">comment %d</a>:"+
555 "</dt>\n<dd><pre>\n%s\n</pre></dd>\n",
556 i, i, i, html.EscapeString(comment),
559 out.Write([]byte("</dl>\n"))
// Unrecognized PATH_INFO: plain-text error response.
561 makeErr(errors.New("unknown URL action"))
// Epilogue shared by the HTML branches: close the document and flush the
// (possibly gzip-compressed) buffer to stdout.
563 out.Write([]byte("</body></html>\n"))
564 if gzipWriter != nil {
567 os.Stdout.Write(outBuf.Bytes())