2 SGBlog -- Git-based CGI blogging engine
3 Copyright (C) 2020 Sergey Matveev <stargrave@stargrave.org>
5 This program is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Affero General Public License as
7 published by the Free Software Foundation, version 3 of the License.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU Affero General Public License for more details.
14 You should have received a copy of the GNU Affero General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>.
18 // Git-based CGI blogging engine
39 "github.com/hjson/hjson-go"
40 "go.stargrave.org/sgblog"
41 "golang.org/x/crypto/blake2b"
42 "golang.org/x/tools/blog/atom"
43 "gopkg.in/src-d/go-git.v4"
44 "gopkg.in/src-d/go-git.v4/plumbing"
45 "gopkg.in/src-d/go-git.v4/plumbing/object"
// AtomFeed is the path component (under URLPrefix) serving the Atom feed.
49 AtomFeed = "feed.atom"
// defaultLinks collects <link> elements emitted on every HTML page;
// main() extends it from the configuration at startup.
53 defaultLinks = []string{}
// renderableSchemes is the set of URL schemes rendered as clickable links.
55 renderableSchemes = map[string]struct{}{
// TableEntry pairs a commit with its raw comments note for the index table.
// NOTE(review): field list is elided in this view.
64 type TableEntry struct {
// makeA renders an HTML anchor element pointing at href with the given
// visible text. Neither argument is escaped here; callers are expected
// to escape beforehand when the content is untrusted.
func makeA(href, text string) string {
	var b strings.Builder
	b.WriteString(`<a href="`)
	b.WriteString(href)
	b.WriteString(`">`)
	b.WriteString(text)
	b.WriteString(`</a>`)
	return b.String()
}
// etagString formats the current digest of etag as a quoted ETag header
// value: the hex-encoded checksum wrapped in double quotes.
func etagString(etag hash.Hash) string {
	// %q on a pure-hex ASCII string only adds the surrounding quotes.
	return fmt.Sprintf("%q", hex.EncodeToString(etag.Sum(nil)))
}
// urlParse parses what as a request URI and, when its scheme is listed
// in renderableSchemes, yields the parsed *url.URL.
// NOTE(review): the function tail is elided in this view; presumably it
// returns u on success and nil otherwise -- confirm against full source.
77 func urlParse(what string) *url.URL {
78 if u, err := url.ParseRequestURI(what); err == nil {
79 if _, exists := renderableSchemes[u.Scheme]; exists {
// lineURLize HTML-escapes line and then processes it word by word:
// words that parse as renderable URLs become self-referencing <a> links,
// and SHA1-looking tokens are linked to the blog entry urlPrefix/<hash>.
86 func lineURLize(urlPrefix, line string) string {
87 cols := strings.Split(html.EscapeString(line), " ")
88 for i, col := range cols {
89 if u := urlParse(col); u != nil {
// Renderable URL: link the word to itself.
90 cols[i] = makeA(col, col)
// Otherwise turn commit-hash-looking substrings into entry links.
93 cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
94 urlPrefix+"/$1", "$1",
// Reassemble the line with the original single-space separators.
97 return strings.Join(cols, " ")
// startHeader builds the CGI response header block: HTML content type,
// the ETag derived from the accumulated hash and, when gziped is set,
// a gzip Content-Encoding line.
100 func startHeader(etag hash.Hash, gziped bool) string {
102 "Content-Type: text/html; charset=UTF-8",
103 "ETag: " + etagString(etag),
// Advertise gzip only when the body is actually compressed.
106 lines = append(lines, "Content-Encoding: gzip")
// Two empty entries produce the blank line terminating the headers.
108 lines = append(lines, "")
109 lines = append(lines, "")
110 return strings.Join(lines, "\n")
// startHTML renders the opening of an HTML document: charset and
// generator <meta> tags, the page title, and defaultLinks plus any
// page-specific additional <link> elements, joined into the <head>.
113 func startHTML(title string, additional []string) string {
114 return fmt.Sprintf(`<html>
116 <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
117 <meta name="generator" content="SGBlog %s">
123 sgblog.Version, title,
124 strings.Join(append(defaultLinks, additional...), "\n "),
// makeErr reports err to the client as a plain-text CGI response.
// NOTE(review): the elided tail presumably prints err and terminates
// the process -- confirm against full source.
128 func makeErr(err error) {
129 fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
// checkETag answers "304 Not Modified" when the client's If-None-Match
// header equals the computed ETag, avoiding a full page render.
// NOTE(review): the elided tail presumably exits after answering -- confirm.
134 func checkETag(etag hash.Hash) {
135 ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
136 if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
137 fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
// Configuration load: SGBLOG_CFG names an Hjson file, which is
// round-tripped through JSON marshalling to fill the typed cfg struct.
143 cfgPath := os.Getenv("SGBLOG_CFG")
145 log.Fatalln("SGBLOG_CFG is not set")
147 cfgRaw, err := ioutil.ReadFile(cfgPath)
151 var cfgGeneral map[string]interface{}
152 if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
155 cfgRaw, err = json.Marshal(cfgGeneral)
160 if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
// CGI request environment: PATH_INFO selects the action, the query
// string carries the paging offset.
163 pathInfo, exists := os.LookupEnv("PATH_INFO")
167 queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
// Seed the ETag with everything influencing every page: engine version
// and the relevant configuration values.
172 etagHash, err := blake2b.New256(nil)
176 etagHash.Write([]byte("SGBLOG"))
177 etagHash.Write([]byte(sgblog.Version))
178 etagHash.Write([]byte(cfg.GitPath))
179 etagHash.Write([]byte(cfg.Branch))
180 etagHash.Write([]byte(cfg.Title))
181 etagHash.Write([]byte(cfg.URLPrefix))
182 etagHash.Write([]byte(cfg.AtomBaseURL))
183 etagHash.Write([]byte(cfg.AtomId))
184 etagHash.Write([]byte(cfg.AtomAuthor))
// HTML-only ETag material (hashed later into web pages, not the Atom
// feed) and the <link> elements shared by every HTML page.
186 etagHashForWeb := [][]byte{}
188 defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
189 etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
191 if cfg.Webmaster != "" {
192 defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
193 etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
195 if cfg.AboutURL != "" {
196 etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
198 for _, gitURL := range cfg.GitURLs {
199 defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
200 etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
202 if cfg.CommentsNotesRef != "" {
203 etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
205 if cfg.CommentsEmail != "" {
206 etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
// Navigation links present on every page: index top and the Atom feed.
209 defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
210 atomURL := cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomFeed
211 defaultLinks = append(defaultLinks, `<link rel="alternate" title="Atom feed" href="`+atomURL+`" type="application/atom+xml">`)
// Open the Git repository and resolve the configured branch head.
213 headHash, err := initRepo(cfg)
// Locate the notes trees: the default "refs/notes/commits" notes and,
// when configured, the separate comments notes reference.
218 if notes, err := repo.Notes(); err == nil {
219 var notesRef *plumbing.Reference
220 var commentsRef *plumbing.Reference
221 notes.ForEach(func(ref *plumbing.Reference) error {
222 switch string(ref.Name()) {
223 case "refs/notes/commits":
225 case cfg.CommentsNotesRef:
// Tree lookup failures are deliberately ignored: missing notes simply
// leave the corresponding tree nil.
231 if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
232 notesTree, _ = commentsCommit.Tree()
235 if commentsRef != nil {
236 if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
237 commentsTree, _ = commentsCommit.Tree()
// Buffer the response body, gzip-compressing when the client's
// Accept-Encoding header lists gzip.
242 var outBuf bytes.Buffer
245 var gzipWriter *gzip.Writer
246 acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
247 for _, encoding := range strings.Split(acceptEncoding, ", ") {
248 if encoding == "gzip" {
249 gzipWriter = gzip.NewWriter(&outBuf)
// Index page: walk the branch log from the requested offset, collect a
// page worth of commits, and render them as an HTML table.
256 if offsetRaw, exists := queryValues["offset"]; exists {
257 offset, err = strconv.Atoi(offsetRaw[0])
262 repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
// Skip the first `offset` commits to reach the requested page.
267 for i := 0; i < offset; i++ {
268 if _, err = repoLog.Next(); err != nil {
274 entries := make([]TableEntry, 0, PageEntries)
// Fold the web-specific material and page content into the ETag so the
// 304 check reflects exactly what would be rendered.
276 for _, data := range etagHashForWeb {
279 etagHash.Write([]byte("INDEX"))
280 for i := 0; i < PageEntries; i++ {
281 commit, err := repoLog.Next()
286 etagHash.Write(commit.Hash[:])
287 commentsRaw := getNote(commentsTree, commit.Hash)
288 etagHash.Write(commentsRaw)
289 entries = append(entries, TableEntry{commit, commentsRaw})
// Render the table of entries: number, date, title link, line count,
// comment count and linked-to domains.
293 var table bytes.Buffer
295 "<table border=1>\n" +
296 "<caption>Comments</caption>\n<tr>" +
300 `<th size="5%"><a title="Lines">L</a></th>` +
301 `<th size="5%"><a title="Comments">C</a></th>` +
302 "<th>Linked to</th></tr>\n")
303 for _, entry := range entries {
305 lines := msgSplit(entry.commit.Message)
// Message body lines that are renderable URLs become domain links.
306 domains := []string{}
307 for _, line := range lines[2:] {
308 if u := urlParse(line); u == nil {
311 domains = append(domains, makeA(line, u.Host))
314 var commentsValue string
315 if l := len(parseComments(entry.commentsRaw)); l > 0 {
316 commentsValue = strconv.Itoa(l)
318 commentsValue = " "
320 table.WriteString(fmt.Sprintf(
321 "<tr><td>%d</td><td><tt>%s</tt></td>"+
323 "<td>%d</td><td>%s</td>"+
324 "<td>%s</td></tr>\n",
325 commitN, entry.commit.Author.When.Format(sgblog.WhenFmt),
326 makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
329 strings.Join(domains, " "),
332 table.WriteString("</table>")
// Pagination: [prev] points at newer entries, [next] at older ones.
336 var refs bytes.Buffer
338 if offsetPrev := offset - PageEntries; offsetPrev > 0 {
339 href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
341 href = cfg.URLPrefix + "/"
343 links = append(links, `<link rel="prev" href="`+href+`" title="newer">`)
344 refs.WriteString("\n" + makeA(href, "[prev]"))
347 href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
348 links = append(links, `<link rel="next" href="`+href+`" title="older">`)
349 refs.WriteString("\n" + makeA(href, "[next]"))
// Emit headers uncompressed to stdout; the body goes through `out`,
// which may be the gzip writer.
352 os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
353 out.Write([]byte(startHTML(
354 fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
357 if cfg.AboutURL != "" {
358 out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
360 out.Write(refs.Bytes())
361 out.Write(table.Bytes())
362 out.Write(refs.Bytes())
363 out.Write([]byte("\n"))
// Atom feed: one entry per commit on the latest page, feed metadata
// taken from the branch head commit.
364 } else if pathInfo == "/"+AtomFeed {
365 commit, err := repo.CommitObject(*headHash)
366 etagHash.Write([]byte("ATOM"))
370 etagHash.Write(commit.Hash[:])
375 Updated: atom.Time(commit.Author.When),
380 Author: &atom.Person{Name: cfg.AtomAuthor},
382 repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
386 for i := 0; i < PageEntries; i++ {
387 commit, err = repoLog.Next()
// Derive a stable per-entry UUID from the commit hash by stamping the
// UUIDv4 version bits onto its first 16 bytes.
392 feedIdRaw := new([16]byte)
393 copy(feedIdRaw[:], commit.Hash[:])
394 feedIdRaw[6] = (feedIdRaw[6] & 0x0F) | uint8(4<<4) // version 4
395 feedId := fmt.Sprintf(
404 lines := msgSplit(commit.Message)
405 feed.Entry = append(feed.Entry, &atom.Entry{
407 ID: "urn:uuid:" + feedId,
410 Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
412 Published: atom.Time(commit.Author.When),
413 Updated: atom.Time(commit.Author.When),
420 Body: strings.Join(lines[2:], "\n"),
424 data, err := xml.MarshalIndent(&feed, "", " ")
// Feed headers differ from HTML pages: XML content type, same ETag
// and optional gzip encoding, then the buffered body.
429 os.Stdout.WriteString("Content-Type: text/xml; charset=UTF-8\n")
430 os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
431 if gzipWriter != nil {
432 os.Stdout.WriteString("Content-Encoding: gzip\n")
435 os.Stdout.WriteString("\n")
436 os.Stdout.Write(outBuf.Bytes())
// Single entry page: PATH_INFO carries a commit hash; render that
// commit's message plus its note and comments.
438 } else if sha1DigestRe.MatchString(pathInfo[1:]) {
439 commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
// ETag covers web material, the commit itself, and both notes blobs.
443 for _, data := range etagHashForWeb {
446 etagHash.Write([]byte("ENTRY"))
447 etagHash.Write(commit.Hash[:])
448 notesRaw := getNote(notesTree, commit.Hash)
449 etagHash.Write(notesRaw)
450 commentsRaw := getNote(commentsTree, commit.Hash)
451 etagHash.Write(commentsRaw)
453 lines := msgSplit(commit.Message)
455 when := commit.Author.When.Format(sgblog.WhenFmt)
456 os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
// The first parent (when present) is exposed as the "older" entry.
459 if len(commit.ParentHashes) > 0 {
460 parent = commit.ParentHashes[0].String()
461 links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="older">`)
463 out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
464 if cfg.AboutURL != "" {
465 out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.AboutURL, "about"))))
467 out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.URLPrefix+"/", "index"))))
469 out.Write([]byte(fmt.Sprintf(
471 makeA(cfg.URLPrefix+"/"+parent, "older"),
474 out.Write([]byte(fmt.Sprintf(
475 "[<tt><a title=\"When\">%s</a></tt>]\n"+
476 "[<tt><a title=\"Hash\">%s</a></tt>]\n"+
477 "<hr/>\n<h2>%s</h2>\n<pre>\n",
478 when, commit.Hash.String(), title,
// Message body: each line gets its URLs and hashes linked.
480 for _, line := range lines[2:] {
481 out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
483 out.Write([]byte("</pre>\n<hr/>\n"))
484 if len(notesRaw) > 0 {
485 out.Write([]byte("Note:<pre>\n" + string(notesRaw) + "\n</pre>\n<hr/>\n"))
// Offer a mailto link for leaving comments when an address is set.
487 if cfg.CommentsEmail != "" {
488 out.Write([]byte("[" + makeA(
489 "mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
493 out.Write([]byte("<dl>\n"))
494 for i, comment := range parseComments(commentsRaw) {
495 out.Write([]byte(fmt.Sprintf(
496 "<dt><a name=\"comment%d\"><a href=\"#comment%d\">comment %d</a>:"+
497 "</dt>\n<dd><pre>\n",
// The first three comment lines are emitted verbatim (presumably a
// metadata header -- confirm); the remainder is URLized like the body.
500 lines = strings.Split(comment, "\n")
501 for _, line := range lines[:3] {
502 out.Write([]byte(line + "\n"))
504 for _, line := range lines[3:] {
505 out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
507 out.Write([]byte("</pre></dd>\n"))
509 out.Write([]byte("</dl>\n"))
// Any other PATH_INFO is rejected.
511 makeErr(errors.New("unknown URL action"))
// Close the document, flush the gzip writer when active, then emit
// the buffered (possibly compressed) body.
513 out.Write([]byte("</body></html>\n"))
514 if gzipWriter != nil {
517 os.Stdout.Write(outBuf.Bytes())