cmd/sgblog/http.go
/*
SGBlog -- Git-based CGI blogging engine
Copyright (C) 2020 Sergey Matveev <stargrave@stargrave.org>

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, version 3 of the License.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/

// Git-based CGI blogging engine
package main

import (
        "bytes"
        "compress/gzip"
        "encoding/hex"
        "encoding/json"
        "encoding/xml"
        "errors"
        "fmt"
        "hash"
        "html"
        "io"
        "io/ioutil"
        "log"
        "net/url"
        "os"
        "strconv"
        "strings"

        "github.com/hjson/hjson-go"
        "go.stargrave.org/sgblog"
        "golang.org/x/crypto/blake2b"
        "golang.org/x/tools/blog/atom"
        "gopkg.in/src-d/go-git.v4"
        "gopkg.in/src-d/go-git.v4/plumbing"
        "gopkg.in/src-d/go-git.v4/plumbing/object"
)

const (
        AtomFeed = "feed.atom"
)

var (
        defaultLinks = []string{}

        renderableSchemes = map[string]struct{}{
                "ftp":    {},
                "gopher": {},
                "http":   {},
                "https":  {},
                "telnet": {},
        }
)

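// TableEntry pairs a blog post's commit with the raw notes blob holding
// its comments.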
type TableEntry struct {
        commit      *object.Commit
        commentsRaw []byte
}

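// makeA renders an HTML anchor with the given href and text.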
func makeA(href, text string) string {
        return `<a href="` + href + `">` + text + `</a>`
}

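// etagString formats the hash sum as a quoted hex string suitable for
// the ETag header.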
func etagString(etag hash.Hash) string {
        return `"` + hex.EncodeToString(etag.Sum(nil)) + `"`
}

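// urlParse returns the parsed URL if it uses one of the renderable
// schemes, nil otherwise.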
func urlParse(what string) *url.URL {
        if u, err := url.ParseRequestURI(what); err == nil {
                if _, exists := renderableSchemes[u.Scheme]; exists {
                        return u
                }
        }
        return nil
}

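// lineURLize HTML-escapes a line and turns URLs and SHA1 commit digests
// found in it into hyperlinks.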
func lineURLize(urlPrefix, line string) string {
        cols := strings.Split(html.EscapeString(line), " ")
        for i, col := range cols {
                if u := urlParse(col); u != nil {
                        cols[i] = makeA(col, col)
                        continue
                }
                cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
                        urlPrefix+"/$1", "$1",
                ))
        }
        return strings.Join(cols, " ")
}

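// startHeader builds the CGI response header: Content-Type, ETag and, when
// the body is compressed, Content-Encoding, terminated by an empty line.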
func startHeader(etag hash.Hash, gziped bool) string {
        lines := []string{
                "Content-Type: text/html; charset=UTF-8",
                "ETag: " + etagString(etag),
        }
        if gziped {
                lines = append(lines, "Content-Encoding: gzip")
        }
        lines = append(lines, "")
        lines = append(lines, "")
        return strings.Join(lines, "\n")
}

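// startHTML emits the opening <html>/<head>/<body> markup with the page
// title, the default <link> elements and any additional ones.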
func startHTML(title string, additional []string) string {
        return fmt.Sprintf(`<html>
<head>
        <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
        <meta name="generator" content="SGBlog %s">
        <title>%s</title>
        %s
</head>
<body>
`,
                sgblog.Version, title,
                strings.Join(append(defaultLinks, additional...), "\n   "),
        )
}

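// makeErr reports the error to the client as plain text and panics.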
func makeErr(err error) {
        fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
        fmt.Println(err)
        panic(err)
}

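// checkETag answers 304 Not Modified and exits if the client's
// If-None-Match header already matches the computed ETag.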
func checkETag(etag hash.Hash) {
        ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
        if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
                fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
                os.Exit(0)
        }
}

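// serveHTTP is the CGI entry point for the web interface: it reads the
// configuration pointed to by SGBLOG_CFG, opens the Git repository and
// renders the index page, the Atom feed or a single post depending on
// PATH_INFO.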
func serveHTTP() {
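        // The configuration is HJSON: it is decoded generically, re-encoded
        // as JSON and then unmarshalled into the typed Cfg structure.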
        cfgPath := os.Getenv("SGBLOG_CFG")
        if cfgPath == "" {
                log.Fatalln("SGBLOG_CFG is not set")
        }
        cfgRaw, err := ioutil.ReadFile(cfgPath)
        if err != nil {
                makeErr(err)
        }
        var cfgGeneral map[string]interface{}
        if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
                makeErr(err)
        }
        cfgRaw, err = json.Marshal(cfgGeneral)
        if err != nil {
                makeErr(err)
        }
        var cfg *Cfg
        if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
                makeErr(err)
        }
        pathInfo, exists := os.LookupEnv("PATH_INFO")
        if !exists {
                pathInfo = "/"
        }
        queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
        if err != nil {
                makeErr(err)
        }

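        // The ETag is a BLAKE2b hash seeded with the engine version and the
        // configuration values that influence the rendered output.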
        etagHash, err := blake2b.New256(nil)
        if err != nil {
                panic(err)
        }
        etagHash.Write([]byte("SGBLOG"))
        etagHash.Write([]byte(sgblog.Version))
        etagHash.Write([]byte(cfg.GitPath))
        etagHash.Write([]byte(cfg.Branch))
        etagHash.Write([]byte(cfg.Title))
        etagHash.Write([]byte(cfg.URLPrefix))
        etagHash.Write([]byte(cfg.AtomBaseURL))
        etagHash.Write([]byte(cfg.AtomId))
        etagHash.Write([]byte(cfg.AtomAuthor))

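        // Optional configuration values contribute <link> elements for the
        // HTML head and extra input for the HTML pages' ETag.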
        etagHashForWeb := [][]byte{}
        if cfg.CSS != "" {
                defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
                etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
        }
        if cfg.Webmaster != "" {
                defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
                etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
        }
        if cfg.AboutURL != "" {
                etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
        }
        for _, gitURL := range cfg.GitURLs {
                defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
                etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
        }
        if cfg.CommentsNotesRef != "" {
                etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
        }
        if cfg.CommentsEmail != "" {
                etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
        }

        defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
        atomURL := cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomFeed
        defaultLinks = append(defaultLinks, `<link rel="alternate" title="Atom feed" href="`+atomURL+`" type="application/atom+xml">`)

        headHash, err := initRepo(cfg)
        if err != nil {
                makeErr(err)
        }

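        // Posts' notes live under refs/notes/commits and comments under the
        // configured CommentsNotesRef; resolve both trees if they exist.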
        if notes, err := repo.Notes(); err == nil {
                var notesRef *plumbing.Reference
                var commentsRef *plumbing.Reference
                notes.ForEach(func(ref *plumbing.Reference) error {
                        switch string(ref.Name()) {
                        case "refs/notes/commits":
                                notesRef = ref
                        case cfg.CommentsNotesRef:
                                commentsRef = ref
                        }
                        return nil
                })
                if notesRef != nil {
                        if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
                                notesTree, _ = commentsCommit.Tree()
                        }
                }
                if commentsRef != nil {
                        if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
                                commentsTree, _ = commentsCommit.Tree()
                        }
                }
        }

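        // Output is buffered; if the client advertises gzip support in
        // Accept-Encoding, the body is written through a gzip.Writer.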
        var outBuf bytes.Buffer
        var out io.Writer
        out = &outBuf
        var gzipWriter *gzip.Writer
        acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
        for _, encoding := range strings.Split(acceptEncoding, ", ") {
                if encoding == "gzip" {
                        gzipWriter = gzip.NewWriter(&outBuf)
                        out = gzipWriter
                }
        }

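        // Dispatch on PATH_INFO: "/" is the paginated index, "/feed.atom" is
        // the Atom feed, and a SHA1 digest selects a single post.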
        if pathInfo == "/" {
                offset := 0
                if offsetRaw, exists := queryValues["offset"]; exists {
                        offset, err = strconv.Atoi(offsetRaw[0])
                        if err != nil {
                                makeErr(err)
                        }
                }
                repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
                if err != nil {
                        makeErr(err)
                }
                commitN := 0
                for i := 0; i < offset; i++ {
                        if _, err = repoLog.Next(); err != nil {
                                break
                        }
                        commitN++
                }

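                // Collect up to PageEntries commits for this page; their hashes
                // and comment notes feed the page's ETag.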
                entries := make([]TableEntry, 0, PageEntries)
                logEnded := false
                for _, data := range etagHashForWeb {
                        etagHash.Write(data)
                }
                etagHash.Write([]byte("INDEX"))
                for i := 0; i < PageEntries; i++ {
                        commit, err := repoLog.Next()
                        if err != nil {
                                logEnded = true
                                break
                        }
                        etagHash.Write(commit.Hash[:])
                        commentsRaw := getNote(commentsTree, commit.Hash)
                        etagHash.Write(commentsRaw)
                        entries = append(entries, TableEntry{commit, commentsRaw})
                }
                checkETag(etagHash)

                var table bytes.Buffer
                table.WriteString(
                        "<table border=1>\n" +
                                "<caption>Comments</caption>\n<tr>" +
                                "<th>N</th>" +
                                "<th>When</th>" +
                                "<th>Title</th>" +
                                `<th size="5%"><a title="Lines">L</a></th>` +
                                `<th size="5%"><a title="Comments">C</a></th>` +
                                "<th>Linked to</th></tr>\n")
                for _, entry := range entries {
                        commitN++
                        lines := msgSplit(entry.commit.Message)
                        domains := []string{}
                        for _, line := range lines[2:] {
                                if u := urlParse(line); u == nil {
                                        break
                                } else {
                                        domains = append(domains, makeA(line, u.Host))
                                }
                        }
                        var commentsValue string
                        if l := len(parseComments(entry.commentsRaw)); l > 0 {
                                commentsValue = strconv.Itoa(l)
                        } else {
                                commentsValue = "&nbsp;"
                        }
                        table.WriteString(fmt.Sprintf(
                                "<tr><td>%d</td><td><tt>%s</tt></td>"+
                                        "<td>%s</td>"+
                                        "<td>%d</td><td>%s</td>"+
                                        "<td>%s</td></tr>\n",
                                commitN, entry.commit.Author.When.Format(sgblog.WhenFmt),
                                makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
                                len(lines)-2,
                                commentsValue,
                                strings.Join(domains, " "),
                        ))
                }
                table.WriteString("</table>")

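                // Pagination: "prev" points at newer posts, "next" at older ones,
                // both as <link> relations and as inline [prev]/[next] anchors.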
                var href string
                var links []string
                var refs bytes.Buffer
                if offset > 0 {
                        if offsetPrev := offset - PageEntries; offsetPrev > 0 {
                                href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
                        } else {
                                href = cfg.URLPrefix + "/"
                        }
                        links = append(links, `<link rel="prev" href="`+href+`" title="newer">`)
                        refs.WriteString("\n" + makeA(href, "[prev]"))
                }
                if !logEnded {
                        href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
                        links = append(links, `<link rel="next" href="`+href+`" title="older">`)
                        refs.WriteString("\n" + makeA(href, "[next]"))
                }

                os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
                out.Write([]byte(startHTML(
                        fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
                        links,
                )))
                if cfg.AboutURL != "" {
                        out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
                }
                out.Write(refs.Bytes())
                out.Write(table.Bytes())
                out.Write(refs.Bytes())
                out.Write([]byte("\n"))
        } else if pathInfo == "/"+AtomFeed {
                commit, err := repo.CommitObject(*headHash)
                if err != nil {
                        makeErr(err)
                }
                etagHash.Write([]byte("ATOM"))
                etagHash.Write(commit.Hash[:])
                checkETag(etagHash)
                feed := atom.Feed{
                        Title:   cfg.Title,
                        ID:      cfg.AtomId,
                        Updated: atom.Time(commit.Author.When),
                        Link: []atom.Link{{
                                Rel:  "self",
                                Href: atomURL,
                        }},
                        Author: &atom.Person{Name: cfg.AtomAuthor},
                }
                repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
                if err != nil {
                        makeErr(err)
                }
                for i := 0; i < PageEntries; i++ {
                        commit, err = repoLog.Next()
                        if err != nil {
                                break
                        }

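                        // Derive a stable UUID-like identifier for the entry from the
                        // first 16 bytes of the commit hash, forcing the UUID version
                        // nibble to 4.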
                        feedIdRaw := new([16]byte)
                        copy(feedIdRaw[:], commit.Hash[:])
                        feedIdRaw[6] = (feedIdRaw[6] & 0x0F) | uint8(4<<4) // version 4
                        feedId := fmt.Sprintf(
                                "%x-%x-%x-%x-%x",
                                feedIdRaw[0:4],
                                feedIdRaw[4:6],
                                feedIdRaw[6:8],
                                feedIdRaw[8:10],
                                feedIdRaw[10:],
                        )

                        lines := msgSplit(commit.Message)
                        feed.Entry = append(feed.Entry, &atom.Entry{
                                Title: lines[0],
                                ID:    "urn:uuid:" + feedId,
                                Link: []atom.Link{{
                                        Rel:  "alternate",
                                        Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
                                }},
                                Published: atom.Time(commit.Author.When),
                                Updated:   atom.Time(commit.Author.When),
                                Summary: &atom.Text{
                                        Type: "text",
                                        Body: lines[0],
                                },
                                Content: &atom.Text{
                                        Type: "text",
                                        Body: strings.Join(lines[2:], "\n"),
                                },
                        })
                }
                data, err := xml.MarshalIndent(&feed, "", "  ")
                if err != nil {
                        makeErr(err)
                }
                out.Write(data)
                os.Stdout.WriteString("Content-Type: text/xml; charset=UTF-8\n")
                os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
                if gzipWriter != nil {
                        os.Stdout.WriteString("Content-Encoding: gzip\n")
                        gzipWriter.Close()
                }
                os.Stdout.WriteString("\n")
                os.Stdout.Write(outBuf.Bytes())
                return
        } else if sha1DigestRe.MatchString(pathInfo[1:]) {
                commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
                if err != nil {
                        makeErr(err)
                }
                for _, data := range etagHashForWeb {
                        etagHash.Write(data)
                }
                etagHash.Write([]byte("ENTRY"))
                etagHash.Write(commit.Hash[:])
                notesRaw := getNote(notesTree, commit.Hash)
                etagHash.Write(notesRaw)
                commentsRaw := getNote(commentsTree, commit.Hash)
                etagHash.Write(commentsRaw)
                checkETag(etagHash)
                lines := msgSplit(commit.Message)
                title := lines[0]
                when := commit.Author.When.Format(sgblog.WhenFmt)
                os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
                links := []string{}
                var parent string
                if len(commit.ParentHashes) > 0 {
                        parent = commit.ParentHashes[0].String()
                        links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="older">`)
                }
                out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
                if cfg.AboutURL != "" {
                        out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.AboutURL, "about"))))
                }
                out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.URLPrefix+"/", "index"))))
                if parent != "" {
                        out.Write([]byte(fmt.Sprintf(
                                "[%s]\n",
                                makeA(cfg.URLPrefix+"/"+parent, "older"),
                        )))
                }
                out.Write([]byte(fmt.Sprintf(
                        "[<tt><a title=\"When\">%s</a></tt>]\n"+
                                "[<tt><a title=\"Hash\">%s</a></tt>]\n"+
                                "<hr/>\n<h2>%s</h2>\n<pre>\n",
                        when, commit.Hash.String(), title,
                )))
                for _, line := range lines[2:] {
                        out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
                }
                out.Write([]byte("</pre>\n<hr/>\n"))
                if len(notesRaw) > 0 {
                        out.Write([]byte("Note:<pre>\n" + string(notesRaw) + "\n</pre>\n<hr/>\n"))
                }
                if cfg.CommentsEmail != "" {
                        out.Write([]byte("[" + makeA(
                                "mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
                                "write comment",
                        ) + "]\n"))
                }
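                // Stored comments are rendered as a definition list: the first
                // three lines of each comment are written verbatim, the remaining
                // ones get URLized.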
                out.Write([]byte("<dl>\n"))
                for i, comment := range parseComments(commentsRaw) {
                        out.Write([]byte(fmt.Sprintf(
                                "<dt><a name=\"comment%d\" href=\"#comment%d\">comment %d</a>:"+
                                        "</dt>\n<dd><pre>\n",
                                i, i, i,
                        )))
                        lines = strings.Split(comment, "\n")
                        for _, line := range lines[:3] {
                                out.Write([]byte(line + "\n"))
                        }
                        for _, line := range lines[3:] {
                                out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
                        }
                        out.Write([]byte("</pre></dd>\n"))
                }
                out.Write([]byte("</dl>\n"))
        } else {
                makeErr(errors.New("unknown URL action"))
        }
        out.Write([]byte("</body></html>\n"))
        if gzipWriter != nil {
                gzipWriter.Close()
        }
        os.Stdout.Write(outBuf.Bytes())
}