"bytes"
"compress/gzip"
"encoding/hex"
+ "encoding/json"
"encoding/xml"
"errors"
"fmt"
"hash"
+ "html"
"io"
"io/ioutil"
+ "log"
"net/url"
"os"
"regexp"
"strconv"
"strings"
- "github.com/google/uuid"
+ "github.com/hjson/hjson-go"
+ "go.cypherpunks.ru/netstring/v2"
+ "go.stargrave.org/sgblog"
"golang.org/x/crypto/blake2b"
"golang.org/x/tools/blog/atom"
"gopkg.in/src-d/go-git.v4"
// Pagination and feed constants.
const (
	// PageEntries is the number of blog entries shown per index page
	// and per Atom feed request.
	PageEntries = 50
	// AtomFeed is the URL path component under which the Atom feed
	// is served.
	AtomFeed = "feed.atom"
)
var (
- Version = "0.0.1"
- ETagVersion = []byte("1")
sha1DigestRe = regexp.MustCompilePOSIX("([0-9a-f]{40,40})")
defaultLinks = []string{}
repo *git.Repository
+ notesTree *object.Tree
commentsTree *object.Tree
renderableSchemes = map[string]struct{}{
- "http": struct{}{},
- "https": struct{}{},
"ftp": struct{}{},
"gopher": struct{}{},
+ "http": struct{}{},
+ "https": struct{}{},
}
)
+type TableEntry struct {
+ commit *object.Commit
+ commentsRaw []byte
+}
+
// Cfg is the HJSON configuration file structure, loaded from the path
// given in the SGBLOG_CFG environment variable.
type Cfg struct {
	GitPath string // path to the git repository holding the blog
	Branch  string // reference name the blog lives on
	Title   string // blog title shown in page headers

	URLPrefix string // prefix prepended to all generated relative URLs

	AtomBaseURL string // base URL for absolute links in the Atom feed
	AtomId      string // Atom feed identifier
	AtomAuthor  string // Atom feed author name

	CSS       string   // optional stylesheet URL
	Webmaster string   // optional webmaster email for <link rev="made">
	AboutURL  string   // optional "about" page URL
	GitURLs   []string // optional clone URLs advertised via <link rel="vcs-git">

	CommentsNotesRef string // git notes reference holding per-post comments
	CommentsEmail    string // email address readers use to submit comments
}
+
// makeA renders an HTML anchor element pointing at href with the given
// link text. No escaping is performed here; callers must pass
// already-safe strings.
func makeA(href, text string) string {
	return `<a href="` + href + `">` + text + `</a>`
}
func etagString(etag hash.Hash) string {
return lines
}
-func getNote(what plumbing.Hash) string {
- if commentsTree == nil {
- return ""
+func lineURLize(urlPrefix, line string) string {
+ cols := strings.Split(html.EscapeString(line), " ")
+ for i, col := range cols {
+ if u := urlParse(col); u != nil {
+ cols[i] = makeA(col, col)
+ continue
+ }
+ cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
+ urlPrefix+"/$1", "$1",
+ ))
+ }
+ return strings.Join(cols, " ")
+}
+
+func getNote(tree *object.Tree, what plumbing.Hash) []byte {
+ if tree == nil {
+ return nil
}
- entry, err := commentsTree.FindEntry(what.String())
- if err != nil {
- return ""
+ var entry *object.TreeEntry
+ var err error
+ paths := make([]string, 3)
+ paths[0] = what.String()
+ paths[1] = paths[0][:2] + "/" + paths[0][2:]
+ paths[2] = paths[1][:4+1] + "/" + paths[1][4+1:]
+ for _, p := range paths {
+ entry, err = tree.FindEntry(p)
+ if err == nil {
+ break
+ }
+ }
+ if entry == nil {
+ return nil
}
blob, err := repo.BlobObject(entry.Hash)
if err != nil {
- return ""
+ return nil
}
r, err := blob.Reader()
if err != nil {
- return ""
+ return nil
}
data, err := ioutil.ReadAll(r)
if err != nil {
- return ""
+ return nil
}
- return string(data)
+ return bytes.TrimSuffix(data, []byte{'\n'})
+}
+
+func parseComments(data []byte) []string {
+ comments := []string{}
+ nsr := netstring.NewReader(bytes.NewReader(data))
+ for {
+ if _, err := nsr.Next(); err != nil {
+ break
+ }
+ if comment, err := ioutil.ReadAll(nsr); err == nil {
+ comments = append(comments, string(comment))
+ }
+ }
+ return comments
}
func startHeader(etag hash.Hash, gziped bool) string {
</head>
<body>
`,
- Version, title,
+ sgblog.Version, title,
strings.Join(append(defaultLinks, additional...), "\n "),
)
}
func makeErr(err error) {
- fmt.Println("Content-Type: text/plain; charset=UTF-8\n")
+ fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
fmt.Println(err)
panic(err)
}
// checkETag compares the request's If-None-Match header against the
// ETag accumulated in etag and, on an exact match, replies
// "304 Not Modified" and terminates the CGI process immediately.
func checkETag(etag hash.Hash) {
	ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
	if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
		fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
		os.Exit(0)
	}
}
func main() {
- gitPath, exists := os.LookupEnv("SGBLOG_GIT_PATH")
- if !exists {
- makeErr(errors.New("SGBLOG_GIT_PATH is unset"))
+ cfgPath := os.Getenv("SGBLOG_CFG")
+ if cfgPath == "" {
+ log.Fatalln("SGBLOG_CFG is not set")
}
- branchName, exists := os.LookupEnv("SGBLOG_BRANCH")
+ pathInfo, exists := os.LookupEnv("PATH_INFO")
if !exists {
- makeErr(errors.New("SGBLOG_BRANCH is unset"))
+ pathInfo = "/"
}
- blogBaseURL, exists := os.LookupEnv("SGBLOG_BASE_URL")
- if !exists {
- makeErr(errors.New("SGBLOG_BASE_URL is unset"))
+ queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
+ if err != nil {
+ makeErr(err)
}
- blogTitle, exists := os.LookupEnv("SGBLOG_TITLE")
- if !exists {
- makeErr(errors.New("SGBLOG_TITLE is unset"))
+
+ cfgRaw, err := ioutil.ReadFile(cfgPath)
+ if err != nil {
+ makeErr(err)
}
- atomId, exists := os.LookupEnv("SGBLOG_ATOM_ID")
- if !exists {
- makeErr(errors.New("SGBLOG_ATOM_ID is unset"))
+ var cfgGeneral map[string]interface{}
+ if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
+ makeErr(err)
}
- atomAuthorName, exists := os.LookupEnv("SGBLOG_ATOM_AUTHOR")
- if !exists {
- makeErr(errors.New("SGBLOG_ATOM_AUTHOR is unset"))
+ cfgRaw, err = json.Marshal(cfgGeneral)
+ if err != nil {
+ makeErr(err)
+ }
+ var cfg *Cfg
+ if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
+ makeErr(err)
}
etagHash, err := blake2b.New256(nil)
if err != nil {
panic(err)
}
- etagHash.Write(ETagVersion)
- etagHash.Write([]byte(gitPath))
- etagHash.Write([]byte(branchName))
- etagHash.Write([]byte(blogBaseURL))
- etagHash.Write([]byte(blogTitle))
- etagHash.Write([]byte(atomId))
- etagHash.Write([]byte(atomAuthorName))
-
- // SGBLOG_URL_PREFIX
- urlPrefix := os.Getenv("SGBLOG_URL_PREFIX")
- etagHash.Write([]byte(urlPrefix))
-
- // SGBLOG_CSS
- if cssUrl, exists := os.LookupEnv("SGBLOG_CSS"); exists {
- defaultLinks = append(defaultLinks, fmt.Sprintf(
- `<link rel="stylesheet" type="text/css" href="%s">`,
- cssUrl,
- ))
- etagHash.Write([]byte(cssUrl))
- }
+ etagHash.Write([]byte("SGBLOG"))
+ etagHash.Write([]byte(sgblog.Version))
+ etagHash.Write([]byte(cfg.GitPath))
+ etagHash.Write([]byte(cfg.Branch))
+ etagHash.Write([]byte(cfg.Title))
+ etagHash.Write([]byte(cfg.URLPrefix))
+ etagHash.Write([]byte(cfg.AtomBaseURL))
+ etagHash.Write([]byte(cfg.AtomId))
+ etagHash.Write([]byte(cfg.AtomAuthor))
- // SGBLOG_WEBMASTER
- if webmaster, exists := os.LookupEnv("SGBLOG_WEBMASTER"); exists {
- defaultLinks = append(defaultLinks, fmt.Sprintf(
- `<link rev="made" href="mailto:%s">`,
- webmaster,
- ))
- etagHash.Write([]byte(webmaster))
+ etagHashForWeb := [][]byte{}
+ if cfg.CSS != "" {
+ defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
+ etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
}
-
- // SGBLOG_ABOUT
- aboutUrl := os.Getenv("SGBLOG_ABOUT")
- etagHash.Write([]byte(aboutUrl))
-
- // SGBLOG_GIT_URLS
- if gitUrls, exists := os.LookupEnv("SGBLOG_GIT_URLS"); exists {
- for _, gitUrl := range strings.Split(gitUrls, " ") {
- defaultLinks = append(defaultLinks, fmt.Sprintf(
- `<link rel="vcs-git" href="%s" title="Git repository">`,
- gitUrl,
- ))
- }
- etagHash.Write([]byte(gitUrls))
+ if cfg.Webmaster != "" {
+ defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
+ etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
}
-
- defaultLinks = append(defaultLinks, fmt.Sprintf(
- `<link rel="top" href="%s/" title="top">`,
- urlPrefix,
- ))
- atomUrl := blogBaseURL + urlPrefix + "/" + AtomFeed
- defaultLinks = append(defaultLinks, fmt.Sprintf(
- `<link rel="alternate" title="Atom feed" href="%s" type="application/atom+xml">`,
- atomUrl,
- ))
-
- pathInfo, exists := os.LookupEnv("PATH_INFO")
- if !exists {
- pathInfo = "/"
+ if cfg.AboutURL != "" {
+ etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
}
- queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
- if err != nil {
- makeErr(err)
+ for _, gitURL := range cfg.GitURLs {
+ defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
+ etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
}
+ if cfg.CommentsNotesRef != "" {
+ etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
+ }
+ if cfg.CommentsEmail != "" {
+ etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
+ }
+
+ defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
+ atomURL := cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomFeed
+ defaultLinks = append(defaultLinks, `<link rel="alternate" title="Atom feed" href="`+atomURL+`" type="application/atom+xml">`)
- repo, err = git.PlainOpen(gitPath)
+ repo, err = git.PlainOpen(cfg.GitPath)
if err != nil {
makeErr(err)
}
- head, err := repo.Reference(plumbing.ReferenceName(branchName), false)
+ head, err := repo.Reference(plumbing.ReferenceName(cfg.Branch), false)
if err != nil {
makeErr(err)
}
+
if notes, err := repo.Notes(); err == nil {
- var comments *plumbing.Reference
+ var notesRef *plumbing.Reference
+ var commentsRef *plumbing.Reference
notes.ForEach(func(ref *plumbing.Reference) error {
- if ref.Name() == "refs/notes/commits" {
- comments = ref
+ switch string(ref.Name()) {
+ case "refs/notes/commits":
+ notesRef = ref
+ case cfg.CommentsNotesRef:
+ commentsRef = ref
}
return nil
})
- if comments != nil {
- if commentsCommit, err := repo.CommitObject(comments.Hash()); err == nil {
+ if notesRef != nil {
+ if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
+ notesTree, _ = commentsCommit.Tree()
+ }
+ }
+ if commentsRef != nil {
+ if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
commentsTree, _ = commentsCommit.Tree()
}
}
}
}
- var commit *object.Commit
if pathInfo == "/" {
offset := 0
if offsetRaw, exists := queryValues["offset"]; exists {
makeErr(err)
}
}
- var table bytes.Buffer
- table.WriteString("<table border=1>\n<tr><th>When</th><th>Title</th><th>Comment of</th></tr>\n")
log, err := repo.Log(&git.LogOptions{From: head.Hash()})
if err != nil {
makeErr(err)
}
- errOccured := false
+ commentN := 0
for i := 0; i < offset; i++ {
- commit, err = log.Next()
- if err != nil {
+ if _, err = log.Next(); err != nil {
break
}
+ commentN++
}
+
+ entries := make([]TableEntry, 0, PageEntries)
+ logEnded := false
+ for _, data := range etagHashForWeb {
+ etagHash.Write(data)
+ }
+ etagHash.Write([]byte("INDEX"))
for i := 0; i < PageEntries; i++ {
- commit, err = log.Next()
+ commit, err := log.Next()
if err != nil {
- errOccured = true
+ logEnded = true
break
}
- if i == 0 {
- etagHash.Write(commit.Hash[:])
- checkETag(etagHash)
- }
- lines := msgSplit(commit.Message)
+ etagHash.Write(commit.Hash[:])
+ commentsRaw := getNote(commentsTree, commit.Hash)
+ etagHash.Write(commentsRaw)
+ entries = append(entries, TableEntry{commit, commentsRaw})
+ }
+ checkETag(etagHash)
+
+ var table bytes.Buffer
+ table.WriteString(
+ "<table border=1>\n" +
+ "<caption>Comments</caption>\n<tr>" +
+ "<th>N</th>" +
+ "<th>When</th>" +
+ "<th>Title</th>" +
+ `<th size="5%"><a title="Lines">L</a></th>` +
+ `<th size="5%"><a title="Comments">C</a></th>` +
+ "<th>Linked to</th></tr>\n")
+ for _, entry := range entries {
+ commentN++
+ lines := msgSplit(entry.commit.Message)
domains := []string{}
for _, line := range lines[2:] {
if u := urlParse(line); u == nil {
domains = append(domains, makeA(line, u.Host))
}
}
- entry := []string{
- makeA(urlPrefix+"/"+commit.Hash.String(), lines[0]),
- fmt.Sprintf("(%dL)", len(lines)-2),
- }
- if note := getNote(commit.Hash); note != "" {
- entry = append(entry, "(N)")
+ var commentsValue string
+ if l := len(parseComments(entry.commentsRaw)); l > 0 {
+ commentsValue = strconv.Itoa(l)
+ } else {
+ commentsValue = " "
}
table.WriteString(fmt.Sprintf(
- "<tr><td><tt>%s</tt></td><td>%s</td><td>%s</td></tr>\n",
- commit.Author.When.Format(WhenFmt),
- strings.Join(entry, " "),
+ "<tr><td>%d</td><td><tt>%s</tt></td>"+
+ "<td>%s</td>"+
+ "<td>%d</td><td>%s</td>"+
+ "<td>%s</td></tr>\n",
+ commentN, entry.commit.Author.When.Format(sgblog.WhenFmt),
+ makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
+ len(lines)-2,
+ commentsValue,
strings.Join(domains, " "),
))
}
table.WriteString("</table>")
+
+ var href string
var links []string
var refs bytes.Buffer
if offset > 0 {
- offsetPrev := offset - PageEntries
- if offsetPrev < 0 {
- offsetPrev = 0
+ if offsetPrev := offset - PageEntries; offsetPrev > 0 {
+ href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
+ } else {
+ href = cfg.URLPrefix + "/"
}
- href := urlPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
- links = append(links, fmt.Sprintf(
- `<link rel="prev" href="%s" title="newer">`, href,
- ))
- refs.WriteString(makeA(href, "[prev]"))
+ links = append(links, `<link rel="prev" href="`+href+`" title="newer">`)
+ refs.WriteString("\n" + makeA(href, "[prev]"))
}
- if !errOccured {
- href := urlPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
- links = append(links, fmt.Sprintf(
- `<link rel="next" href="%s" title="older">`, href,
- ))
- refs.WriteString(makeA(href, "[next]"))
+ if !logEnded {
+ href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
+ links = append(links, `<link rel="next" href="`+href+`" title="older">`)
+ refs.WriteString("\n" + makeA(href, "[next]"))
}
+
os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
out.Write([]byte(startHTML(
- fmt.Sprintf("%s (%d-%d)", blogTitle, offset, offset+PageEntries),
+ fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
links,
)))
+ if cfg.AboutURL != "" {
+ out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
+ }
out.Write(refs.Bytes())
out.Write(table.Bytes())
out.Write(refs.Bytes())
out.Write([]byte("\n"))
} else if pathInfo == "/"+AtomFeed {
- commit, err = repo.CommitObject(head.Hash())
+ commit, err := repo.CommitObject(head.Hash())
if err != nil {
makeErr(err)
}
- etagHash.Write(commit.Hash[:])
etagHash.Write([]byte("ATOM"))
+ etagHash.Write(commit.Hash[:])
checkETag(etagHash)
feed := atom.Feed{
- Title: blogTitle,
- ID: atomId,
+ Title: cfg.Title,
+ ID: cfg.AtomId,
Updated: atom.Time(commit.Author.When),
Link: []atom.Link{{
Rel: "self",
- Href: atomUrl,
+ Href: atomURL,
}},
- Author: &atom.Person{Name: atomAuthorName},
+ Author: &atom.Person{Name: cfg.AtomAuthor},
}
log, err := repo.Log(&git.LogOptions{From: head.Hash()})
if err != nil {
if err != nil {
break
}
+
+ feedIdRaw := new([16]byte)
+ copy(feedIdRaw[:], commit.Hash[:])
+ feedIdRaw[6] = (feedIdRaw[6] & 0x0F) | uint8(4<<4) // version 4
+ feedId := fmt.Sprintf(
+ "%x-%x-%x-%x-%x",
+ feedIdRaw[0:4],
+ feedIdRaw[4:6],
+ feedIdRaw[6:8],
+ feedIdRaw[8:10],
+ feedIdRaw[10:],
+ )
+
lines := msgSplit(commit.Message)
- feedId, err := uuid.FromBytes(commit.Hash[:16])
- if err != nil {
- panic(err)
- }
feed.Entry = append(feed.Entry, &atom.Entry{
Title: lines[0],
- ID: "urn:uuid:" + feedId.String(),
+ ID: "urn:uuid:" + feedId,
Link: []atom.Link{{
Rel: "alternate",
- Href: blogBaseURL + urlPrefix + "/" + commit.Hash.String(),
+ Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
}},
Published: atom.Time(commit.Author.When),
Updated: atom.Time(commit.Author.When),
os.Stdout.Write(outBuf.Bytes())
return
} else if sha1DigestRe.MatchString(pathInfo[1:]) {
- commit, err = repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
+ commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
if err != nil {
makeErr(err)
}
+ for _, data := range etagHashForWeb {
+ etagHash.Write(data)
+ }
+ etagHash.Write([]byte("ENTRY"))
etagHash.Write(commit.Hash[:])
+ notesRaw := getNote(notesTree, commit.Hash)
+ etagHash.Write(notesRaw)
+ commentsRaw := getNote(commentsTree, commit.Hash)
+ etagHash.Write(commentsRaw)
checkETag(etagHash)
lines := msgSplit(commit.Message)
title := lines[0]
- when := commit.Author.When.Format(WhenFmt)
+ when := commit.Author.When.Format(sgblog.WhenFmt)
os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
- parent := commit.ParentHashes[0].String()
- out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), []string{
- fmt.Sprintf(`<link rel="prev" href="%s" title="older">`, "/"+parent),
- })))
+ links := []string{}
+ var parent string
+ if len(commit.ParentHashes) > 0 {
+ parent = commit.ParentHashes[0].String()
+ links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="older">`)
+ }
+ out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
+ if cfg.AboutURL != "" {
+ out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.AboutURL, "about"))))
+ }
+ out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.URLPrefix+"/", "index"))))
+ if parent != "" {
+ out.Write([]byte(fmt.Sprintf(
+ "[%s]\n",
+ makeA(cfg.URLPrefix+"/"+parent, "older"),
+ )))
+ }
out.Write([]byte(fmt.Sprintf(
- "[%s] [<tt>%s</tt>]\n<hr/>\n",
- makeA(urlPrefix+"/"+parent, "older"),
- when,
+ "[<tt><a title=\"When\">%s</a></tt>]\n"+
+ "[<tt><a title=\"Hash\">%s</a></tt>]\n"+
+ "<hr/>\n<h2>%s</h2>\n<pre>\n",
+ when, commit.Hash.String(), title,
)))
- out.Write([]byte(fmt.Sprintf("<h2>%s</h2>\n<pre>\n", title)))
for _, line := range lines[2:] {
- line = strings.ReplaceAll(line, "&", "&")
- line = strings.ReplaceAll(line, "<", "<")
- line = strings.ReplaceAll(line, ">", ">")
- cols := strings.Split(line, " ")
- for i, col := range cols {
- if u := urlParse(col); u != nil {
- cols[i] = makeA(col, col)
- continue
- }
- cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(urlPrefix+"/$1", "$1"))
- }
- line = strings.Join(cols, " ")
- out.Write([]byte(line + "\n"))
+ out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
+ }
+ out.Write([]byte("</pre>\n<hr/>\n"))
+ if len(notesRaw) > 0 {
+ out.Write([]byte("Note:<pre>\n" + string(notesRaw) + "\n</pre>\n<hr/>\n"))
}
- out.Write([]byte("</pre>\n"))
- if note := getNote(commit.Hash); note != "" {
- out.Write([]byte(fmt.Sprintf("Note:\n<pre>\n%s</pre>\n", note)))
+ if cfg.CommentsEmail != "" {
+ out.Write([]byte("[" + makeA(
+ "mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
+ "write comment",
+ ) + "]\n"))
}
+ out.Write([]byte("<dl>\n"))
+ for i, comment := range parseComments(commentsRaw) {
+ out.Write([]byte(fmt.Sprintf(
+ "<dt><a name=\"comment%d\"><a href=\"#comment%d\">comment %d</a>:"+
+ "</dt>\n<dd><pre>\n",
+ i, i, i,
+ )))
+ lines = strings.Split(comment, "\n")
+ for _, line := range lines[:3] {
+ out.Write([]byte(line + "\n"))
+ }
+ for _, line := range lines[3:] {
+ out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
+ }
+ out.Write([]byte("</pre></dd>\n"))
+ }
+ out.Write([]byte("</dl>\n"))
} else {
makeErr(errors.New("unknown URL action"))
}
- if aboutUrl != "" {
- out.Write([]byte(fmt.Sprintf(
- "<hr/>%s %s\n",
- makeA(aboutUrl, "About"),
- blogTitle,
- )))
- }
out.Write([]byte("</body></html>\n"))
if gzipWriter != nil {
gzipWriter.Close()