Sergey Matveev's repositories - godlighty.git/blobdiff - handler.go
Use mtime instead of ctime
[godlighty.git] / handler.go
index 6fa1d35bbeb0e1a6290900f55ecaaf9a8b91f998..10b0d2ff83f0b7b557be80494eea1389e8d0e31b 100644 (file)
@@ -1,28 +1,27 @@
-/*
-godlighty -- highly-customizable HTTP, HTTP/2, HTTPS server
-Copyright (C) 2021 Sergey Matveev <stargrave@stargrave.org>
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation, version 3 of the License.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with this program.  If not, see <http://www.gnu.org/licenses/>.
-*/
+// godlighty -- highly-customizable HTTP, HTTP/2, HTTPS server
+// Copyright (C) 2021-2024 Sergey Matveev <stargrave@stargrave.org>
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, version 3 of the License.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 package godlighty
 
 import (
        "bytes"
        "compress/gzip"
+       "encoding/base64"
        "errors"
        "fmt"
-       "io/ioutil"
+       "io"
        "log"
        "net"
        "net/http"
@@ -35,23 +34,23 @@ import (
        "time"
 
        "github.com/klauspost/compress/zstd"
+       "go.stargrave.org/godlighty/meta4"
        "golang.org/x/net/webdav"
 )
 
 const (
-       Index    = "index.html"
-       Readme   = "README"
-       Meta4Ext = ".meta4"
+       Index  = "index.html"
+       Readme = "README"
 )
 
 var (
        gzPool = sync.Pool{
-               New: func() interface{} { return gzip.NewWriter(ioutil.Discard) },
+               New: func() interface{} { return gzip.NewWriter(io.Discard) },
        }
        zstdPool = sync.Pool{
                New: func() interface{} {
                        w, err := zstd.NewWriter(
-                               ioutil.Discard,
+                               io.Discard,
                                zstd.WithEncoderLevel(zstd.SpeedDefault),
                        )
                        if err != nil {
@@ -85,6 +84,7 @@ func (h Handler) Handle(
                )
                http.NotFound(w, r)
        }
+       w.Header().Set("Server", Version)
        if cfg == nil {
                notFound()
                return
@@ -117,7 +117,9 @@ func (h Handler) Handle(
                return
        }
 
-       if cfg.TLS != nil && len(cfg.TLS.ClientCAs) > 0 {
+       if (cfg.ECDSATLS != nil && len(cfg.ECDSATLS.ClientCAs) > 0) ||
+               (cfg.EdDSATLS != nil && len(cfg.EdDSATLS.ClientCAs) > 0) ||
+               (cfg.GOSTTLS != nil && len(cfg.GOSTTLS.ClientCAs) > 0) {
                if r.TLS == nil {
                        err = errors.New("TLS client authentication required")
                        printErr(http.StatusForbidden, err)
@@ -139,7 +141,15 @@ func (h Handler) Handle(
                return
        }
 
-       if cfg.WebDAV && (r.Method == http.MethodHead ||
+       pthOrig := path.Clean(path.Join(cfg.Root, r.URL.Path))
+       pth := pthOrig
+       fi, err := os.Stat(pth)
+       if err != nil {
+               notFound()
+               return
+       }
+
+       if cfg.WebDAV && (((r.Method == http.MethodHead) && fi.IsDir()) ||
                r.Method == http.MethodOptions ||
                r.Method == "PROPFIND") {
                dav := webdav.Handler{
@@ -156,7 +166,7 @@ func (h Handler) Handle(
                return
        }
 
-       if !(r.Method == "" || r.Method == http.MethodGet) {
+       if !(r.Method == "" || r.Method == http.MethodGet || r.Method == http.MethodHead) {
                fmt.Printf("%s %s \"%s %+q %s\" %d %s\"%s\"\n",
                        r.RemoteAddr, host, r.Method, PathWithQuery(r.URL), r.Proto,
                        http.StatusMethodNotAllowed,
@@ -165,17 +175,11 @@ func (h Handler) Handle(
                http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
                return
        }
+
        var fd *os.File
        var contentType string
        var etag string
-       pthOrig := path.Clean(path.Join(cfg.Root, r.URL.Path))
-       pth := pthOrig
-IndexLookup:
-       fi, err := os.Stat(pth)
-       if err != nil {
-               notFound()
-               return
-       }
+IndexLookuped:
        if fi.IsDir() {
                if cfg.DirList {
                        entries, err := os.ReadDir(pth)
@@ -190,7 +194,7 @@ IndexLookup:
                                http.Error(w, "internal error", http.StatusInternalServerError)
                                return
                        }
-                       etag, err = ctimeETag(fd)
+                       etag, err = mtimeETag(fd)
                        fd.Close()
                        if err != nil {
                                printErr(http.StatusInternalServerError, err)
@@ -199,12 +203,12 @@ IndexLookup:
                        }
                        var readme []byte
                        for _, f := range append(cfg.Readmes, Readme) {
-                               readme, _ = ioutil.ReadFile(path.Join(pth, f))
+                               readme, _ = os.ReadFile(path.Join(pth, f))
                                if readme != nil {
                                        break
                                }
                        }
-                       fd, err = dirList(cfg, r.URL.Path, entries, string(readme))
+                       fd, err = dirList(cfg, r.URL.Path, pth, entries, string(readme))
                        if err != nil {
                                printErr(http.StatusInternalServerError, err)
                                http.Error(w, "internal error", http.StatusInternalServerError)
@@ -212,11 +216,16 @@ IndexLookup:
                        }
                        contentType = "text/html; charset=utf-8"
                } else {
-                       for _, index := range append(cfg.Indexes, Index) {
+                       for _, index := range append(cfg.Indices, Index) {
                                p := path.Join(pth, index)
                                if _, err := os.Stat(p); err == nil {
                                        pth = p
-                                       goto IndexLookup
+                                       fi, err = os.Stat(pth)
+                                       if err != nil {
+                                               notFound()
+                                               return
+                                       }
+                                       goto IndexLookuped
                                }
                        }
                        notFound()
@@ -231,7 +240,7 @@ IndexLookup:
                        http.Error(w, "internal error", http.StatusInternalServerError)
                        return
                }
-               etag, err = ctimeETag(fd)
+               etag, err = mtimeETag(fd)
                if err != nil {
                        printErr(http.StatusInternalServerError, err)
                        http.Error(w, "internal error", http.StatusInternalServerError)
@@ -240,9 +249,33 @@ IndexLookup:
        }
        defer fd.Close()
 
-       if _, err = os.Stat(pth + Meta4Ext); err == nil {
-               w.Header().Set("Link", "<"+path.Base(pth)+Meta4Ext+`>; rel=describedby; type="application/metalink4+xml"`)
+       if meta4fi, err := os.Stat(pth + meta4.Ext); err == nil {
+               if meta4fi.Size() > meta4.MaxSize {
+                       goto SkipMeta4
+               }
+               meta4Raw, err := os.ReadFile(pth + meta4.Ext)
+               if err != nil {
+                       goto SkipMeta4
+               }
+               base := path.Base(pth)
+               forHTTP, err := meta4.Parse(base, meta4Raw)
+               if err != nil {
+                       goto SkipMeta4
+               }
+               w.Header().Add("Link", "<"+base+meta4.Ext+
+                       `>; rel=describedby; type="application/metalink4+xml"`,
+               )
+               for _, u := range forHTTP.URLs {
+                       w.Header().Add("Link", "<"+u+">; rel=duplicate")
+               }
+               for name, digest := range forHTTP.Hashes {
+                       w.Header().Add("Digest", name+"="+base64.StdEncoding.EncodeToString(digest))
+               }
+               for _, u := range forHTTP.Torrents {
+                       w.Header().Add("Link", "<"+u+`>; rel=describedby; type="application/x-bittorrent"`)
+               }
        }
+SkipMeta4:
 
        if contentType == "" {
                contentType = mediaType(path.Base(pth), cfg.MIMEs)
@@ -250,7 +283,6 @@ IndexLookup:
        contentTypeBase := strings.SplitN(contentType, ";", 2)[0]
        w.Header().Set("Content-Type", contentType)
 
-       w.Header().Set("Server", Version)
        if etag != "" {
                w.Header().Set("ETag", etag)
        }