From: Matt Joiner
Date: Thu, 31 Jul 2025 01:54:06 +0000 (+1000)
Subject: Rejigger the webseed debug logging
X-Git-Tag: v1.59.0~2^2~80
X-Git-Url: http://www.git.stargrave.org/?a=commitdiff_plain;h=8f5054baf458902946cb763b12710c969667c34d;p=btrtrc.git

Rejigger the webseed debug logging
---

diff --git a/webseed-peer.go b/webseed-peer.go
index d32aab93..6026d9c7 100644
--- a/webseed-peer.go
+++ b/webseed-peer.go
@@ -25,6 +25,7 @@ import (
 type webseedPeer struct {
 	// First field for stats alignment.
 	peer           Peer
+	logger         *slog.Logger
 	client         webseed.Client
 	activeRequests map[*webseedRequest]struct{}
 	locker         sync.Locker
@@ -40,7 +41,7 @@ func (me *webseedPeer) cancelAllRequests() {
 	// Is there any point to this? Won't we fail to receive a chunk and cancel anyway? Should we
 	// Close requests instead?
 	for req := range me.activeRequests {
-		req.Cancel()
+		req.Cancel("all requests cancelled")
 	}
 }
 
@@ -131,9 +132,10 @@ func (ws *webseedPeer) intoSpec(begin, end RequestIndex) webseed.RequestSpec {
 	return webseed.RequestSpec{start, endOff - start}
 }
 
-func (ws *webseedPeer) spawnRequest(begin, end RequestIndex) {
-	extWsReq := ws.client.StartNewRequest(ws.intoSpec(begin, end))
+func (ws *webseedPeer) spawnRequest(begin, end RequestIndex, logger *slog.Logger) {
+	extWsReq := ws.client.StartNewRequest(ws.intoSpec(begin, end), logger)
 	wsReq := webseedRequest{
+		logger:  logger,
 		request: extWsReq,
 		begin:   begin,
 		next:    begin,
@@ -141,7 +143,7 @@ func (ws *webseedPeer) spawnRequest(begin, end RequestIndex) {
 	}
 	if ws.hasOverlappingRequests(begin, end) {
 		if webseed.PrintDebug {
-			fmt.Printf("webseedPeer.spawnRequest: overlapping request for %v[%v-%v)\n", ws.peer.t.name(), begin, end)
+			logger.Warn("webseedPeer.spawnRequest: request overlaps existing")
 		}
 		ws.peer.t.cl.dumpCurrentWebseedRequests()
 	}
@@ -304,7 +306,7 @@ func (ws *webseedPeer) readChunks(wr *webseedRequest) (err error) {
 		stop := err != nil || wr.next >= wr.end
 		if !stop {
 			if !ws.keepReading(wr) {
-				wr.Cancel()
+				wr.Cancel("finished or discarded")
 			}
 		}
 		ws.peer.locker().Unlock()
diff --git a/webseed-request.go b/webseed-request.go
index 62e2f1ea..c0db9dbe 100644
--- a/webseed-request.go
+++ b/webseed-request.go
@@ -2,6 +2,7 @@ package torrent
 
 import (
 	"fmt"
+	"log/slog"
 	"sync/atomic"
 
 	"github.com/anacrolix/torrent/webseed"
 )
@@ -11,6 +12,7 @@
 type webseedRequest struct {
 	// Fingers out.
 	request webseed.Request
+	logger  *slog.Logger
 	// First assigned in the range.
 	begin RequestIndex
 	// The next to be read.
@@ -25,11 +27,11 @@ func (me *webseedRequest) Close() {
 }
 
 // Record that it was exceptionally cancelled.
-func (me *webseedRequest) Cancel() {
+func (me *webseedRequest) Cancel(reason string) {
 	me.request.Cancel()
 	if !me.cancelled.Swap(true) {
 		if webseed.PrintDebug {
-			fmt.Printf("cancelled webseed request\n")
+			me.logger.Debug("cancelled", "reason", reason)
 		}
 	}
 }
diff --git a/webseed-requesting.go b/webseed-requesting.go
index 48d7eea4..ee1f9396 100644
--- a/webseed-requesting.go
+++ b/webseed-requesting.go
@@ -4,6 +4,7 @@ import (
 	"cmp"
 	"fmt"
 	"iter"
+	"log/slog"
 	"maps"
 	"os"
 	"strconv"
@@ -14,6 +15,7 @@ import (
 	g "github.com/anacrolix/generics"
 	"github.com/anacrolix/generics/heap"
 	"github.com/anacrolix/missinggo/v2/panicif"
+	pp "github.com/anacrolix/torrent/peer_protocol"
 
 	"github.com/anacrolix/torrent/internal/request-strategy"
 	"github.com/anacrolix/torrent/metainfo"
@@ -156,12 +158,8 @@ func (cl *Client) updateWebseedRequests() {
 	}
 	// Cancel any existing requests that are no longer wanted.
-	for key, value := range unwantedExistingRequests {
-		if webseed.PrintDebug {
-			fmt.Printf("cancelling deprioritized existing webseed request %v\n", key)
-		}
-		key.t.slogger().Debug("cancelling deprioritized existing webseed request", "webseedUrl", key.url, "fileIndex", key.fileIndex)
-		value.existingWebseedRequest.Cancel()
+	for _, value := range unwantedExistingRequests {
+		value.existingWebseedRequest.Cancel("deprioritized")
 	}
 	printPlan := sync.OnceFunc(func() {
@@ -197,16 +195,25 @@ func (cl *Client) updateWebseedRequests() {
 				}
 				last++
 			}
+			debugLogger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{
+				Level:     slog.LevelDebug,
+				AddSource: true,
+			})).With(
+				"webseedUrl", requestKey.url,
+				"fileIndex", requestKey.fileIndex)
 			// Request shouldn't exist if this occurs.
 			panicif.LessThan(last, begin)
 			// Hello C++ my old friend.
 			end := last + 1
 			if webseed.PrintDebug && end != fileEnd {
-				fmt.Printf("shortened webseed request for %v: [%v-%v) to [%v-%v)\n",
-					requestKey.filePath(), begin, fileEnd, begin, end)
+				debugLogger.Debug(
+					"shortened webseed request",
+					"first-file", requestKey.filePath(),
+					"from", endExclusiveString(begin, fileEnd),
+					"to", endExclusiveString(begin, end))
 			}
 			panicif.GreaterThan(end, fileEnd)
-			peer.spawnRequest(begin, end)
+			peer.spawnRequest(begin, end, debugLogger)
 		}
 	}
 }
@@ -390,3 +397,15 @@ func (cl *Client) updateWebseedRequestsAndResetTimer() {
 	panicif.True(cl.webseedRequestTimer.Reset(webseedRequestUpdateTimerInterval))
 }
+
+type endExclusive[T any] struct {
+	start, end T
+}
+
+func (me endExclusive[T]) String() string {
+	return fmt.Sprintf("[%v-%v)", me.start, me.end)
+}
+
+func endExclusiveString[T any](start, end T) string {
+	return endExclusive[T]{start, end}.String()
+}
diff --git a/webseed/client.go b/webseed/client.go
index babbe0f5..50d5efe5 100644
--- a/webseed/client.go
+++ b/webseed/client.go
@@ -104,7 +104,7 @@ func (ws *Client) UrlForFileIndex(fileIndex int) string {
 	return urlForFileIndex(ws.Url, fileIndex, ws.info, ws.PathEscaper)
 }
 
-func (ws *Client) StartNewRequest(r RequestSpec) Request {
+func (ws *Client) StartNewRequest(r RequestSpec, debugLogger *slog.Logger) Request {
 	ctx, cancel := context.WithCancel(context.TODO())
 	var requestParts []requestPart
 	if !ws.fileIndex.Locate(r, func(i int, e segments.Extent) bool {
@@ -123,12 +123,11 @@ func (ws *Client) StartNewRequest(r RequestSpec) Request {
 		}
 		part.do = func() (*http.Response, error) {
 			if PrintDebug {
-				fmt.Printf(
-					"doing request for %q (file size %v), Range: %q\n",
-					req.URL,
-					humanize.Bytes(uint64(ws.fileIndex.Index(i).Length)),
-					req.Header.Get("Range"),
-				)
+				debugLogger.Debug(
+					"doing request for part",
+					"url", req.URL,
+					"file size", humanize.Bytes(uint64(e.Length)),
+					"range", req.Header.Get("Range"))
 			}
 			return ws.HttpClient.Do(req)
 		}
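
For context, a minimal standalone sketch (not part of the commit) of how the new pieces fit together: the endExclusive helper added in webseed-requesting.go renders half-open ranges like "[5-9)", and a per-request *slog.Logger carrying webseedUrl/fileIndex attributes, similar to the one built in updateWebseedRequests, is what now flows into spawnRequest, StartNewRequest, and webseedRequest.Cancel. The URL and values used here are made up for illustration.

	package main

	import (
		"fmt"
		"log/slog"
		"os"
	)

	// Mirrors the endExclusive helper added in webseed-requesting.go:
	// a half-open interval rendered as "[start-end)".
	type endExclusive[T any] struct {
		start, end T
	}

	func (me endExclusive[T]) String() string {
		return fmt.Sprintf("[%v-%v)", me.start, me.end)
	}

	func main() {
		// Prints "[5-9)", the notation used in the "shortened webseed request" log line.
		fmt.Println(endExclusive[int]{5, 9})

		// A debug logger with per-request attributes, in the spirit of the one
		// constructed in updateWebseedRequests. URL and file index are illustrative.
		logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{
			Level: slog.LevelDebug,
		})).With(
			"webseedUrl", "http://example.com/webseed",
			"fileIndex", 0)
		logger.Debug("cancelled", "reason", "deprioritized")
	}

Attaching the attributes once with Logger.With means every Debug/Warn call on that request carries the webseed URL and file index without repeating them at each call site.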