}
func (p *Piece) fileExtents(offsetIntoPiece int64) iter.Seq2[int, segments.Extent] {
- return p.t.info.FileSegmentsIndex().LocateIter(segments.Extent{
+ return p.t.fileSegmentsIndex.Unwrap().LocateIter(segments.Extent{
p.torrentBeginOffset() + offsetIntoPiece,
int64(p.length()) - offsetIntoPiece,
})
"github.com/anacrolix/missinggo/v2/pubsub"
"github.com/anacrolix/multiless"
"github.com/anacrolix/sync"
+ "github.com/anacrolix/torrent/segments"
"github.com/pion/webrtc/v4"
"golang.org/x/sync/errgroup"
"golang.org/x/time/rate"
// routines. Cancelled when the Torrent is Closed too.
getInfoCtx context.Context
// Always cancel with a descriptive cause explaining why info retrieval ended.
- getInfoCtxCancel context.CancelCauseFunc
- files *[]*File
+ getInfoCtxCancel context.CancelCauseFunc
+ files *[]*File
+ fileSegmentsIndex g.Option[segments.Index]
_chunksPerRegularPiece chunkIndexType
t.metadataCompletedChunks[i] = false
}
t.nameMu.Lock()
- t.info = nil
+ // Invariant: info must not already be set at this point.
+ panicif.NotNil(t.info)
t.nameMu.Unlock()
}
}
t.nameMu.Lock()
t.info = info
+ panicif.True(t.fileSegmentsIndex.Set(info.FileSegmentsIndex()).Ok)
t.getInfoCtxCancel(errors.New("got info"))
t.nameMu.Unlock()
t._chunksPerRegularPiece = chunkIndexType(intCeilDiv(pp.Integer(t.usualPieceSize()), t.chunkSize))
}
func (ws *webseedPeer) onGotInfo(info *metainfo.Info) {
- ws.client.SetInfo(info)
+ ws.client.SetInfo(info, ws.peer.t.fileSegmentsIndex.UnwrapPtr())
// There should probably be a callback in Client instead, so it can remove pieces at its whim
// too.
ws.client.Pieces.Iterate(func(x uint32) bool {
// Max concurrent requests to a WebSeed for a given torrent.
MaxRequests int
- // TODO: Share this with Torrent.
- fileIndex segments.Index
+ fileIndex *segments.Index
info *metainfo.Info
// The pieces we can request with the Url. We're more likely to ban/block at the file-level
// given that's how requests are mapped to webseeds, but the torrent.Client works at the piece
// ResponseBodyWrapper, if set, wraps the reader for each webseed HTTP
// response body (presumably for rate limiting or instrumentation — usage is
// not visible in this excerpt; TODO confirm against callers).
type ResponseBodyWrapper func(io.Reader) io.Reader
// SetInfo supplies the torrent info — and, after this change, a pointer to
// the Torrent's shared file-segment index — once metadata is known, then
// adds every piece index to the set requestable from this webseed URL.
// NOTE(review): the -/+ prefixes below are diff residue in this excerpt and
// are preserved verbatim.
-func (me *Client) SetInfo(info *metainfo.Info) {
+func (me *Client) SetInfo(info *metainfo.Info, fileIndex *segments.Index) {
if !strings.HasSuffix(me.Url, "/") && info.IsDir() {
// In my experience, this is a non-conforming webseed. For example the
// http://ia600500.us.archive.org/1/items URLs in archive.org torrents.
return
}
// Share the Torrent's already-built index rather than recomputing
// info.FileSegmentsIndex() per webseed.
-me.fileIndex = info.FileSegmentsIndex()
+me.fileIndex = fileIndex
me.info = info
me.Pieces.AddRange(0, uint64(info.NumPieces()))
}