responseHook: always include the announcing peer

Fixes #231
Fixes #232
Leo Balduf 2016-09-28 03:02:37 -04:00
parent 71232b3fad
commit 45a5e632ae
2 changed files with 20 additions and 2 deletions


@@ -102,9 +102,9 @@ func (h *responseHook) HandleAnnounce(ctx context.Context, req *bittorrent.Annou
 	switch len(req.IP) {
 	case net.IPv4len:
-		resp.IPv4Peers, err = h.store.AnnouncePeers(req.InfoHash, req.Left == 0, int(req.NumWant), req.Peer)
+		resp.IPv4Peers, err = h.getPeers(req.InfoHash, req.Left == 0, int(req.NumWant), req.Peer)
 	case net.IPv6len:
-		resp.IPv6Peers, err = h.store.AnnouncePeers(req.InfoHash, req.Left == 0, int(req.NumWant), req.Peer)
+		resp.IPv6Peers, err = h.getPeers(req.InfoHash, req.Left == 0, int(req.NumWant), req.Peer)
 	default:
 		return ctx, ErrInvalidIP
 	}
@@ -112,6 +112,22 @@ func (h *responseHook) HandleAnnounce(ctx context.Context, req *bittorrent.Annou
 	return ctx, err
 }
 
+func (h *responseHook) getPeers(ih bittorrent.InfoHash, seeder bool, numWant int, p bittorrent.Peer) ([]bittorrent.Peer, error) {
+	peers, err := h.store.AnnouncePeers(ih, seeder, numWant, p)
+	if err != nil && err != storage.ErrResourceDoesNotExist {
+		return nil, err
+	}
+
+	// Insert announcing peer.
+	// Some clients expect at least themselves in every announce response.
+	if len(peers) < numWant || len(peers) == 0 {
+		return append(peers, p), nil
+	}
+	peers[len(peers)-1] = p
+
+	return peers, nil
+}
+
 func (h *responseHook) HandleScrape(ctx context.Context, req *bittorrent.ScrapeRequest, resp *bittorrent.ScrapeResponse) (context.Context, error) {
 	if ctx.Value(SkipResponseHookKey) != nil {
 		return ctx, nil

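For illustration, the append-or-replace behaviour that getPeers applies after the store call can be exercised in isolation. The following is a minimal standalone sketch, not part of the commit: the Peer type and the includeAnnouncer helper are simplified stand-ins for the tracker's bittorrent.Peer and the getPeers logic above.

package main

import "fmt"

// Peer is a simplified stand-in for the tracker's bittorrent.Peer.
type Peer struct{ ID string }

// includeAnnouncer mirrors the logic getPeers applies to the store's result:
// append the announcing peer when there is room (or no peers at all),
// otherwise overwrite the last slot so the response never exceeds numWant.
func includeAnnouncer(peers []Peer, numWant int, announcer Peer) []Peer {
	if len(peers) < numWant || len(peers) == 0 {
		return append(peers, announcer)
	}
	peers[len(peers)-1] = announcer
	return peers
}

func main() {
	me := Peer{ID: "announcer"}

	// Untracked or empty swarm: the store returned nothing, yet the
	// announcing peer still appears in the response.
	fmt.Println(includeAnnouncer(nil, 50, me)) // [{announcer}]

	// Full response: the last peer is replaced so the length stays at numWant.
	fmt.Println(includeAnnouncer([]Peer{{ID: "a"}, {ID: "b"}}, 2, me)) // [{a} {announcer}]
}

Replacing the last slot rather than appending keeps the response within the client's requested numWant while still guaranteeing the announcing peer is present.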

@@ -51,6 +51,8 @@ type PeerStore interface {
 	// - if seeder is true, should ideally return more leechers than seeders
 	// - if seeder is false, should ideally return more seeders than
 	//   leechers
+	//
+	// Returns ErrResourceDoesNotExist if the provided infoHash is not tracked.
 	AnnouncePeers(infoHash bittorrent.InfoHash, seeder bool, numWant int, p bittorrent.Peer) (peers []bittorrent.Peer, err error)
 
 	// ScrapeSwarm returns information required to answer a scrape request
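The new doc comment pins down the error contract that getPeers relies on: an untracked infohash is signalled with a sentinel error rather than an empty peer list. A minimal sketch of that contract from the implementation side, using stand-in types instead of the real bittorrent and storage packages (the map-backed memoryStore below is illustrative only, not part of this commit):

package main

import (
	"errors"
	"fmt"
)

// Stand-ins for the real storage and bittorrent packages.
var ErrResourceDoesNotExist = errors.New("resource does not exist")

type InfoHash string
type Peer struct{ ID string }

// memoryStore is a toy map-backed store illustrating the documented contract:
// an untracked infohash yields ErrResourceDoesNotExist rather than an empty
// peer list, so callers can tell the two cases apart.
type memoryStore struct {
	swarms map[InfoHash][]Peer
}

func (s *memoryStore) AnnouncePeers(infoHash InfoHash, seeder bool, numWant int, p Peer) ([]Peer, error) {
	peers, ok := s.swarms[infoHash]
	if !ok {
		// The infohash is not tracked; return the sentinel so callers
		// (like getPeers above) can treat it as "no peers yet".
		return nil, ErrResourceDoesNotExist
	}
	if numWant < len(peers) {
		peers = peers[:numWant]
	}
	return peers, nil
}

func main() {
	s := &memoryStore{swarms: map[InfoHash][]Peer{}}
	_, err := s.AnnouncePeers("not-tracked", false, 50, Peer{ID: "me"})
	fmt.Println(err == ErrResourceDoesNotExist) // true
}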