Refactor more, add scrape tests

This commit is contained in:
Justin Li 2014-07-15 21:07:33 -04:00
parent bec70e9759
commit 586b6852de
4 changed files with 152 additions and 41 deletions

View file

@ -5,24 +5,14 @@
package http
import (
"io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
"reflect"
"testing"
"github.com/chihaya/bencode"
"github.com/chihaya/chihaya/config"
_ "github.com/chihaya/chihaya/drivers/backend/noop"
_ "github.com/chihaya/chihaya/drivers/tracker/memory"
)
type params map[string]string
const infoHash = "%89%d4%bcR%11%16%ca%1dB%a2%f3%0d%1f%27M%94%e4h%1d%af"
func TestPublicAnnounce(t *testing.T) {
srv, _ := setupTracker(&config.DefaultConfig)
defer srv.Close()
@ -30,12 +20,12 @@ func TestPublicAnnounce(t *testing.T) {
// Add one seeder.
peer := makePeerParams("peer1", true)
expected := makeResponse(1, 0, bencode.List{})
checkResponse(peer, expected, srv, t)
checkAnnounce(peer, expected, srv, t)
// Add another seeder.
peer = makePeerParams("peer2", true)
expected = makeResponse(2, 0, bencode.List{})
checkResponse(peer, expected, srv, t)
checkAnnounce(peer, expected, srv, t)
// Add a leecher.
peer = makePeerParams("peer3", false)
@ -43,20 +33,20 @@ func TestPublicAnnounce(t *testing.T) {
makePeerResponse("peer1"),
makePeerResponse("peer2"),
})
checkResponse(peer, expected, srv, t)
checkAnnounce(peer, expected, srv, t)
// Remove seeder.
peer = makePeerParams("peer1", true)
peer["event"] = "stopped"
expected = makeResponse(1, 1, nil)
checkResponse(peer, expected, srv, t)
checkAnnounce(peer, expected, srv, t)
// Check seeders.
peer = makePeerParams("peer3", false)
expected = makeResponse(1, 1, bencode.List{
makePeerResponse("peer2"),
})
checkResponse(peer, expected, srv, t)
checkAnnounce(peer, expected, srv, t)
}
func makePeerParams(id string, seed bool) params {
@ -99,21 +89,8 @@ func makeResponse(seeders, leechers int64, peers bencode.List) bencode.Dict {
return dict
}
func checkResponse(p params, expected interface{}, srv *httptest.Server, t *testing.T) bool {
values := &url.Values{}
for k, v := range p {
values.Add(k, v)
}
response, err := http.Get(srv.URL + "/announce?" + values.Encode())
if err != nil {
t.Error(err)
return false
}
body, err := ioutil.ReadAll(response.Body)
response.Body.Close()
func checkAnnounce(p params, expected interface{}, srv *httptest.Server, t *testing.T) bool {
body, err := announce(p, srv)
if err != nil {
t.Error(err)
return false
@ -126,12 +103,3 @@ func checkResponse(p params, expected interface{}, srv *httptest.Server, t *test
}
return true
}
// setupTracker constructs a Tracker from the given configuration and
// serves its routes on an httptest.Server. On success the caller owns
// the returned server and must Close it; on error the server is nil.
func setupTracker(cfg *config.Config) (*httptest.Server, error) {
	tkr, err := NewTracker(cfg)
	if err != nil {
		return nil, err
	}
	return httptest.NewServer(setupRoutes(tkr, cfg)), nil
}

45
http/http_test.go Normal file
View file

@ -0,0 +1,45 @@
// Copyright 2014 The Chihaya Authors. All rights reserved.
// Use of this source code is governed by the BSD 2-Clause license,
// which can be found in the LICENSE file.
package http
import (
"io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
"github.com/chihaya/chihaya/config"
_ "github.com/chihaya/chihaya/drivers/backend/noop"
_ "github.com/chihaya/chihaya/drivers/tracker/memory"
)
type params map[string]string
const infoHash = "%89%d4%bcR%11%16%ca%1dB%a2%f3%0d%1f%27M%94%e4h%1d%af"
// setupTracker builds a tracker from cfg and exposes its HTTP routes on
// a test server. The caller is responsible for closing the returned
// server; when tracker construction fails, no server is started.
func setupTracker(cfg *config.Config) (*httptest.Server, error) {
	tracker, err := NewTracker(cfg)
	if err != nil {
		return nil, err
	}

	handler := setupRoutes(tracker, cfg)
	return httptest.NewServer(handler), nil
}
// announce performs a GET against the test server's /announce endpoint
// with the given query parameters and returns the raw response body.
func announce(p params, srv *httptest.Server) ([]byte, error) {
	query := url.Values{}
	for key, value := range p {
		query.Add(key, value)
	}

	resp, err := http.Get(srv.URL + "/announce?" + query.Encode())
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ioutil.ReadAll(resp.Body)
}

98
http/scrape_test.go Normal file
View file

@ -0,0 +1,98 @@
// Copyright 2014 The Chihaya Authors. All rights reserved.
// Use of this source code is governed by the BSD 2-Clause license,
// which can be found in the LICENSE file.
package http
import (
"io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
"reflect"
"testing"
"github.com/chihaya/bencode"
"github.com/chihaya/chihaya/config"
)
// TestPublicScrape verifies that scrape statistics (complete,
// incomplete, downloaded) track a torrent's swarm as peers announce,
// stop, and complete.
//
// Fixes: the original discarded the error from setupTracker (a nil srv
// would panic on srv.Close) and silently ignored every announce error,
// which made later scrape expectations fail for unrelated reasons.
func TestPublicScrape(t *testing.T) {
	srv, err := setupTracker(&config.DefaultConfig)
	if err != nil {
		t.Fatal(err)
	}
	defer srv.Close()

	scrapeParams := params{
		"info_hash": infoHash,
	}

	// mustAnnounce aborts the test on announce failure, since every
	// subsequent scrape expectation depends on the announce landing.
	mustAnnounce := func(p params) {
		if _, err := announce(p, srv); err != nil {
			t.Fatal(err)
		}
	}

	// Add one seeder.
	mustAnnounce(makePeerParams("peer1", true))
	checkScrape(scrapeParams, makeScrapeResponse(1, 0, 0), srv, t)

	// Add another seeder.
	mustAnnounce(makePeerParams("peer2", true))
	checkScrape(scrapeParams, makeScrapeResponse(2, 0, 0), srv, t)

	// Add a leecher.
	mustAnnounce(makePeerParams("peer3", false))
	checkScrape(scrapeParams, makeScrapeResponse(2, 1, 0), srv, t)

	// Remove the first seeder.
	peer := makePeerParams("peer1", true)
	peer["event"] = "stopped"
	mustAnnounce(peer)
	checkScrape(scrapeParams, makeScrapeResponse(1, 1, 0), srv, t)

	// peer3 finishes downloading the torrent.
	peer = makePeerParams("peer3", true)
	peer["event"] = "complete"
	mustAnnounce(peer)
	checkScrape(scrapeParams, makeScrapeResponse(2, 0, 0), srv, t)
}
// makeScrapeResponse builds the bencoded dictionary that a scrape for
// infoHash is expected to return, given the swarm's counters.
func makeScrapeResponse(seeders, leechers, downloaded int64) bencode.Dict {
	stats := bencode.Dict{
		"complete":   seeders,
		"incomplete": leechers,
		"downloaded": downloaded,
	}

	return bencode.Dict{
		"files": bencode.Dict{infoHash: stats},
	}
}
// checkScrape issues a scrape request with the given parameters and
// reports whether the decoded response deep-equals expected. Transport,
// read, and decode failures — as well as mismatches — are reported via t.
//
// Fixes: the original never checked the error returned by
// bencode.Unmarshal, so a malformed response surfaced as a confusing
// DeepEqual mismatch instead of a decode error.
func checkScrape(p params, expected interface{}, srv *httptest.Server, t *testing.T) bool {
	values := &url.Values{}
	for k, v := range p {
		values.Add(k, v)
	}

	response, err := http.Get(srv.URL + "/scrape?" + values.Encode())
	if err != nil {
		t.Error(err)
		return false
	}

	body, err := ioutil.ReadAll(response.Body)
	response.Body.Close()
	if err != nil {
		t.Error(err)
		return false
	}

	got, err := bencode.Unmarshal(body)
	if err != nil {
		t.Error(err)
		return false
	}

	if !reflect.DeepEqual(got, expected) {
		t.Errorf("\ngot: %#v\nwanted: %#v", got, expected)
		return false
	}
	return true
}

View file

@ -281,11 +281,11 @@ func NewScrape(cfg *config.Config, r *http.Request, p httprouter.Params) (*Scrap
}
if q.Infohashes == nil {
if _, exists := q.Params["infohash"]; !exists {
if _, exists := q.Params["info_hash"]; !exists {
// There aren't any infohashes.
return nil, ErrMalformedRequest
}
q.Infohashes = []string{q.Params["infohash"]}
q.Infohashes = []string{q.Params["info_hash"]}
}
return &Scrape{