From 75b4a20e56a2a8335e335059022fdfcd4a3fa698 Mon Sep 17 00:00:00 2001 From: Justin Li Date: Mon, 15 Feb 2016 19:49:25 -0500 Subject: [PATCH] Bring in more old behaviour, use types for peer_id and infohash --- AUTHORS | 5 + LICENSE | 24 +++++ chihaya.go | 32 +++++- cmd/chihaya/main.go | 2 + config/config.go | 4 +- config/example.yaml | 63 ++++++----- errors/errors.go | 41 +++++++ pkg/event/event.go | 12 +-- pkg/event/event_test.go | 2 +- server/http/config.go | 11 +- server/http/query/query.go | 126 ++-------------------- server/http/query/query_test.go | 100 +++++++++++++++++ server/http/request.go | 171 ++++++++++++++++++++++++++++++ server/http/server.go | 33 ++---- server/http/writer.go | 20 +++- server/http/writer_test.go | 18 +++- server/store/memory/peer_store.go | 92 ++++++++-------- server/store/peer_store.go | 12 +-- tracker/middleware.go | 8 +- tracker/middleware_test.go | 8 +- tracker/tracker.go | 4 +- 21 files changed, 529 insertions(+), 259 deletions(-) create mode 100644 AUTHORS create mode 100644 LICENSE create mode 100644 errors/errors.go create mode 100644 server/http/query/query_test.go create mode 100644 server/http/request.go diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 0000000..b62effb --- /dev/null +++ b/AUTHORS @@ -0,0 +1,5 @@ +# This is the official list of Chihaya authors for copyright purposes, in alphabetical order. + +Jimmy Zelinskie +Justin Li + diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..db07654 --- /dev/null +++ b/LICENSE @@ -0,0 +1,24 @@ +Chihaya is released under a BSD 2-Clause license, reproduced below. + +Copyright (c) 2015, The Chihaya Authors +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/chihaya.go b/chihaya.go index 4f23bae..20ca691 100644 --- a/chihaya.go +++ b/chihaya.go @@ -7,10 +7,28 @@ package chihaya import ( "net" "time" + + "github.com/chihaya/chihaya/pkg/event" ) +type PeerID string +type InfoHash string + // AnnounceRequest represents the parsed parameters from an announce request. 
-type AnnounceRequest map[string]interface{} +type AnnounceRequest struct { + Event event.Event + InfoHash InfoHash + PeerID PeerID + IP string + Port uint16 + + Compact bool + NumWant uint64 + + Left, Downloaded, Uploaded uint64 + + Params Params +} // AnnounceResponse represents the parameters used to create an announce // response. @@ -25,7 +43,10 @@ type AnnounceResponse struct { } // ScrapeRequest represents the parsed parameters from a scrape request. -type ScrapeRequest map[string]interface{} +type ScrapeRequest struct { + InfoHashes []InfoHash + Params Params +} // ScrapeResponse represents the parameters used to create a scrape response. type ScrapeResponse struct { @@ -41,7 +62,12 @@ type Scrape struct { // Peer represents the connection details of a peer that is returned in an // announce response. type Peer struct { - ID string + ID PeerID IP net.IP Port uint16 } + +// Params is used to fetch request parameters. +type Params interface { + String(key string) (string, error) +} diff --git a/cmd/chihaya/main.go b/cmd/chihaya/main.go index b611e94..6864341 100644 --- a/cmd/chihaya/main.go +++ b/cmd/chihaya/main.go @@ -14,6 +14,8 @@ import ( "github.com/chihaya/chihaya/config" "github.com/chihaya/chihaya/server" "github.com/chihaya/chihaya/tracker" + + _ "github.com/chihaya/chihaya/server/http" ) var configPath string diff --git a/config/config.go b/config/config.go index e7487fd..9e91fbb 100644 --- a/config/config.go +++ b/config/config.go @@ -17,8 +17,8 @@ import ( // DefaultConfig is a sane configuration used as a fallback or for testing. var DefaultConfig = Config{ Tracker: TrackerConfig{ - AnnounceInterval: 10 * time.Minute, - MinAnnounceInterval: 5 * time.Minute, + AnnounceInterval: 30 * time.Minute, + MinAnnounceInterval: 20 * time.Minute, AnnounceMiddleware: []string{}, ScrapeMiddleware: []string{}, }, diff --git a/config/example.yaml b/config/example.yaml index 1aedece..88815aa 100644 --- a/config/example.yaml +++ b/config/example.yaml @@ -2,38 +2,37 @@ # Use of this source code is governed by the BSD 2-Clause license, # which can be found in the LICENSE file. -chihaya: - tracker: - announce: "10m" - minAnnounce: "5m" - announceMiddleware: - - "prometheus" - - "storeClientValidation" - - "storeCreateOnAnnounce" - scrapeMiddleware: - - "prometheus" - - "storeClientValidation" +tracker: + announce: 10m + minAnnounce: 5m + announceMiddleware: + - prometheus + - storeClientValidation + - storeCreateOnAnnounce + scrapeMiddleware: + - prometheus + - storeClientValidation - servers: - - name: "store" - config: - addr: "localhost:6880" - requestTimeout: "10s" - readTimeout: "10s" - writeTimeout: "10s" - clientStore: "memory" - peerStore: "memory" - peerStoreConfig: - gcAfter: "30m" - shards: 1 +servers: + - name: store + config: + addr: localhost:6880 + requestTimeout: 10s + readTimeout: 10s + writeTimeout: 10s + clientStore: memory + peerStore: memory + peerStoreConfig: + gcAfter: 30m + shards: 1 - - name: "http" - config: - addr: "localhost:6881" - requestTimeout: "10s" - readTimeout: "10s" - writeTimeout: "10s" + - name: http + config: + addr: localhost:6881 + requestTimeout: 10s + readTimeout: 10s + writeTimeout: 10s - - name: "udp" - config: - addr: "localhost:6882" + - name: udp + config: + addr: localhost:6882 diff --git a/errors/errors.go b/errors/errors.go new file mode 100644 index 0000000..2a915f4 --- /dev/null +++ b/errors/errors.go @@ -0,0 +1,41 @@ +// Copyright 2016 The Chihaya Authors. All rights reserved. 
+// Use of this source code is governed by the BSD 2-Clause license, +// which can be found in the LICENSE file. + +package errors + +import "net/http" + +type Error struct { + message string + public bool + status int +} + +func (e *Error) Error() string { + return e.message +} + +func (e *Error) Public() bool { + return e.public +} + +func (e *Error) Status() int { + return e.status +} + +func NewBadRequest(msg string) error { + return &Error{ + message: msg, + public: true, + status: http.StatusBadRequest, + } +} + +func NewMessage(msg string) error { + return &Error{ + message: msg, + public: true, + status: http.StatusOK, + } +} diff --git a/pkg/event/event.go b/pkg/event/event.go index bfc8023..cb88c4a 100644 --- a/pkg/event/event.go +++ b/pkg/event/event.go @@ -15,12 +15,12 @@ import ( var ErrUnknownEvent = errors.New("unknown event") // Event represents an event done by a BitTorrent client. -type event uint8 +type Event uint8 const ( // None is the event when a BitTorrent client announces due to time lapsed // since the previous announce. - None event = iota + None Event = iota // Started is the event sent by a BitTorrent client when it joins a swarm. Started @@ -34,8 +34,8 @@ const ( ) var ( - eventToString = make(map[event]string) - stringToEvent = make(map[string]event) + eventToString = make(map[Event]string) + stringToEvent = make(map[string]Event) ) func init() { @@ -50,7 +50,7 @@ func init() { } // New returns the proper Event given a string. -func New(eventStr string) (event, error) { +func New(eventStr string) (Event, error) { if e, ok := stringToEvent[strings.ToLower(eventStr)]; ok { return e, nil } @@ -59,7 +59,7 @@ func New(eventStr string) (event, error) { } // String implements Stringer for an event. -func (e event) String() string { +func (e Event) String() string { if name, ok := eventToString[e]; ok { return name } diff --git a/pkg/event/event_test.go b/pkg/event/event_test.go index 1164b1b..52a5341 100644 --- a/pkg/event/event_test.go +++ b/pkg/event/event_test.go @@ -13,7 +13,7 @@ import ( func TestNew(t *testing.T) { var table = []struct { data string - expected event + expected Event expectedErr error }{ {"", None, ErrUnknownEvent}, diff --git a/server/http/config.go b/server/http/config.go index 497f9e8..afad77b 100644 --- a/server/http/config.go +++ b/server/http/config.go @@ -11,10 +11,13 @@ import ( ) type httpConfig struct { - Addr string `yaml:"addr"` - RequestTimeout time.Duration `yaml:"requestTimeout"` - ReadTimeout time.Duration `yaml:"readTimeout"` - WriteTimeout time.Duration `yaml:"writeTimeout"` + Addr string `yaml:"addr"` + RequestTimeout time.Duration `yaml:"requestTimeout"` + ReadTimeout time.Duration `yaml:"readTimeout"` + WriteTimeout time.Duration `yaml:"writeTimeout"` + AllowIPSpoofing bool `yaml:"allowIPSpoofing"` + DualStackedPeers bool `yaml:"dualStackedPeers"` + RealIPHeader string `yaml:"realIPHeader"` } func newHTTPConfig(srvcfg interface{}) (*httpConfig, error) { diff --git a/server/http/query/query.go b/server/http/query/query.go index fb03bff..5f51555 100644 --- a/server/http/query/query.go +++ b/server/http/query/query.go @@ -14,7 +14,6 @@ import ( "strings" "github.com/chihaya/chihaya" - "github.com/chihaya/chihaya/pkg/event" ) // ErrKeyNotFound is returned when a provided key has no value associated with @@ -24,8 +23,8 @@ var ErrKeyNotFound = errors.New("query: value for the provided key does not exis // Query represents a parsed URL.Query. 
type Query struct { query string - infohashes []string params map[string]string + infoHashes []chihaya.InfoHash } // New parses a raw URL query. @@ -33,14 +32,12 @@ func New(query string) (*Query, error) { var ( keyStart, keyEnd int valStart, valEnd int - firstInfohash string - onKey = true - hasInfohash = false + onKey = true q = &Query{ query: query, - infohashes: nil, + infoHashes: nil, params: make(map[string]string), } ) @@ -73,19 +70,10 @@ func New(query string) (*Query, error) { } } - q.params[strings.ToLower(keyStr)] = valStr - if keyStr == "info_hash" { - if hasInfohash { - // Multiple infohashes - if q.infohashes == nil { - q.infohashes = []string{firstInfohash} - } - q.infohashes = append(q.infohashes, valStr) - } else { - firstInfohash = valStr - hasInfohash = true - } + q.infoHashes = append(q.infoHashes, chihaya.InfoHash(valStr)) + } else { + q.params[strings.ToLower(keyStr)] = valStr } valEnd = 0 @@ -106,18 +94,6 @@ func New(query string) (*Query, error) { return q, nil } -// Infohashes returns a list of requested infohashes. -func (q *Query) Infohashes() ([]string, error) { - if q.infohashes == nil { - infohash, err := q.String("info_hash") - if err != nil { - return nil, err - } - return []string{infohash}, nil - } - return q.infohashes, nil -} - // String returns a string parsed from a query. Every key can be returned as a // string because they are encoded in the URL as strings. func (q *Query) String(key string) (string, error) { @@ -144,91 +120,7 @@ func (q *Query) Uint64(key string) (uint64, error) { return val, nil } -// AnnounceRequest generates an chihaya.AnnounceRequest with the parameters -// provided by a query. -func (q *Query) AnnounceRequest() (chihaya.AnnounceRequest, error) { - request := make(chihaya.AnnounceRequest) - - request["query"] = q.query - - eventStr, err := q.String("event") - if err != nil { - return nil, errors.New("failed to parse parameter: event") - } - request["event"], err = event.New(eventStr) - if err != nil { - return nil, errors.New("failed to provide valid client event") - } - - compactStr, err := q.String("compact") - if err != nil { - return nil, errors.New("failed to parse parameter: compact") - } - request["compact"] = compactStr != "0" - - request["info_hash"], err = q.String("info_hash") - if err != nil { - return nil, errors.New("failed to parse parameter: info_hash") - } - - request["peer_id"], err = q.String("peer_id") - if err != nil { - return nil, errors.New("failed to parse parameter: peer_id") - } - - request["left"], err = q.Uint64("left") - if err != nil { - return nil, errors.New("failed to parse parameter: left") - } - - request["downloaded"], err = q.Uint64("downloaded") - if err != nil { - return nil, errors.New("failed to parse parameter: downloaded") - } - - request["uploaded"], err = q.Uint64("uploaded") - if err != nil { - return nil, errors.New("failed to parse parameter: uploaded") - } - - request["numwant"], err = q.String("numwant") - if err != nil { - return nil, errors.New("failed to parse parameter: numwant") - } - - request["port"], err = q.Uint64("port") - if err != nil { - return nil, errors.New("failed to parse parameter: port") - } - - request["ip"], err = q.String("ip") - if err != nil { - return nil, errors.New("failed to parse parameter: ip") - } - - request["ipv4"], err = q.String("ipv4") - if err != nil { - return nil, errors.New("failed to parse parameter: ipv4") - } - - request["ipv6"], err = q.String("ipv6") - if err != nil { - return nil, errors.New("failed to parse parameter: ipv6") - } - 
- return request, nil -} - -// ScrapeRequest generates an chihaya.ScrapeRequeset with the parameters -// provided by a query. -func (q *Query) ScrapeRequest() (chihaya.ScrapeRequest, error) { - request := make(chihaya.ScrapeRequest) - - var err error - request["info_hash"], err = q.Infohashes() - if err != nil { - return nil, errors.New("failed to parse parameter: info_hash") - } - - return request, nil +// InfoHashes returns a list of requested infohashes. +func (q *Query) InfoHashes() []chihaya.InfoHash { + return q.infoHashes } diff --git a/server/http/query/query_test.go b/server/http/query/query_test.go new file mode 100644 index 0000000..f421f1d --- /dev/null +++ b/server/http/query/query_test.go @@ -0,0 +1,100 @@ +// Copyright 2016 The Chihaya Authors. All rights reserved. +// Use of this source code is governed by the BSD 2-Clause license, +// which can be found in the LICENSE file. + +package query + +import ( + "net/url" + "testing" +) + +var ( + baseAddr = "https://www.subdomain.tracker.com:80/" + testInfoHash = "01234567890123456789" + testPeerID = "-TEST01-6wfG2wk6wWLc" + + ValidAnnounceArguments = []url.Values{ + url.Values{"peer_id": {testPeerID}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}}, + url.Values{"peer_id": {testPeerID}, "ip": {"192.168.0.1"}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}}, + url.Values{"peer_id": {testPeerID}, "ip": {"192.168.0.1"}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "numwant": {"28"}}, + url.Values{"peer_id": {testPeerID}, "ip": {"192.168.0.1"}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "event": {"stopped"}}, + url.Values{"peer_id": {testPeerID}, "ip": {"192.168.0.1"}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "event": {"started"}, "numwant": {"13"}}, + url.Values{"peer_id": {testPeerID}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "no_peer_id": {"1"}}, + url.Values{"peer_id": {testPeerID}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "compact": {"0"}, "no_peer_id": {"1"}}, + url.Values{"peer_id": {testPeerID}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "compact": {"0"}, "no_peer_id": {"1"}, "key": {"peerKey"}}, + url.Values{"peer_id": {testPeerID}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "compact": {"0"}, "no_peer_id": {"1"}, "key": {"peerKey"}, "trackerid": {"trackerId"}}, + url.Values{"peer_id": {"%3Ckey%3A+0x90%3E"}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "compact": {"0"}, "no_peer_id": {"1"}, "key": {"peerKey"}, "trackerid": {"trackerId"}}, + url.Values{"peer_id": {"%3Ckey%3A+0x90%3E"}, "compact": {"1"}}, + url.Values{"peer_id": {""}, "compact": {""}}, + } + + InvalidQueries = []string{ + baseAddr + "announce/?" + "info_hash=%0%a", + } +) + +func mapArrayEqual(boxed map[string][]string, unboxed map[string]string) bool { + if len(boxed) != len(unboxed) { + return false + } + + for mapKey, mapVal := range boxed { + // Always expect box to hold only one element + if len(mapVal) != 1 || mapVal[0] != unboxed[mapKey] { + return false + } + } + + return true +} + +func TestValidQueries(t *testing.T) { + for parseIndex, parseVal := range ValidAnnounceArguments { + parsedQueryObj, err := New(baseAddr + "announce/?" 
+ parseVal.Encode()) + if err != nil { + t.Error(err) + } + + if !mapArrayEqual(parseVal, parsedQueryObj.params) { + t.Errorf("Incorrect parse at item %d.\n Expected=%v\n Recieved=%v\n", parseIndex, parseVal, parsedQueryObj.params) + } + } +} + +func TestInvalidQueries(t *testing.T) { + for parseIndex, parseStr := range InvalidQueries { + parsedQueryObj, err := New(parseStr) + if err == nil { + t.Error("Should have produced error", parseIndex) + } + + if parsedQueryObj != nil { + t.Error("Should be nil after error", parsedQueryObj, parseIndex) + } + } +} + +func BenchmarkParseQuery(b *testing.B) { + for bCount := 0; bCount < b.N; bCount++ { + for parseIndex, parseStr := range ValidAnnounceArguments { + parsedQueryObj, err := New(baseAddr + "announce/?" + parseStr.Encode()) + if err != nil { + b.Error(err, parseIndex) + b.Log(parsedQueryObj) + } + } + } +} + +func BenchmarkURLParseQuery(b *testing.B) { + for bCount := 0; bCount < b.N; bCount++ { + for parseIndex, parseStr := range ValidAnnounceArguments { + parsedQueryObj, err := url.ParseQuery(baseAddr + "announce/?" + parseStr.Encode()) + if err != nil { + b.Error(err, parseIndex) + b.Log(parsedQueryObj) + } + } + } +} diff --git a/server/http/request.go b/server/http/request.go new file mode 100644 index 0000000..b6036c4 --- /dev/null +++ b/server/http/request.go @@ -0,0 +1,171 @@ +// Copyright 2016 The Chihaya Authors. All rights reserved. +// Use of this source code is governed by the BSD 2-Clause license, +// which can be found in the LICENSE file. + +package http + +import ( + "net" + "net/http" + + "github.com/chihaya/chihaya" + "github.com/chihaya/chihaya/errors" + "github.com/chihaya/chihaya/pkg/event" + "github.com/chihaya/chihaya/server/http/query" +) + +func announceRequest(r *http.Request, cfg *httpConfig) (*chihaya.AnnounceRequest, error) { + q, err := query.New(r.URL.RawQuery) + if err != nil { + return nil, err + } + + request := &chihaya.AnnounceRequest{Params: q} + + eventStr, err := q.String("event") + if err != nil { + return nil, errors.NewBadRequest("failed to parse parameter: event") + } + request.Event, err = event.New(eventStr) + if err != nil { + return nil, errors.NewBadRequest("failed to provide valid client event") + } + + compactStr, err := q.String("compact") + if err != nil { + return nil, errors.NewBadRequest("failed to parse parameter: compact") + } + request.Compact = compactStr != "0" + + infoHashes := q.InfoHashes() + if len(infoHashes) < 1 { + return nil, errors.NewBadRequest("no info_hash parameter supplied") + } + if len(infoHashes) > 1 { + return nil, errors.NewBadRequest("multiple info_hash parameters supplied") + } + request.InfoHash = infoHashes[0] + + peerID, err := q.String("peer_id") + if err != nil { + return nil, errors.NewBadRequest("failed to parse parameter: peer_id") + } + request.PeerID = chihaya.PeerID(peerID) + + request.Left, err = q.Uint64("left") + if err != nil { + return nil, errors.NewBadRequest("failed to parse parameter: left") + } + + request.Downloaded, err = q.Uint64("downloaded") + if err != nil { + return nil, errors.NewBadRequest("failed to parse parameter: downloaded") + } + + request.Uploaded, err = q.Uint64("uploaded") + if err != nil { + return nil, errors.NewBadRequest("failed to parse parameter: uploaded") + } + + request.NumWant, err = q.Uint64("numwant") + if err != nil { + return nil, errors.NewBadRequest("failed to parse parameter: numwant") + } + + port, err := q.Uint64("port") + if err != nil { + return nil, errors.NewBadRequest("failed to parse parameter: 
port") + } + request.Port = uint16(port) + + return request, nil +} + +func scrapeRequest(r *http.Request, cfg *httpConfig) (*chihaya.ScrapeRequest, error) { + q, err := query.New(r.URL.RawQuery) + if err != nil { + return nil, err + } + + request := &chihaya.ScrapeRequest{ + InfoHashes: q.InfoHashes(), + Params: q, + } + + return request, nil +} + +// requestedIP returns the IP address for a request. If there are multiple in +// the request, one IPv4 and one IPv6 will be returned. +func requestedIP(q *query.Query, r *http.Request, cfg *httpConfig) (v4, v6 net.IP, err error) { + var done bool + + if cfg.AllowIPSpoofing { + if str, e := q.String("ip"); e == nil { + if v4, v6, done = getIPs(str, v4, v6, cfg); done { + return + } + } + + if str, e := q.String("ipv4"); e == nil { + if v4, v6, done = getIPs(str, v4, v6, cfg); done { + return + } + } + + if str, e := q.String("ipv6"); e == nil { + if v4, v6, done = getIPs(str, v4, v6, cfg); done { + return + } + } + } + + if cfg.RealIPHeader != "" { + if xRealIPs, ok := r.Header[cfg.RealIPHeader]; ok { + if v4, v6, done = getIPs(string(xRealIPs[0]), v4, v6, cfg); done { + return + } + } + } else { + if r.RemoteAddr == "" && v4 == nil { + if v4, v6, done = getIPs("127.0.0.1", v4, v6, cfg); done { + return + } + } + + if v4, v6, done = getIPs(r.RemoteAddr, v4, v6, cfg); done { + return + } + } + + if v4 == nil && v6 == nil { + err = errors.NewBadRequest("failed to parse IP address") + } + + return +} + +func getIPs(ipstr string, ipv4, ipv6 net.IP, cfg *httpConfig) (net.IP, net.IP, bool) { + host, _, err := net.SplitHostPort(ipstr) + if err != nil { + host = ipstr + } + + if ip := net.ParseIP(host); ip != nil { + ipTo4 := ip.To4() + if ipv4 == nil && ipTo4 != nil { + ipv4 = ipTo4 + } else if ipv6 == nil && ipTo4 == nil { + ipv6 = ip + } + } + + var done bool + if cfg.DualStackedPeers { + done = ipv4 != nil && ipv6 != nil + } else { + done = ipv4 != nil || ipv6 != nil + } + + return ipv4, ipv6, done +} diff --git a/server/http/server.go b/server/http/server.go index becda8f..bb72596 100644 --- a/server/http/server.go +++ b/server/http/server.go @@ -15,7 +15,6 @@ import ( "github.com/chihaya/chihaya/config" "github.com/chihaya/chihaya/server" - "github.com/chihaya/chihaya/server/http/query" "github.com/chihaya/chihaya/tracker" ) @@ -99,49 +98,33 @@ func (s *httpServer) routes() *httprouter.Router { } func (s *httpServer) serveAnnounce(w http.ResponseWriter, r *http.Request, p httprouter.Params) { - writer := &writer{w} - - q, err := query.New(r.URL.RawQuery) + req, err := announceRequest(r, s.cfg) if err != nil { - writer.writeError(err) - return - } - - req, err := q.AnnounceRequest() - if err != nil { - writer.writeError(err) + writeError(w, err) return } resp, err := s.tkr.HandleAnnounce(req) if err != nil { - writer.writeError(err) + writeError(w, err) return } - writer.writeAnnounceResponse(resp) + writeAnnounceResponse(w, resp) } func (s *httpServer) serveScrape(w http.ResponseWriter, r *http.Request, p httprouter.Params) { - writer := &writer{w} - - q, err := query.New(r.URL.RawQuery) + req, err := scrapeRequest(r, s.cfg) if err != nil { - writer.writeError(err) - return - } - - req, err := q.ScrapeRequest() - if err != nil { - writer.writeError(err) + writeError(w, err) return } resp, err := s.tkr.HandleScrape(req) if err != nil { - writer.writeError(err) + writeError(w, err) return } - writer.writeScrapeResponse(resp) + writeScrapeResponse(w, resp) } diff --git a/server/http/writer.go b/server/http/writer.go index e55f8cd..22b63e2 100644 --- 
a/server/http/writer.go +++ b/server/http/writer.go @@ -8,18 +8,28 @@ import ( "net/http" "github.com/chihaya/chihaya" + "github.com/chihaya/chihaya/errors" "github.com/chihaya/chihaya/pkg/bencode" ) -type writer struct{ http.ResponseWriter } +func writeError(w http.ResponseWriter, err error) error { + message := "internal server error" + chihayaErr, ok := err.(*errors.Error) + + if ok { + w.WriteHeader(chihayaErr.Status()) + + if chihayaErr.Public() { + message = err.Error() + } + } -func (w *writer) writeError(err error) error { return bencode.NewEncoder(w).Encode(bencode.Dict{ - "failure reason": err.Error(), + "failure reason": message, }) } -func (w *writer) writeAnnounceResponse(resp *chihaya.AnnounceResponse) error { +func writeAnnounceResponse(w http.ResponseWriter, resp *chihaya.AnnounceResponse) error { bdict := bencode.Dict{ "complete": resp.Complete, "incomplete": resp.Incomplete, @@ -63,7 +73,7 @@ func (w *writer) writeAnnounceResponse(resp *chihaya.AnnounceResponse) error { return bencode.NewEncoder(w).Encode(bdict) } -func (w *writer) writeScrapeResponse(resp *chihaya.ScrapeResponse) error { +func writeScrapeResponse(w http.ResponseWriter, resp *chihaya.ScrapeResponse) error { filesDict := bencode.NewDict() for infohash, scrape := range resp.Files { filesDict[infohash] = bencode.Dict{ diff --git a/server/http/writer_test.go b/server/http/writer_test.go index e82e86c..e900af9 100644 --- a/server/http/writer_test.go +++ b/server/http/writer_test.go @@ -5,10 +5,10 @@ package http import ( - "errors" "net/http/httptest" "testing" + "github.com/chihaya/chihaya/errors" "github.com/stretchr/testify/assert" ) @@ -22,9 +22,17 @@ func TestWriteError(t *testing.T) { for _, tt := range table { r := httptest.NewRecorder() - w := &writer{r} - err := w.writeError(errors.New(tt.reason)) - assert.Nil(t, err, "writeError should not fail with test input") - assert.Equal(t, r.Body.String(), tt.expected, "writer should write the expected value") + err := writeError(r, errors.NewMessage(tt.reason)) + assert.Nil(t, err) + assert.Equal(t, r.Body.String(), tt.expected) + assert.Equal(t, r.Code, 200) } } + +func TestWriteStatus(t *testing.T) { + r := httptest.NewRecorder() + err := writeError(r, errors.NewBadRequest("something is missing")) + assert.Nil(t, err) + assert.Equal(t, r.Body.String(), "d14:failure reason20:something is missinge") + assert.Equal(t, r.Code, 400) +} diff --git a/server/store/memory/peer_store.go b/server/store/memory/peer_store.go index 42fa886..465b8e6 100644 --- a/server/store/memory/peer_store.go +++ b/server/store/memory/peer_store.go @@ -52,9 +52,6 @@ func newPeerStoreConfig(storecfg *store.Config) (*peerStoreConfig, error) { return &cfg, nil } -const seedersSuffix = "-s" -const leechersSuffix = "-l" - type peer struct { chihaya.Peer LastAction time.Time @@ -71,16 +68,28 @@ type peerStore struct { var _ store.PeerStore = &peerStore{} -func (s *peerStore) shardIndex(infohash string) uint32 { +func (s *peerStore) shardIndex(infoHash chihaya.InfoHash) uint32 { idx := fnv.New32() - idx.Write([]byte(infohash)) + idx.Write([]byte(infoHash)) return idx.Sum32() % uint32(len(s.shards)) } -func (s *peerStore) PutSeeder(infohash string, p chihaya.Peer) error { - key := infohash + seedersSuffix +func peerKey(p chihaya.Peer) string { + return string(p.IP) + string(p.ID) +} - shard := s.shards[s.shardIndex(infohash)] +func seedersKey(infoHash chihaya.InfoHash) string { + return string(infoHash) + "-s" +} + +func leechersKey(infoHash chihaya.InfoHash) string { + return string(infoHash) 
+ "-l" +} + +func (s *peerStore) PutSeeder(infoHash chihaya.InfoHash, p chihaya.Peer) error { + key := seedersKey(infoHash) + + shard := s.shards[s.shardIndex(infoHash)] shard.Lock() defer shard.Unlock() @@ -88,7 +97,7 @@ func (s *peerStore) PutSeeder(infohash string, p chihaya.Peer) error { shard.peers[key] = make(map[string]peer) } - shard.peers[key][p.ID] = peer{ + shard.peers[key][peerKey(p)] = peer{ Peer: p, LastAction: time.Now(), } @@ -96,10 +105,10 @@ func (s *peerStore) PutSeeder(infohash string, p chihaya.Peer) error { return nil } -func (s *peerStore) DeleteSeeder(infohash, peerID string) error { - key := infohash + seedersSuffix +func (s *peerStore) DeleteSeeder(infoHash chihaya.InfoHash, p chihaya.Peer) error { + key := seedersKey(infoHash) - shard := s.shards[s.shardIndex(infohash)] + shard := s.shards[s.shardIndex(infoHash)] shard.Lock() defer shard.Unlock() @@ -107,7 +116,7 @@ func (s *peerStore) DeleteSeeder(infohash, peerID string) error { return nil } - delete(shard.peers[key], peerID) + delete(shard.peers[key], peerKey(p)) if len(shard.peers[key]) == 0 { shard.peers[key] = nil @@ -116,10 +125,10 @@ func (s *peerStore) DeleteSeeder(infohash, peerID string) error { return nil } -func (s *peerStore) PutLeecher(infohash string, p chihaya.Peer) error { - key := infohash + leechersSuffix +func (s *peerStore) PutLeecher(infoHash chihaya.InfoHash, p chihaya.Peer) error { + key := leechersKey(infoHash) - shard := s.shards[s.shardIndex(infohash)] + shard := s.shards[s.shardIndex(infoHash)] shard.Lock() defer shard.Unlock() @@ -127,7 +136,7 @@ func (s *peerStore) PutLeecher(infohash string, p chihaya.Peer) error { shard.peers[key] = make(map[string]peer) } - shard.peers[key][p.ID] = peer{ + shard.peers[key][peerKey(p)] = peer{ Peer: p, LastAction: time.Now(), } @@ -135,10 +144,10 @@ func (s *peerStore) PutLeecher(infohash string, p chihaya.Peer) error { return nil } -func (s *peerStore) DeleteLeecher(infohash, peerID string) error { - key := infohash + leechersSuffix +func (s *peerStore) DeleteLeecher(infoHash chihaya.InfoHash, p chihaya.Peer) error { + key := leechersKey(infoHash) - shard := s.shards[s.shardIndex(infohash)] + shard := s.shards[s.shardIndex(infoHash)] shard.Lock() defer shard.Unlock() @@ -146,7 +155,7 @@ func (s *peerStore) DeleteLeecher(infohash, peerID string) error { return nil } - delete(shard.peers[key], peerID) + delete(shard.peers[key], peerKey(p)) if len(shard.peers[key]) == 0 { shard.peers[key] = nil @@ -155,23 +164,23 @@ func (s *peerStore) DeleteLeecher(infohash, peerID string) error { return nil } -func (s *peerStore) GraduateLeecher(infohash string, p chihaya.Peer) error { - leecherKey := infohash + leechersSuffix - seederKey := infohash + seedersSuffix +func (s *peerStore) GraduateLeecher(infoHash chihaya.InfoHash, p chihaya.Peer) error { + lkey := leechersKey(infoHash) + skey := seedersKey(infoHash) - shard := s.shards[s.shardIndex(infohash)] + shard := s.shards[s.shardIndex(infoHash)] shard.Lock() defer shard.Unlock() - if shard.peers[leecherKey] != nil { - delete(shard.peers[leecherKey], p.ID) + if shard.peers[lkey] != nil { + delete(shard.peers[lkey], peerKey(p)) } - if shard.peers[seederKey] == nil { - shard.peers[seederKey] = make(map[string]peer) + if shard.peers[skey] == nil { + shard.peers[skey] = make(map[string]peer) } - shard.peers[seederKey][p.ID] = peer{ + shard.peers[skey][peerKey(p)] = peer{ Peer: p, LastAction: time.Now(), } @@ -191,16 +200,13 @@ func (s *peerStore) CollectGarbage(cutoff time.Time) error { for _, key := range keys { 
shard.Lock() - var peersToDelete []string - for peerID, p := range shard.peers[key] { + + for peerKey, p := range shard.peers[key] { if p.LastAction.Before(cutoff) { - peersToDelete = append(peersToDelete, peerID) + delete(shard.peers[key], peerKey) } } - for _, peerID := range peersToDelete { - delete(shard.peers[key], peerID) - } shard.Unlock() runtime.Gosched() } @@ -211,17 +217,17 @@ func (s *peerStore) CollectGarbage(cutoff time.Time) error { return nil } -func (s *peerStore) AnnouncePeers(infohash string, seeder bool, numWant int) (peers, peers6 []chihaya.Peer, err error) { - leecherKey := infohash + leechersSuffix - seederKey := infohash + seedersSuffix +func (s *peerStore) AnnouncePeers(infoHash chihaya.InfoHash, seeder bool, numWant int) (peers, peers6 []chihaya.Peer, err error) { + lkey := leechersKey(infoHash) + skey := seedersKey(infoHash) - shard := s.shards[s.shardIndex(infohash)] + shard := s.shards[s.shardIndex(infoHash)] shard.RLock() defer shard.RUnlock() if seeder { // Append leechers as possible. - leechers := shard.peers[leecherKey] + leechers := shard.peers[lkey] for _, p := range leechers { if numWant == 0 { break @@ -236,7 +242,7 @@ func (s *peerStore) AnnouncePeers(infohash string, seeder bool, numWant int) (pe } } else { // Append as many seeders as possible. - seeders := shard.peers[seederKey] + seeders := shard.peers[skey] for _, p := range seeders { if numWant == 0 { break @@ -251,7 +257,7 @@ func (s *peerStore) AnnouncePeers(infohash string, seeder bool, numWant int) (pe } // Append leechers until we reach numWant. - leechers := shard.peers[leecherKey] + leechers := shard.peers[lkey] if numWant > 0 { for _, p := range leechers { if numWant == 0 { diff --git a/server/store/peer_store.go b/server/store/peer_store.go index 2c6c2a9..eb88ea4 100644 --- a/server/store/peer_store.go +++ b/server/store/peer_store.go @@ -15,14 +15,14 @@ var peerStoreDrivers = make(map[string]PeerStoreDriver) // PeerStore represents an interface for manipulating peers. type PeerStore interface { - PutSeeder(infohash string, p chihaya.Peer) error - DeleteSeeder(infohash, peerID string) error + PutSeeder(infoHash chihaya.InfoHash, p chihaya.Peer) error + DeleteSeeder(infoHash chihaya.InfoHash, peerID chihaya.Peer) error - PutLeecher(infohash string, p chihaya.Peer) error - DeleteLeecher(infohash, peerID string) error + PutLeecher(infoHash chihaya.InfoHash, p chihaya.Peer) error + DeleteLeecher(infoHash chihaya.InfoHash, peerID chihaya.Peer) error - GraduateLeecher(infohash string, p chihaya.Peer) error - AnnouncePeers(infohash string, seeder bool, numWant int) (peers, peers6 []chihaya.Peer, err error) + GraduateLeecher(infoHash chihaya.InfoHash, p chihaya.Peer) error + AnnouncePeers(infoHash chihaya.InfoHash, seeder bool, numWant int) (peers, peers6 []chihaya.Peer, err error) CollectGarbage(cutoff time.Time) error } diff --git a/tracker/middleware.go b/tracker/middleware.go index 431f63a..60e4742 100644 --- a/tracker/middleware.go +++ b/tracker/middleware.go @@ -11,7 +11,7 @@ import ( // AnnounceHandler is a function that operates on an AnnounceResponse before it // has been delivered to a client. -type AnnounceHandler func(*config.TrackerConfig, chihaya.AnnounceRequest, *chihaya.AnnounceResponse) error +type AnnounceHandler func(*config.TrackerConfig, *chihaya.AnnounceRequest, *chihaya.AnnounceResponse) error // AnnounceMiddleware is higher-order AnnounceHandler used to implement modular // behavior processing an announce. 
@@ -24,7 +24,7 @@ func (c *announceChain) Append(mw ...AnnounceMiddleware) { } func (c *announceChain) Handler() AnnounceHandler { - final := func(cfg *config.TrackerConfig, req chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error { + final := func(cfg *config.TrackerConfig, req *chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error { return nil } for i := len(c.mw) - 1; i >= 0; i-- { @@ -54,7 +54,7 @@ func RegisterAnnounceMiddleware(name string, mw AnnounceMiddleware) { // ScrapeHandler is a middleware function that operates on a ScrapeResponse // before it has been delivered to a client. -type ScrapeHandler func(*config.TrackerConfig, chihaya.ScrapeRequest, *chihaya.ScrapeResponse) error +type ScrapeHandler func(*config.TrackerConfig, *chihaya.ScrapeRequest, *chihaya.ScrapeResponse) error // ScrapeMiddleware is higher-order ScrapeHandler used to implement modular // behavior processing a scrape. @@ -67,7 +67,7 @@ func (c *scrapeChain) Append(mw ...ScrapeMiddleware) { } func (c *scrapeChain) Handler() ScrapeHandler { - final := func(cfg *config.TrackerConfig, req chihaya.ScrapeRequest, resp *chihaya.ScrapeResponse) error { + final := func(cfg *config.TrackerConfig, req *chihaya.ScrapeRequest, resp *chihaya.ScrapeResponse) error { return nil } for i := len(c.mw) - 1; i >= 0; i-- { diff --git a/tracker/middleware_test.go b/tracker/middleware_test.go index 6582675..dfe281e 100644 --- a/tracker/middleware_test.go +++ b/tracker/middleware_test.go @@ -14,7 +14,7 @@ import ( ) func testAnnounceMW1(next AnnounceHandler) AnnounceHandler { - return func(cfg *config.TrackerConfig, req chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error { + return func(cfg *config.TrackerConfig, req *chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error { resp.IPv4Peers = append(resp.IPv4Peers, chihaya.Peer{ Port: 1, }) @@ -23,7 +23,7 @@ func testAnnounceMW1(next AnnounceHandler) AnnounceHandler { } func testAnnounceMW2(next AnnounceHandler) AnnounceHandler { - return func(cfg *config.TrackerConfig, req chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error { + return func(cfg *config.TrackerConfig, req *chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error { resp.IPv4Peers = append(resp.IPv4Peers, chihaya.Peer{ Port: 2, }) @@ -32,7 +32,7 @@ func testAnnounceMW2(next AnnounceHandler) AnnounceHandler { } func testAnnounceMW3(next AnnounceHandler) AnnounceHandler { - return func(cfg *config.TrackerConfig, req chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error { + return func(cfg *config.TrackerConfig, req *chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error { resp.IPv4Peers = append(resp.IPv4Peers, chihaya.Peer{ Port: 3, }) @@ -47,7 +47,7 @@ func TestAnnounceChain(t *testing.T) { achain.Append(testAnnounceMW3) handler := achain.Handler() resp := &chihaya.AnnounceResponse{} - err := handler(nil, chihaya.AnnounceRequest{}, resp) + err := handler(nil, &chihaya.AnnounceRequest{}, resp) assert.Nil(t, err, "the handler should not return an error") assert.Equal(t, resp.IPv4Peers, []chihaya.Peer{chihaya.Peer{Port: 1}, chihaya.Peer{Port: 2}, chihaya.Peer{Port: 3}}, "the list of peers added from the middleware should be in the same order.") } diff --git a/tracker/tracker.go b/tracker/tracker.go index fa49e4b..9c59a92 100644 --- a/tracker/tracker.go +++ b/tracker/tracker.go @@ -49,7 +49,7 @@ func NewTracker(cfg *config.TrackerConfig) (*Tracker, error) { // HandleAnnounce runs an AnnounceRequest through a Tracker's middleware and // returns the result. 
-func (t *Tracker) HandleAnnounce(req chihaya.AnnounceRequest) (*chihaya.AnnounceResponse, error) { +func (t *Tracker) HandleAnnounce(req *chihaya.AnnounceRequest) (*chihaya.AnnounceResponse, error) { resp := &chihaya.AnnounceResponse{} err := t.handleAnnounce(t.cfg, req, resp) return resp, err @@ -57,7 +57,7 @@ func (t *Tracker) HandleAnnounce(req chihaya.AnnounceRequest) (*chihaya.Announce // HandleScrape runs a ScrapeRequest through a Tracker's middleware and returns // the result. -func (t *Tracker) HandleScrape(req chihaya.ScrapeRequest) (*chihaya.ScrapeResponse, error) { +func (t *Tracker) HandleScrape(req *chihaya.ScrapeRequest) (*chihaya.ScrapeResponse, error) { resp := &chihaya.ScrapeResponse{} err := t.handleScrape(t.cfg, req, resp) return resp, err
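
Reviewer note, not part of the patch: the sketch below is a minimal illustration of how downstream code might consume the typed request introduced above, using the AnnounceHandler and AnnounceMiddleware shapes and the RegisterAnnounceMiddleware(name, mw) registration shown in tracker/middleware.go. The package name, the logStarted middleware, and its log message are hypothetical; only the chihaya, config, event, and tracker identifiers come from this patch.

// Illustrative sketch only -- not included in this change.
package middleware_example // hypothetical package name

import (
	"log"

	"github.com/chihaya/chihaya"
	"github.com/chihaya/chihaya/config"
	"github.com/chihaya/chihaya/pkg/event"
	"github.com/chihaya/chihaya/tracker"
)

// logStarted reads the typed fields on *chihaya.AnnounceRequest directly,
// instead of pulling untyped values out of the old
// AnnounceRequest map[string]interface{}.
func logStarted(next tracker.AnnounceHandler) tracker.AnnounceHandler {
	return func(cfg *config.TrackerConfig, req *chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error {
		if req.Event == event.Started {
			// PeerID and InfoHash are string-backed types, so %x prints them as hex.
			log.Printf("peer %x joined swarm %x (left=%d)", req.PeerID, req.InfoHash, req.Left)
		}
		return next(cfg, req, resp)
	}
}

func init() {
	// Registered by name so it could be listed under announceMiddleware in a
	// config such as config/example.yaml.
	tracker.RegisterAnnounceMiddleware("logStarted", logStarted)
}

Because the middleware receives *chihaya.AnnounceRequest rather than a map, field access like req.Event and req.Left is checked at compile time, which is the main motivation for the typed PeerID/InfoHash and struct-based requests in this change.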