Bring in more old behaviour, use types for peer_id and infohash
parent 05b7b955a1
commit 75b4a20e56
21 changed files with 529 additions and 259 deletions
AUTHORS (new file)
@@ -0,0 +1,5 @@
+# This is the official list of Chihaya authors for copyright purposes, in alphabetical order.
+
+Jimmy Zelinskie <jimmyzelinskie@gmail.com>
+Justin Li <jli@j-li.net>
LICENSE (new file)
@@ -0,0 +1,24 @@
+Chihaya is released under a BSD 2-Clause license, reproduced below.
+
+Copyright (c) 2015, The Chihaya Authors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice,
+  this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
chihaya.go (32 lines changed)
@@ -7,10 +7,28 @@ package chihaya
 import (
 	"net"
 	"time"
+
+	"github.com/chihaya/chihaya/pkg/event"
 )
 
+type PeerID string
+type InfoHash string
+
 // AnnounceRequest represents the parsed parameters from an announce request.
-type AnnounceRequest map[string]interface{}
+type AnnounceRequest struct {
+	Event    event.Event
+	InfoHash InfoHash
+	PeerID   PeerID
+	IP       string
+	Port     uint16
+
+	Compact bool
+	NumWant uint64
+
+	Left, Downloaded, Uploaded uint64
+
+	Params Params
+}
 
 // AnnounceResponse represents the parameters used to create an announce
 // response.
@@ -25,7 +43,10 @@ type AnnounceResponse struct {
 }
 
 // ScrapeRequest represents the parsed parameters from a scrape request.
-type ScrapeRequest map[string]interface{}
+type ScrapeRequest struct {
+	InfoHashes []InfoHash
+	Params     Params
+}
 
 // ScrapeResponse represents the parameters used to create a scrape response.
 type ScrapeResponse struct {
@@ -41,7 +62,12 @@ type Scrape struct {
 // Peer represents the connection details of a peer that is returned in an
 // announce response.
 type Peer struct {
-	ID   string
+	ID   PeerID
 	IP   net.IP
 	Port uint16
 }
+
+// Params is used to fetch request parameters.
+type Params interface {
+	String(key string) (string, error)
+}
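The practical effect of the struct types: reads that used to be map lookups with runtime type assertions become plain typed field accesses, and PeerID/InfoHash can no longer be mixed up with ordinary strings. A rough before/after sketch for comparison (illustrative, not taken from this commit):

package example

import (
	"errors"

	"github.com/chihaya/chihaya"
)

// peerIDBefore shows how a handler had to read the old map-based request:
// every access was a key lookup plus a runtime type assertion.
func peerIDBefore(req map[string]interface{}) (string, error) {
	peerID, ok := req["peer_id"].(string)
	if !ok {
		return "", errors.New("missing peer_id")
	}
	return peerID, nil
}

// peerIDAfter reads the same value from the new struct: the field is always
// present and carries its own type, so the runtime failure mode disappears.
func peerIDAfter(req *chihaya.AnnounceRequest) chihaya.PeerID {
	return req.PeerID
}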
|
@ -14,6 +14,8 @@ import (
|
||||||
"github.com/chihaya/chihaya/config"
|
"github.com/chihaya/chihaya/config"
|
||||||
"github.com/chihaya/chihaya/server"
|
"github.com/chihaya/chihaya/server"
|
||||||
"github.com/chihaya/chihaya/tracker"
|
"github.com/chihaya/chihaya/tracker"
|
||||||
|
|
||||||
|
_ "github.com/chihaya/chihaya/server/http"
|
||||||
)
|
)
|
||||||
|
|
||||||
var configPath string
|
var configPath string
|
||||||
|
|
|
@ -17,8 +17,8 @@ import (
|
||||||
// DefaultConfig is a sane configuration used as a fallback or for testing.
|
// DefaultConfig is a sane configuration used as a fallback or for testing.
|
||||||
var DefaultConfig = Config{
|
var DefaultConfig = Config{
|
||||||
Tracker: TrackerConfig{
|
Tracker: TrackerConfig{
|
||||||
AnnounceInterval: 10 * time.Minute,
|
AnnounceInterval: 30 * time.Minute,
|
||||||
MinAnnounceInterval: 5 * time.Minute,
|
MinAnnounceInterval: 20 * time.Minute,
|
||||||
AnnounceMiddleware: []string{},
|
AnnounceMiddleware: []string{},
|
||||||
ScrapeMiddleware: []string{},
|
ScrapeMiddleware: []string{},
|
||||||
},
|
},
|
||||||
|
|
|
@ -2,38 +2,37 @@
|
||||||
# Use of this source code is governed by the BSD 2-Clause license,
|
# Use of this source code is governed by the BSD 2-Clause license,
|
||||||
# which can be found in the LICENSE file.
|
# which can be found in the LICENSE file.
|
||||||
|
|
||||||
chihaya:
|
tracker:
|
||||||
tracker:
|
announce: 10m
|
||||||
announce: "10m"
|
minAnnounce: 5m
|
||||||
minAnnounce: "5m"
|
announceMiddleware:
|
||||||
announceMiddleware:
|
- prometheus
|
||||||
- "prometheus"
|
- storeClientValidation
|
||||||
- "storeClientValidation"
|
- storeCreateOnAnnounce
|
||||||
- "storeCreateOnAnnounce"
|
scrapeMiddleware:
|
||||||
scrapeMiddleware:
|
- prometheus
|
||||||
- "prometheus"
|
- storeClientValidation
|
||||||
- "storeClientValidation"
|
|
||||||
|
|
||||||
servers:
|
servers:
|
||||||
- name: "store"
|
- name: store
|
||||||
config:
|
config:
|
||||||
addr: "localhost:6880"
|
addr: localhost:6880
|
||||||
requestTimeout: "10s"
|
requestTimeout: 10s
|
||||||
readTimeout: "10s"
|
readTimeout: 10s
|
||||||
writeTimeout: "10s"
|
writeTimeout: 10s
|
||||||
clientStore: "memory"
|
clientStore: memory
|
||||||
peerStore: "memory"
|
peerStore: memory
|
||||||
peerStoreConfig:
|
peerStoreConfig:
|
||||||
gcAfter: "30m"
|
gcAfter: 30m
|
||||||
shards: 1
|
shards: 1
|
||||||
|
|
||||||
- name: "http"
|
- name: http
|
||||||
config:
|
config:
|
||||||
addr: "localhost:6881"
|
addr: localhost:6881
|
||||||
requestTimeout: "10s"
|
requestTimeout: 10s
|
||||||
readTimeout: "10s"
|
readTimeout: 10s
|
||||||
writeTimeout: "10s"
|
writeTimeout: 10s
|
||||||
|
|
||||||
- name: "udp"
|
- name: udp
|
||||||
config:
|
config:
|
||||||
addr: "localhost:6882"
|
addr: localhost:6882
|
||||||
|
|
errors/errors.go (new file)
@@ -0,0 +1,41 @@
+// Copyright 2016 The Chihaya Authors. All rights reserved.
+// Use of this source code is governed by the BSD 2-Clause license,
+// which can be found in the LICENSE file.
+
+package errors
+
+import "net/http"
+
+type Error struct {
+	message string
+	public  bool
+	status  int
+}
+
+func (e *Error) Error() string {
+	return e.message
+}
+
+func (e *Error) Public() bool {
+	return e.public
+}
+
+func (e *Error) Status() int {
+	return e.status
+}
+
+func NewBadRequest(msg string) error {
+	return &Error{
+		message: msg,
+		public:  true,
+		status:  http.StatusBadRequest,
+	}
+}
+
+func NewMessage(msg string) error {
+	return &Error{
+		message: msg,
+		public:  true,
+		status:  http.StatusOK,
+	}
+}
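A minimal usage sketch of the new errors package (not part of the commit): handlers return these values, and the transport layer inspects the status code and public flag before deciding what to send back to the client.

package main

import (
	"fmt"

	"github.com/chihaya/chihaya/errors"
)

func main() {
	// A handler would return this when a required parameter is malformed.
	err := errors.NewBadRequest("failed to parse parameter: port")

	// Only *errors.Error values carry a status code and a public flag;
	// anything else is treated as an internal error by the HTTP writer below.
	if chihayaErr, ok := err.(*errors.Error); ok {
		// prints: 400 true failed to parse parameter: port
		fmt.Println(chihayaErr.Status(), chihayaErr.Public(), chihayaErr.Error())
	}
}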
@@ -15,12 +15,12 @@ import (
 var ErrUnknownEvent = errors.New("unknown event")
 
 // Event represents an event done by a BitTorrent client.
-type event uint8
+type Event uint8
 
 const (
 	// None is the event when a BitTorrent client announces due to time lapsed
 	// since the previous announce.
-	None event = iota
+	None Event = iota
 
 	// Started is the event sent by a BitTorrent client when it joins a swarm.
 	Started
@@ -34,8 +34,8 @@ const (
 )
 
 var (
-	eventToString = make(map[event]string)
-	stringToEvent = make(map[string]event)
+	eventToString = make(map[Event]string)
+	stringToEvent = make(map[string]Event)
 )
 
 func init() {
@@ -50,7 +50,7 @@ func init() {
 }
 
 // New returns the proper Event given a string.
-func New(eventStr string) (event, error) {
+func New(eventStr string) (Event, error) {
 	if e, ok := stringToEvent[strings.ToLower(eventStr)]; ok {
 		return e, nil
 	}
@@ -59,7 +59,7 @@ func New(eventStr string) (event, error) {
 }
 
 // String implements Stringer for an event.
-func (e event) String() string {
+func (e Event) String() string {
 	if name, ok := eventToString[e]; ok {
 		return name
 	}

@@ -13,7 +13,7 @@ import (
 func TestNew(t *testing.T) {
 	var table = []struct {
 		data        string
-		expected    Event
 		expectedErr error
 	}{
 		{"", None, ErrUnknownEvent},
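A minimal sketch of the now-exported Event API (illustrative, not from the commit; it assumes the usual BitTorrent event strings are registered in the package's init):

package main

import (
	"fmt"

	"github.com/chihaya/chihaya/pkg/event"
)

func main() {
	// Parse the client-supplied event string; unknown values yield ErrUnknownEvent.
	e, err := event.New("started")
	if err != nil {
		fmt.Println(err)
		return
	}

	// The exported type can now be stored directly on chihaya.AnnounceRequest.
	fmt.Println(e == event.Started, e.String())
}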
@@ -11,10 +11,13 @@ import (
 )
 
 type httpConfig struct {
 	Addr           string        `yaml:"addr"`
 	RequestTimeout time.Duration `yaml:"requestTimeout"`
 	ReadTimeout    time.Duration `yaml:"readTimeout"`
 	WriteTimeout   time.Duration `yaml:"writeTimeout"`
+	AllowIPSpoofing  bool   `yaml:"allowIPSpoofing"`
+	DualStackedPeers bool   `yaml:"dualStackedPeers"`
+	RealIPHeader     string `yaml:"realIPHeader"`
 }
 
 func newHTTPConfig(srvcfg interface{}) (*httpConfig, error) {
@@ -14,7 +14,6 @@ import (
 	"strings"
 
 	"github.com/chihaya/chihaya"
-	"github.com/chihaya/chihaya/pkg/event"
 )
 
 // ErrKeyNotFound is returned when a provided key has no value associated with
@@ -24,8 +23,8 @@ var ErrKeyNotFound = errors.New("query: value for the provided key does not exist")
 // Query represents a parsed URL.Query.
 type Query struct {
 	query      string
-	infohashes []string
 	params     map[string]string
+	infoHashes []chihaya.InfoHash
 }
 
 // New parses a raw URL query.
@@ -33,14 +32,12 @@ func New(query string) (*Query, error) {
 	var (
 		keyStart, keyEnd int
 		valStart, valEnd int
-		firstInfohash    string
 
 		onKey = true
-		hasInfohash = false
 
 		q = &Query{
 			query:      query,
-			infohashes: nil,
+			infoHashes: nil,
 			params:     make(map[string]string),
 		}
 	)
@@ -73,19 +70,10 @@ func New(query string) (*Query, error) {
 			}
 		}
 
-		q.params[strings.ToLower(keyStr)] = valStr
-
 		if keyStr == "info_hash" {
-			if hasInfohash {
-				// Multiple infohashes
-				if q.infohashes == nil {
-					q.infohashes = []string{firstInfohash}
-				}
-				q.infohashes = append(q.infohashes, valStr)
-			} else {
-				firstInfohash = valStr
-				hasInfohash = true
-			}
+			q.infoHashes = append(q.infoHashes, chihaya.InfoHash(valStr))
+		} else {
+			q.params[strings.ToLower(keyStr)] = valStr
 		}
 
 		valEnd = 0
@@ -106,18 +94,6 @@ func New(query string) (*Query, error) {
 	return q, nil
 }
 
-// Infohashes returns a list of requested infohashes.
-func (q *Query) Infohashes() ([]string, error) {
-	if q.infohashes == nil {
-		infohash, err := q.String("info_hash")
-		if err != nil {
-			return nil, err
-		}
-		return []string{infohash}, nil
-	}
-	return q.infohashes, nil
-}
-
 // String returns a string parsed from a query. Every key can be returned as a
 // string because they are encoded in the URL as strings.
 func (q *Query) String(key string) (string, error) {
@@ -144,91 +120,7 @@ func (q *Query) Uint64(key string) (uint64, error) {
 	return val, nil
 }
 
-// AnnounceRequest generates an chihaya.AnnounceRequest with the parameters
-// provided by a query.
-func (q *Query) AnnounceRequest() (chihaya.AnnounceRequest, error) {
-	request := make(chihaya.AnnounceRequest)
-
-	request["query"] = q.query
-
-	eventStr, err := q.String("event")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: event")
-	}
-	request["event"], err = event.New(eventStr)
-	if err != nil {
-		return nil, errors.New("failed to provide valid client event")
-	}
-
-	compactStr, err := q.String("compact")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: compact")
-	}
-	request["compact"] = compactStr != "0"
-
-	request["info_hash"], err = q.String("info_hash")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: info_hash")
-	}
-
-	request["peer_id"], err = q.String("peer_id")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: peer_id")
-	}
-
-	request["left"], err = q.Uint64("left")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: left")
-	}
-
-	request["downloaded"], err = q.Uint64("downloaded")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: downloaded")
-	}
-
-	request["uploaded"], err = q.Uint64("uploaded")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: uploaded")
-	}
-
-	request["numwant"], err = q.String("numwant")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: numwant")
-	}
-
-	request["port"], err = q.Uint64("port")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: port")
-	}
-
-	request["ip"], err = q.String("ip")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: ip")
-	}
-
-	request["ipv4"], err = q.String("ipv4")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: ipv4")
-	}
-
-	request["ipv6"], err = q.String("ipv6")
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: ipv6")
-	}
-
-	return request, nil
-}
-
-// ScrapeRequest generates an chihaya.ScrapeRequeset with the parameters
-// provided by a query.
-func (q *Query) ScrapeRequest() (chihaya.ScrapeRequest, error) {
-	request := make(chihaya.ScrapeRequest)
-
-	var err error
-	request["info_hash"], err = q.Infohashes()
-	if err != nil {
-		return nil, errors.New("failed to parse parameter: info_hash")
-	}
-
-	return request, nil
+// InfoHashes returns a list of requested infohashes.
+func (q *Query) InfoHashes() []chihaya.InfoHash {
+	return q.infoHashes
 }
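A rough sketch of how a frontend drives the reworked parser, mirroring what request.go below does with r.URL.RawQuery; the query-string values here are made up for illustration:

package main

import (
	"fmt"

	"github.com/chihaya/chihaya/server/http/query"
)

func main() {
	// Raw announce query string, as taken from an HTTP request.
	raw := "info_hash=01234567890123456789&peer_id=-TEST01-6wfG2wk6wWLc&port=6881&left=4321"

	q, err := query.New(raw)
	if err != nil {
		fmt.Println(err)
		return
	}

	// info_hash values are now collected separately from the other parameters.
	fmt.Println(q.InfoHashes())

	peerID, _ := q.String("peer_id")
	port, _ := q.Uint64("port")
	fmt.Println(peerID, port)
}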
server/http/query/query_test.go (new file)
@@ -0,0 +1,100 @@
+// Copyright 2016 The Chihaya Authors. All rights reserved.
+// Use of this source code is governed by the BSD 2-Clause license,
+// which can be found in the LICENSE file.
+
+package query
+
+import (
+	"net/url"
+	"testing"
+)
+
+var (
+	baseAddr     = "https://www.subdomain.tracker.com:80/"
+	testInfoHash = "01234567890123456789"
+	testPeerID   = "-TEST01-6wfG2wk6wWLc"
+
+	ValidAnnounceArguments = []url.Values{
+		url.Values{"peer_id": {testPeerID}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}},
+		url.Values{"peer_id": {testPeerID}, "ip": {"192.168.0.1"}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}},
+		url.Values{"peer_id": {testPeerID}, "ip": {"192.168.0.1"}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "numwant": {"28"}},
+		url.Values{"peer_id": {testPeerID}, "ip": {"192.168.0.1"}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "event": {"stopped"}},
+		url.Values{"peer_id": {testPeerID}, "ip": {"192.168.0.1"}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "event": {"started"}, "numwant": {"13"}},
+		url.Values{"peer_id": {testPeerID}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "no_peer_id": {"1"}},
+		url.Values{"peer_id": {testPeerID}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "compact": {"0"}, "no_peer_id": {"1"}},
+		url.Values{"peer_id": {testPeerID}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "compact": {"0"}, "no_peer_id": {"1"}, "key": {"peerKey"}},
+		url.Values{"peer_id": {testPeerID}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "compact": {"0"}, "no_peer_id": {"1"}, "key": {"peerKey"}, "trackerid": {"trackerId"}},
+		url.Values{"peer_id": {"%3Ckey%3A+0x90%3E"}, "port": {"6881"}, "downloaded": {"1234"}, "left": {"4321"}, "compact": {"0"}, "no_peer_id": {"1"}, "key": {"peerKey"}, "trackerid": {"trackerId"}},
+		url.Values{"peer_id": {"%3Ckey%3A+0x90%3E"}, "compact": {"1"}},
+		url.Values{"peer_id": {""}, "compact": {""}},
+	}
+
+	InvalidQueries = []string{
+		baseAddr + "announce/?" + "info_hash=%0%a",
+	}
+)
+
+func mapArrayEqual(boxed map[string][]string, unboxed map[string]string) bool {
+	if len(boxed) != len(unboxed) {
+		return false
+	}
+
+	for mapKey, mapVal := range boxed {
+		// Always expect box to hold only one element
+		if len(mapVal) != 1 || mapVal[0] != unboxed[mapKey] {
+			return false
+		}
+	}
+
+	return true
+}
+
+func TestValidQueries(t *testing.T) {
+	for parseIndex, parseVal := range ValidAnnounceArguments {
+		parsedQueryObj, err := New(baseAddr + "announce/?" + parseVal.Encode())
+		if err != nil {
+			t.Error(err)
+		}
+
+		if !mapArrayEqual(parseVal, parsedQueryObj.params) {
+			t.Errorf("Incorrect parse at item %d.\n Expected=%v\n Recieved=%v\n", parseIndex, parseVal, parsedQueryObj.params)
+		}
+	}
+}
+
+func TestInvalidQueries(t *testing.T) {
+	for parseIndex, parseStr := range InvalidQueries {
+		parsedQueryObj, err := New(parseStr)
+		if err == nil {
+			t.Error("Should have produced error", parseIndex)
+		}
+
+		if parsedQueryObj != nil {
+			t.Error("Should be nil after error", parsedQueryObj, parseIndex)
+		}
+	}
+}
+
+func BenchmarkParseQuery(b *testing.B) {
+	for bCount := 0; bCount < b.N; bCount++ {
+		for parseIndex, parseStr := range ValidAnnounceArguments {
+			parsedQueryObj, err := New(baseAddr + "announce/?" + parseStr.Encode())
+			if err != nil {
+				b.Error(err, parseIndex)
+				b.Log(parsedQueryObj)
+			}
+		}
+	}
+}
+
+func BenchmarkURLParseQuery(b *testing.B) {
+	for bCount := 0; bCount < b.N; bCount++ {
+		for parseIndex, parseStr := range ValidAnnounceArguments {
+			parsedQueryObj, err := url.ParseQuery(baseAddr + "announce/?" + parseStr.Encode())
+			if err != nil {
+				b.Error(err, parseIndex)
+				b.Log(parsedQueryObj)
+			}
+		}
+	}
+}
server/http/request.go (new file)
@@ -0,0 +1,171 @@
+// Copyright 2016 The Chihaya Authors. All rights reserved.
+// Use of this source code is governed by the BSD 2-Clause license,
+// which can be found in the LICENSE file.
+
+package http
+
+import (
+	"net"
+	"net/http"
+
+	"github.com/chihaya/chihaya"
+	"github.com/chihaya/chihaya/errors"
+	"github.com/chihaya/chihaya/pkg/event"
+	"github.com/chihaya/chihaya/server/http/query"
+)
+
+func announceRequest(r *http.Request, cfg *httpConfig) (*chihaya.AnnounceRequest, error) {
+	q, err := query.New(r.URL.RawQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	request := &chihaya.AnnounceRequest{Params: q}
+
+	eventStr, err := q.String("event")
+	if err != nil {
+		return nil, errors.NewBadRequest("failed to parse parameter: event")
+	}
+	request.Event, err = event.New(eventStr)
+	if err != nil {
+		return nil, errors.NewBadRequest("failed to provide valid client event")
+	}
+
+	compactStr, err := q.String("compact")
+	if err != nil {
+		return nil, errors.NewBadRequest("failed to parse parameter: compact")
+	}
+	request.Compact = compactStr != "0"
+
+	infoHashes := q.InfoHashes()
+	if len(infoHashes) < 1 {
+		return nil, errors.NewBadRequest("no info_hash parameter supplied")
+	}
+	if len(infoHashes) > 1 {
+		return nil, errors.NewBadRequest("multiple info_hash parameters supplied")
+	}
+	request.InfoHash = infoHashes[0]
+
+	peerID, err := q.String("peer_id")
+	if err != nil {
+		return nil, errors.NewBadRequest("failed to parse parameter: peer_id")
+	}
+	request.PeerID = chihaya.PeerID(peerID)
+
+	request.Left, err = q.Uint64("left")
+	if err != nil {
+		return nil, errors.NewBadRequest("failed to parse parameter: left")
+	}
+
+	request.Downloaded, err = q.Uint64("downloaded")
+	if err != nil {
+		return nil, errors.NewBadRequest("failed to parse parameter: downloaded")
+	}
+
+	request.Uploaded, err = q.Uint64("uploaded")
+	if err != nil {
+		return nil, errors.NewBadRequest("failed to parse parameter: uploaded")
+	}
+
+	request.NumWant, err = q.Uint64("numwant")
+	if err != nil {
+		return nil, errors.NewBadRequest("failed to parse parameter: numwant")
+	}
+
+	port, err := q.Uint64("port")
+	if err != nil {
+		return nil, errors.NewBadRequest("failed to parse parameter: port")
+	}
+	request.Port = uint16(port)
+
+	return request, nil
+}
+
+func scrapeRequest(r *http.Request, cfg *httpConfig) (*chihaya.ScrapeRequest, error) {
+	q, err := query.New(r.URL.RawQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	request := &chihaya.ScrapeRequest{
+		InfoHashes: q.InfoHashes(),
+		Params:     q,
+	}
+
+	return request, nil
+}
+
+// requestedIP returns the IP address for a request. If there are multiple in
+// the request, one IPv4 and one IPv6 will be returned.
+func requestedIP(q *query.Query, r *http.Request, cfg *httpConfig) (v4, v6 net.IP, err error) {
+	var done bool
+
+	if cfg.AllowIPSpoofing {
+		if str, e := q.String("ip"); e == nil {
+			if v4, v6, done = getIPs(str, v4, v6, cfg); done {
+				return
+			}
+		}
+
+		if str, e := q.String("ipv4"); e == nil {
+			if v4, v6, done = getIPs(str, v4, v6, cfg); done {
+				return
+			}
+		}
+
+		if str, e := q.String("ipv6"); e == nil {
+			if v4, v6, done = getIPs(str, v4, v6, cfg); done {
+				return
+			}
+		}
+	}
+
+	if cfg.RealIPHeader != "" {
+		if xRealIPs, ok := r.Header[cfg.RealIPHeader]; ok {
+			if v4, v6, done = getIPs(string(xRealIPs[0]), v4, v6, cfg); done {
+				return
+			}
+		}
+	} else {
+		if r.RemoteAddr == "" && v4 == nil {
+			if v4, v6, done = getIPs("127.0.0.1", v4, v6, cfg); done {
+				return
+			}
+		}
+
+		if v4, v6, done = getIPs(r.RemoteAddr, v4, v6, cfg); done {
+			return
+		}
+	}
+
+	if v4 == nil && v6 == nil {
+		err = errors.NewBadRequest("failed to parse IP address")
+	}
+
+	return
+}
+
+func getIPs(ipstr string, ipv4, ipv6 net.IP, cfg *httpConfig) (net.IP, net.IP, bool) {
+	host, _, err := net.SplitHostPort(ipstr)
+	if err != nil {
+		host = ipstr
+	}
+
+	if ip := net.ParseIP(host); ip != nil {
+		ipTo4 := ip.To4()
+		if ipv4 == nil && ipTo4 != nil {
+			ipv4 = ipTo4
+		} else if ipv6 == nil && ipTo4 == nil {
+			ipv6 = ip
+		}
+	}
+
+	var done bool
+	if cfg.DualStackedPeers {
+		done = ipv4 != nil && ipv6 != nil
+	} else {
+		done = ipv4 != nil || ipv6 != nil
+	}
+
+	return ipv4, ipv6, done
+}
@@ -15,7 +15,6 @@ import (
 
 	"github.com/chihaya/chihaya/config"
 	"github.com/chihaya/chihaya/server"
-	"github.com/chihaya/chihaya/server/http/query"
 	"github.com/chihaya/chihaya/tracker"
 )
 
@@ -99,49 +98,33 @@ func (s *httpServer) routes() *httprouter.Router {
 }
 
 func (s *httpServer) serveAnnounce(w http.ResponseWriter, r *http.Request, p httprouter.Params) {
-	writer := &writer{w}
-
-	q, err := query.New(r.URL.RawQuery)
-	if err != nil {
-		writer.writeError(err)
-		return
-	}
-
-	req, err := q.AnnounceRequest()
+	req, err := announceRequest(r, s.cfg)
 	if err != nil {
-		writer.writeError(err)
+		writeError(w, err)
 		return
 	}
 
 	resp, err := s.tkr.HandleAnnounce(req)
 	if err != nil {
-		writer.writeError(err)
+		writeError(w, err)
 		return
 	}
 
-	writer.writeAnnounceResponse(resp)
+	writeAnnounceResponse(w, resp)
 }
 
 func (s *httpServer) serveScrape(w http.ResponseWriter, r *http.Request, p httprouter.Params) {
-	writer := &writer{w}
-
-	q, err := query.New(r.URL.RawQuery)
-	if err != nil {
-		writer.writeError(err)
-		return
-	}
-
-	req, err := q.ScrapeRequest()
+	req, err := scrapeRequest(r, s.cfg)
 	if err != nil {
-		writer.writeError(err)
+		writeError(w, err)
 		return
 	}
 
 	resp, err := s.tkr.HandleScrape(req)
 	if err != nil {
-		writer.writeError(err)
+		writeError(w, err)
 		return
 	}
 
-	writer.writeScrapeResponse(resp)
+	writeScrapeResponse(w, resp)
 }
@@ -8,18 +8,28 @@ import (
 	"net/http"
 
 	"github.com/chihaya/chihaya"
+	"github.com/chihaya/chihaya/errors"
 	"github.com/chihaya/chihaya/pkg/bencode"
 )
 
-type writer struct{ http.ResponseWriter }
+func writeError(w http.ResponseWriter, err error) error {
+	message := "internal server error"
+	chihayaErr, ok := err.(*errors.Error)
+
+	if ok {
+		w.WriteHeader(chihayaErr.Status())
+
+		if chihayaErr.Public() {
+			message = err.Error()
+		}
+	}
 
-func (w *writer) writeError(err error) error {
 	return bencode.NewEncoder(w).Encode(bencode.Dict{
-		"failure reason": err.Error(),
+		"failure reason": message,
 	})
 }
 
-func (w *writer) writeAnnounceResponse(resp *chihaya.AnnounceResponse) error {
+func writeAnnounceResponse(w http.ResponseWriter, resp *chihaya.AnnounceResponse) error {
 	bdict := bencode.Dict{
 		"complete":   resp.Complete,
 		"incomplete": resp.Incomplete,
@@ -63,7 +73,7 @@ func writeAnnounceResponse(w http.ResponseWriter, resp *chihaya.AnnounceResponse) error {
 	return bencode.NewEncoder(w).Encode(bdict)
 }
 
-func (w *writer) writeScrapeResponse(resp *chihaya.ScrapeResponse) error {
+func writeScrapeResponse(w http.ResponseWriter, resp *chihaya.ScrapeResponse) error {
 	filesDict := bencode.NewDict()
 	for infohash, scrape := range resp.Files {
 		filesDict[infohash] = bencode.Dict{
@@ -5,10 +5,10 @@
 package http
 
 import (
-	"errors"
 	"net/http/httptest"
 	"testing"
 
+	"github.com/chihaya/chihaya/errors"
 	"github.com/stretchr/testify/assert"
 )
 
@@ -22,9 +22,17 @@ func TestWriteError(t *testing.T) {
 
 	for _, tt := range table {
 		r := httptest.NewRecorder()
-		w := &writer{r}
-		err := w.writeError(errors.New(tt.reason))
-		assert.Nil(t, err, "writeError should not fail with test input")
-		assert.Equal(t, r.Body.String(), tt.expected, "writer should write the expected value")
+		err := writeError(r, errors.NewMessage(tt.reason))
+		assert.Nil(t, err)
+		assert.Equal(t, r.Body.String(), tt.expected)
+		assert.Equal(t, r.Code, 200)
 	}
 }
+
+func TestWriteStatus(t *testing.T) {
+	r := httptest.NewRecorder()
+	err := writeError(r, errors.NewBadRequest("something is missing"))
+	assert.Nil(t, err)
+	assert.Equal(t, r.Body.String(), "d14:failure reason20:something is missinge")
+	assert.Equal(t, r.Code, 400)
+}
@@ -52,9 +52,6 @@ func newPeerStoreConfig(storecfg *store.Config) (*peerStoreConfig, error) {
 	return &cfg, nil
 }
 
-const seedersSuffix = "-s"
-const leechersSuffix = "-l"
-
 type peer struct {
 	chihaya.Peer
 	LastAction time.Time
@@ -71,16 +68,28 @@ type peerStore struct {
 
 var _ store.PeerStore = &peerStore{}
 
-func (s *peerStore) shardIndex(infohash string) uint32 {
+func (s *peerStore) shardIndex(infoHash chihaya.InfoHash) uint32 {
 	idx := fnv.New32()
-	idx.Write([]byte(infohash))
+	idx.Write([]byte(infoHash))
 	return idx.Sum32() % uint32(len(s.shards))
 }
 
-func (s *peerStore) PutSeeder(infohash string, p chihaya.Peer) error {
-	key := infohash + seedersSuffix
+func peerKey(p chihaya.Peer) string {
+	return string(p.IP) + string(p.ID)
+}
+
+func seedersKey(infoHash chihaya.InfoHash) string {
+	return string(infoHash) + "-s"
+}
+
+func leechersKey(infoHash chihaya.InfoHash) string {
+	return string(infoHash) + "-l"
+}
+
+func (s *peerStore) PutSeeder(infoHash chihaya.InfoHash, p chihaya.Peer) error {
+	key := seedersKey(infoHash)
 
-	shard := s.shards[s.shardIndex(infohash)]
+	shard := s.shards[s.shardIndex(infoHash)]
 	shard.Lock()
 	defer shard.Unlock()
 
@@ -88,7 +97,7 @@ func (s *peerStore) PutSeeder(infohash string, p chihaya.Peer) error {
 		shard.peers[key] = make(map[string]peer)
 	}
 
-	shard.peers[key][p.ID] = peer{
+	shard.peers[key][peerKey(p)] = peer{
 		Peer:       p,
 		LastAction: time.Now(),
 	}
@@ -96,10 +105,10 @@ func (s *peerStore) PutSeeder(infohash string, p chihaya.Peer) error {
 	return nil
 }
 
-func (s *peerStore) DeleteSeeder(infohash, peerID string) error {
-	key := infohash + seedersSuffix
+func (s *peerStore) DeleteSeeder(infoHash chihaya.InfoHash, p chihaya.Peer) error {
+	key := seedersKey(infoHash)
 
-	shard := s.shards[s.shardIndex(infohash)]
+	shard := s.shards[s.shardIndex(infoHash)]
 	shard.Lock()
 	defer shard.Unlock()
 
@@ -107,7 +116,7 @@ func (s *peerStore) DeleteSeeder(infohash, peerID string) error {
 		return nil
 	}
 
-	delete(shard.peers[key], peerID)
+	delete(shard.peers[key], peerKey(p))
 
 	if len(shard.peers[key]) == 0 {
 		shard.peers[key] = nil
@@ -116,10 +125,10 @@ func (s *peerStore) DeleteSeeder(infohash, peerID string) error {
 	return nil
 }
 
-func (s *peerStore) PutLeecher(infohash string, p chihaya.Peer) error {
-	key := infohash + leechersSuffix
+func (s *peerStore) PutLeecher(infoHash chihaya.InfoHash, p chihaya.Peer) error {
+	key := leechersKey(infoHash)
 
-	shard := s.shards[s.shardIndex(infohash)]
+	shard := s.shards[s.shardIndex(infoHash)]
 	shard.Lock()
 	defer shard.Unlock()
 
@@ -127,7 +136,7 @@ func (s *peerStore) PutLeecher(infohash string, p chihaya.Peer) error {
 		shard.peers[key] = make(map[string]peer)
 	}
 
-	shard.peers[key][p.ID] = peer{
+	shard.peers[key][peerKey(p)] = peer{
 		Peer:       p,
 		LastAction: time.Now(),
 	}
@@ -135,10 +144,10 @@ func (s *peerStore) PutLeecher(infohash string, p chihaya.Peer) error {
 	return nil
 }
 
-func (s *peerStore) DeleteLeecher(infohash, peerID string) error {
-	key := infohash + leechersSuffix
+func (s *peerStore) DeleteLeecher(infoHash chihaya.InfoHash, p chihaya.Peer) error {
+	key := leechersKey(infoHash)
 
-	shard := s.shards[s.shardIndex(infohash)]
+	shard := s.shards[s.shardIndex(infoHash)]
 	shard.Lock()
 	defer shard.Unlock()
 
@@ -146,7 +155,7 @@ func (s *peerStore) DeleteLeecher(infohash, peerID string) error {
 		return nil
 	}
 
-	delete(shard.peers[key], peerID)
+	delete(shard.peers[key], peerKey(p))
 
 	if len(shard.peers[key]) == 0 {
 		shard.peers[key] = nil
@@ -155,23 +164,23 @@ func (s *peerStore) DeleteLeecher(infohash, peerID string) error {
 	return nil
 }
 
-func (s *peerStore) GraduateLeecher(infohash string, p chihaya.Peer) error {
-	leecherKey := infohash + leechersSuffix
-	seederKey := infohash + seedersSuffix
+func (s *peerStore) GraduateLeecher(infoHash chihaya.InfoHash, p chihaya.Peer) error {
+	lkey := leechersKey(infoHash)
+	skey := seedersKey(infoHash)
 
-	shard := s.shards[s.shardIndex(infohash)]
+	shard := s.shards[s.shardIndex(infoHash)]
 	shard.Lock()
 	defer shard.Unlock()
 
-	if shard.peers[leecherKey] != nil {
-		delete(shard.peers[leecherKey], p.ID)
+	if shard.peers[lkey] != nil {
+		delete(shard.peers[lkey], peerKey(p))
 	}
 
-	if shard.peers[seederKey] == nil {
-		shard.peers[seederKey] = make(map[string]peer)
+	if shard.peers[skey] == nil {
+		shard.peers[skey] = make(map[string]peer)
 	}
 
-	shard.peers[seederKey][p.ID] = peer{
+	shard.peers[skey][peerKey(p)] = peer{
 		Peer:       p,
 		LastAction: time.Now(),
 	}
@@ -191,16 +200,13 @@ func (s *peerStore) CollectGarbage(cutoff time.Time) error {
 
 	for _, key := range keys {
 		shard.Lock()
-		var peersToDelete []string
-		for peerID, p := range shard.peers[key] {
+		for peerKey, p := range shard.peers[key] {
 			if p.LastAction.Before(cutoff) {
-				peersToDelete = append(peersToDelete, peerID)
+				delete(shard.peers[key], peerKey)
 			}
 		}
 
-		for _, peerID := range peersToDelete {
-			delete(shard.peers[key], peerID)
-		}
 		shard.Unlock()
 		runtime.Gosched()
 	}
@@ -211,17 +217,17 @@ func (s *peerStore) CollectGarbage(cutoff time.Time) error {
 	return nil
 }
 
-func (s *peerStore) AnnouncePeers(infohash string, seeder bool, numWant int) (peers, peers6 []chihaya.Peer, err error) {
-	leecherKey := infohash + leechersSuffix
-	seederKey := infohash + seedersSuffix
+func (s *peerStore) AnnouncePeers(infoHash chihaya.InfoHash, seeder bool, numWant int) (peers, peers6 []chihaya.Peer, err error) {
+	lkey := leechersKey(infoHash)
+	skey := seedersKey(infoHash)
 
-	shard := s.shards[s.shardIndex(infohash)]
+	shard := s.shards[s.shardIndex(infoHash)]
 	shard.RLock()
 	defer shard.RUnlock()
 
 	if seeder {
 		// Append leechers as possible.
-		leechers := shard.peers[leecherKey]
+		leechers := shard.peers[lkey]
 		for _, p := range leechers {
 			if numWant == 0 {
 				break
@@ -236,7 +242,7 @@ func (s *peerStore) AnnouncePeers(infohash string, seeder bool, numWant int) (peers, peers6 []chihaya.Peer, err error) {
 		}
 	} else {
 		// Append as many seeders as possible.
-		seeders := shard.peers[seederKey]
+		seeders := shard.peers[skey]
 		for _, p := range seeders {
 			if numWant == 0 {
 				break
@@ -251,7 +257,7 @@ func (s *peerStore) AnnouncePeers(infohash string, seeder bool, numWant int) (peers, peers6 []chihaya.Peer, err error) {
 		}
 
 		// Append leechers until we reach numWant.
-		leechers := shard.peers[leecherKey]
+		leechers := shard.peers[lkey]
 		if numWant > 0 {
 			for _, p := range leechers {
 				if numWant == 0 {
@@ -15,14 +15,14 @@ var peerStoreDrivers = make(map[string]PeerStoreDriver)
 
 // PeerStore represents an interface for manipulating peers.
 type PeerStore interface {
-	PutSeeder(infohash string, p chihaya.Peer) error
-	DeleteSeeder(infohash, peerID string) error
+	PutSeeder(infoHash chihaya.InfoHash, p chihaya.Peer) error
+	DeleteSeeder(infoHash chihaya.InfoHash, peerID chihaya.Peer) error
 
-	PutLeecher(infohash string, p chihaya.Peer) error
-	DeleteLeecher(infohash, peerID string) error
+	PutLeecher(infoHash chihaya.InfoHash, p chihaya.Peer) error
+	DeleteLeecher(infoHash chihaya.InfoHash, peerID chihaya.Peer) error
 
-	GraduateLeecher(infohash string, p chihaya.Peer) error
-	AnnouncePeers(infohash string, seeder bool, numWant int) (peers, peers6 []chihaya.Peer, err error)
+	GraduateLeecher(infoHash chihaya.InfoHash, p chihaya.Peer) error
+	AnnouncePeers(infoHash chihaya.InfoHash, seeder bool, numWant int) (peers, peers6 []chihaya.Peer, err error)
 	CollectGarbage(cutoff time.Time) error
 }
 
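A hypothetical helper (not part of this commit) showing the typed PeerStore signatures in use; per the memory implementation above, announcing with seeder set to true returns leechers for the swarm:

package example

import (
	"fmt"

	"github.com/chihaya/chihaya"
	"github.com/chihaya/chihaya/server/store"
)

// registerSeeder stores a peer as a seeder and fetches peers to hand back.
// An InfoHash and a full Peer are passed around instead of bare strings.
func registerSeeder(ps store.PeerStore, infoHash chihaya.InfoHash, p chihaya.Peer) error {
	if err := ps.PutSeeder(infoHash, p); err != nil {
		return err
	}

	// A seeder only needs leechers back; ask for up to 50.
	peers, peers6, err := ps.AnnouncePeers(infoHash, true, 50)
	if err != nil {
		return err
	}

	fmt.Printf("returning %d IPv4 and %d IPv6 peers\n", len(peers), len(peers6))
	return nil
}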
@@ -11,7 +11,7 @@ import (
 
 // AnnounceHandler is a function that operates on an AnnounceResponse before it
 // has been delivered to a client.
-type AnnounceHandler func(*config.TrackerConfig, chihaya.AnnounceRequest, *chihaya.AnnounceResponse) error
+type AnnounceHandler func(*config.TrackerConfig, *chihaya.AnnounceRequest, *chihaya.AnnounceResponse) error
 
 // AnnounceMiddleware is higher-order AnnounceHandler used to implement modular
 // behavior processing an announce.
@@ -24,7 +24,7 @@ func (c *announceChain) Append(mw ...AnnounceMiddleware) {
 }
 
 func (c *announceChain) Handler() AnnounceHandler {
-	final := func(cfg *config.TrackerConfig, req chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error {
+	final := func(cfg *config.TrackerConfig, req *chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error {
 		return nil
 	}
 	for i := len(c.mw) - 1; i >= 0; i-- {
@@ -54,7 +54,7 @@ func RegisterAnnounceMiddleware(name string, mw AnnounceMiddleware) {
 
 // ScrapeHandler is a middleware function that operates on a ScrapeResponse
 // before it has been delivered to a client.
-type ScrapeHandler func(*config.TrackerConfig, chihaya.ScrapeRequest, *chihaya.ScrapeResponse) error
+type ScrapeHandler func(*config.TrackerConfig, *chihaya.ScrapeRequest, *chihaya.ScrapeResponse) error
 
 // ScrapeMiddleware is higher-order ScrapeHandler used to implement modular
 // behavior processing a scrape.
@@ -67,7 +67,7 @@ func (c *scrapeChain) Append(mw ...ScrapeMiddleware) {
 }
 
 func (c *scrapeChain) Handler() ScrapeHandler {
-	final := func(cfg *config.TrackerConfig, req chihaya.ScrapeRequest, resp *chihaya.ScrapeResponse) error {
+	final := func(cfg *config.TrackerConfig, req *chihaya.ScrapeRequest, resp *chihaya.ScrapeResponse) error {
 		return nil
 	}
 	for i := len(c.mw) - 1; i >= 0; i-- {
@@ -14,7 +14,7 @@ import (
 )
 
 func testAnnounceMW1(next AnnounceHandler) AnnounceHandler {
-	return func(cfg *config.TrackerConfig, req chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error {
+	return func(cfg *config.TrackerConfig, req *chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error {
 		resp.IPv4Peers = append(resp.IPv4Peers, chihaya.Peer{
 			Port: 1,
 		})
@@ -23,7 +23,7 @@ func testAnnounceMW1(next AnnounceHandler) AnnounceHandler {
 }
 
 func testAnnounceMW2(next AnnounceHandler) AnnounceHandler {
-	return func(cfg *config.TrackerConfig, req chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error {
+	return func(cfg *config.TrackerConfig, req *chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error {
 		resp.IPv4Peers = append(resp.IPv4Peers, chihaya.Peer{
 			Port: 2,
 		})
@@ -32,7 +32,7 @@ func testAnnounceMW2(next AnnounceHandler) AnnounceHandler {
 }
 
 func testAnnounceMW3(next AnnounceHandler) AnnounceHandler {
-	return func(cfg *config.TrackerConfig, req chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error {
+	return func(cfg *config.TrackerConfig, req *chihaya.AnnounceRequest, resp *chihaya.AnnounceResponse) error {
 		resp.IPv4Peers = append(resp.IPv4Peers, chihaya.Peer{
 			Port: 3,
 		})
@@ -47,7 +47,7 @@ func TestAnnounceChain(t *testing.T) {
 	achain.Append(testAnnounceMW3)
 	handler := achain.Handler()
 	resp := &chihaya.AnnounceResponse{}
-	err := handler(nil, chihaya.AnnounceRequest{}, resp)
+	err := handler(nil, &chihaya.AnnounceRequest{}, resp)
 	assert.Nil(t, err, "the handler should not return an error")
 	assert.Equal(t, resp.IPv4Peers, []chihaya.Peer{chihaya.Peer{Port: 1}, chihaya.Peer{Port: 2}, chihaya.Peer{Port: 3}}, "the list of peers added from the middleware should be in the same order.")
 }
@@ -49,7 +49,7 @@ func NewTracker(cfg *config.TrackerConfig) (*Tracker, error) {
 
 // HandleAnnounce runs an AnnounceRequest through a Tracker's middleware and
 // returns the result.
-func (t *Tracker) HandleAnnounce(req chihaya.AnnounceRequest) (*chihaya.AnnounceResponse, error) {
+func (t *Tracker) HandleAnnounce(req *chihaya.AnnounceRequest) (*chihaya.AnnounceResponse, error) {
 	resp := &chihaya.AnnounceResponse{}
 	err := t.handleAnnounce(t.cfg, req, resp)
 	return resp, err
@@ -57,7 +57,7 @@ func (t *Tracker) HandleAnnounce(req *chihaya.AnnounceRequest) (*chihaya.AnnounceResponse, error) {
 
 // HandleScrape runs a ScrapeRequest through a Tracker's middleware and returns
 // the result.
-func (t *Tracker) HandleScrape(req chihaya.ScrapeRequest) (*chihaya.ScrapeResponse, error) {
+func (t *Tracker) HandleScrape(req *chihaya.ScrapeRequest) (*chihaya.ScrapeResponse, error) {
 	resp := &chihaya.ScrapeResponse{}
 	err := t.handleScrape(t.cfg, req, resp)
 	return resp, err
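A rough end-to-end sketch (not from the commit) of the pointer-based handler flow: build a tracker from the default config and push a typed announce request through it. The info hash and peer ID values are the illustrative ones used in the query tests.

package main

import (
	"fmt"

	"github.com/chihaya/chihaya"
	"github.com/chihaya/chihaya/config"
	"github.com/chihaya/chihaya/pkg/event"
	"github.com/chihaya/chihaya/tracker"
)

func main() {
	// Build a tracker from the default config; its middleware lists are empty.
	tkr, err := tracker.NewTracker(&config.DefaultConfig.Tracker)
	if err != nil {
		panic(err)
	}

	// Requests are now passed by pointer through the middleware chain.
	req := &chihaya.AnnounceRequest{
		Event:    event.Started,
		InfoHash: chihaya.InfoHash("01234567890123456789"),
		PeerID:   chihaya.PeerID("-TEST01-6wfG2wk6wWLc"),
		Port:     6881,
	}

	resp, err := tkr.HandleAnnounce(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.Complete, resp.Incomplete)
}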