bittorrent: fix out-of-range panics for URL parsing
parent 2dcb4344cb
commit 2764717657
2 changed files with 69 additions and 73 deletions
@@ -88,69 +88,45 @@ func ParseURLData(urlData string) (*QueryParams, error) {

 // parseQuery parses a URL query into QueryParams.
 // The query is expected to exclude the delimiting '?'.
-func parseQuery(rawQuery string) (*QueryParams, error) {
-	var (
-		keyStart, keyEnd int
-		valStart, valEnd int
-
-		onKey = true
-
-		q = &QueryParams{
-			query:      rawQuery,
-			infoHashes: nil,
-			params:     make(map[string]string),
-		}
-	)
-
-	for i, length := 0, len(rawQuery); i < length; i++ {
-		separator := rawQuery[i] == '&' || rawQuery[i] == ';'
-		last := i == length-1
-
-		if separator || last {
-			if onKey && !last {
-				keyStart = i + 1
-				continue
-			}
-
-			if last && !separator && !onKey {
-				valEnd = i
-			}
-
-			keyStr, err := url.QueryUnescape(rawQuery[keyStart : keyEnd+1])
-			if err != nil {
-				return nil, err
-			}
-
-			var valStr string
-			if valEnd > 0 {
-				valStr, err = url.QueryUnescape(rawQuery[valStart : valEnd+1])
-				if err != nil {
-					return nil, err
-				}
-			}
-
-			if keyStr == "info_hash" {
-				if len(valStr) != 20 {
-					return nil, ErrInvalidInfohash
-				}
-				q.infoHashes = append(q.infoHashes, InfoHashFromString(valStr))
-			} else {
-				q.params[strings.ToLower(keyStr)] = valStr
-			}
-
-			valEnd = 0
-			onKey = true
-			keyStart = i + 1
-		} else if rawQuery[i] == '=' {
-			onKey = false
-			valStart = i + 1
-			valEnd = 0
-		} else if onKey {
-			keyEnd = i
-		} else {
-			valEnd = i
-		}
-	}
+func parseQuery(query string) (q *QueryParams, err error) {
+	// This is basically url.ParseQuery, but with a map[string]string
+	// instead of map[string][]string for the values.
+	q = &QueryParams{
+		query:      query,
+		infoHashes: nil,
+		params:     make(map[string]string),
+	}
+
+	for query != "" {
+		key := query
+		if i := strings.IndexAny(key, "&;"); i >= 0 {
+			key, query = key[:i], key[i+1:]
+		} else {
+			query = ""
+		}
+		if key == "" {
+			continue
+		}
+
+		value := ""
+		if i := strings.Index(key, "="); i >= 0 {
+			key, value = key[:i], key[i+1:]
+		}
+
+		key, err = url.QueryUnescape(key)
+		if err != nil {
+			return nil, err
+		}
+		value, err = url.QueryUnescape(value)
+		if err != nil {
+			return nil, err
+		}
+
+		if key == "info_hash" {
+			if len(value) != 20 {
+				return nil, ErrInvalidInfohash
+			}
+			q.infoHashes = append(q.infoHashes, InfoHashFromString(value))
+		} else {
+			q.params[strings.ToLower(key)] = value
+		}
+	}
 }
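Why the removed parser could panic: it tracked key/value boundaries with raw indices into rawQuery (keyStart, keyEnd, valStart, valEnd). For a query whose last pair is a bare key such as "&a", or one that opens directly with '=' such as "&=b?", the loop reaches the final byte before keyEnd has been updated for that pair, so keyEnd still points into the previous pair and rawQuery[keyStart : keyEnd+1] is sliced with keyStart greater than keyEnd+1, a runtime "slice bounds out of range" panic. The replacement never keeps an index across segments: it repeatedly cuts the query at '&' or ';', then cuts each segment at its first '=', mirroring the standard library's url.ParseQuery. Below is a minimal, self-contained sketch of that loop; parseLoose is a hypothetical name, and it returns a plain map[string]string instead of the package's QueryParams type:

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// parseLoose is an illustrative stand-in for the patched parseQuery:
// cut the query at '&' or ';', then cut each segment at the first '='.
// No index outlives its segment, so malformed trailing fragments such
// as "&a" or "&=b?" cannot produce an out-of-range slice.
func parseLoose(query string) (map[string]string, error) {
	params := make(map[string]string)
	for query != "" {
		key := query
		if i := strings.IndexAny(key, "&;"); i >= 0 {
			key, query = key[:i], key[i+1:]
		} else {
			query = ""
		}
		if key == "" {
			continue // skip empty segments, e.g. "a&&b"
		}
		value := ""
		if i := strings.Index(key, "="); i >= 0 {
			key, value = key[:i], key[i+1:]
		}
		key, err := url.QueryUnescape(key)
		if err != nil {
			return nil, err
		}
		value, err = url.QueryUnescape(value)
		if err != nil {
			return nil, err
		}
		params[strings.ToLower(key)] = value
	}
	return params, nil
}

func main() {
	// The input shapes from issue #334 that crashed the index-based parser.
	for _, q := range []string{"info_hash=xxxxxxxxxxxxxxxxxxxx&a", "info_hash=xxxxxxxxxxxxxxxxxxxx&=b?"} {
		fmt.Println(parseLoose(q))
	}
}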
@@ -27,6 +27,12 @@ var (
 	InvalidQueries = []string{
 		"/announce?" + "info_hash=%0%a",
 	}
+
+	// See https://github.com/chihaya/chihaya/issues/334.
+	shouldNotPanicQueries = []string{
+		"/annnounce?" + "info_hash=" + testPeerID + "&a",
+		"/annnounce?" + "info_hash=" + testPeerID + "&=b?",
+	}
 )

 func mapArrayEqual(boxed map[string][]string, unboxed map[string]string) bool {
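Both regression fixtures exercise the stale-index path described above: "&a" ends the query with a one-byte key that is also the final byte of the string, and "&=b?" opens a pair directly with '='. In either shape the removed parser reached the key-unescaping step with keyEnd still pointing into the previous pair, which is what sent rawQuery[keyStart : keyEnd+1] out of range.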
@@ -84,26 +90,40 @@ func TestParseInvalidURLData(t *testing.T) {
 	}
 }

+func TestParseShouldNotPanicURLData(t *testing.T) {
+	for _, parseStr := range shouldNotPanicQueries {
+		ParseURLData(parseStr)
+	}
+}
+
 func BenchmarkParseQuery(b *testing.B) {
+	announceStrings := make([]string, 0)
+	for i := range ValidAnnounceArguments {
+		announceStrings = append(announceStrings, ValidAnnounceArguments[i].Encode())
+	}
+	b.ResetTimer()
 	for bCount := 0; bCount < b.N; bCount++ {
-		for parseIndex, parseStr := range ValidAnnounceArguments {
-			parsedQueryObj, err := parseQuery(parseStr.Encode())
-			if err != nil {
-				b.Error(err, parseIndex)
-				b.Log(parsedQueryObj)
-			}
+		i := bCount % len(announceStrings)
+		parsedQueryObj, err := parseQuery(announceStrings[i])
+		if err != nil {
+			b.Error(err, i)
+			b.Log(parsedQueryObj)
 		}
 	}
 }

 func BenchmarkURLParseQuery(b *testing.B) {
+	announceStrings := make([]string, 0)
+	for i := range ValidAnnounceArguments {
+		announceStrings = append(announceStrings, ValidAnnounceArguments[i].Encode())
+	}
+	b.ResetTimer()
 	for bCount := 0; bCount < b.N; bCount++ {
-		for parseIndex, parseStr := range ValidAnnounceArguments {
-			parsedQueryObj, err := url.ParseQuery(parseStr.Encode())
-			if err != nil {
-				b.Error(err, parseIndex)
-				b.Log(parsedQueryObj)
-			}
+		i := bCount % len(announceStrings)
+		parsedQueryObj, err := url.ParseQuery(announceStrings[i])
+		if err != nil {
+			b.Error(err, i)
+			b.Log(parsedQueryObj)
 		}
 	}
 }
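The benchmark changes are a measurement fix rather than part of the panic fix: parseStr.Encode() previously ran inside the timed loop, so every iteration paid for re-encoding ValidAnnounceArguments as well as parsing. Building announceStrings once and calling b.ResetTimer() afterwards leaves only parseQuery (or url.ParseQuery in the comparison benchmark) inside the measured region, while the bCount modulo index still cycles through every input.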