[rpc mempool] More tweaks to dynamicMemUsage(). Add toggleable assertions for max depth and switch completeness. Toggle them when running in mempool_test.go. Drop support for reflect.Map, as it's not needed at this time.

Jonathan Moody 2022-07-18 11:52:15 -04:00 committed by Roy Lee
parent eefb1260eb
commit 7f9fe4b970
2 changed files with 43 additions and 36 deletions
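
The crawler touched by this commit estimates the dynamically allocated size of a value by walking it with the reflect package. A minimal sketch of how it might be driven, assuming only what the diff shows (dynamicMemUsage takes a reflect.Value and returns a byte estimate); the memUsageDemo type is hypothetical, for illustration only:

package mempool

import "reflect"

// memUsageDemo is a hypothetical type exercising each kind the crawler
// supports: fixed arrays, byte slices, and (possibly nil) pointers.
type memUsageDemo struct {
	Hash    [32]byte      // array: already counted in place via t.Size()
	Payload []byte        // slice: backing bytes added via the Uint8 short circuit
	Next    *memUsageDemo // pointer: chased only when non-nil
}

func demoUsage() uintptr {
	d := &memUsageDemo{Payload: make([]byte, 64)}
	// Pointers and interfaces are dereferenced recursively, up to
	// dynamicMemUsageMaxDepth levels after this commit.
	return dynamicMemUsage(reflect.ValueOf(d))
}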

mempool_test.go

@@ -21,6 +21,12 @@ import (
 	btcutil "github.com/lbryio/lbcutil"
 )
 
+func init() {
+	// Toggle assert & debug messages when running tests.
+	dynamicMemUsageAssert = true
+	dynamicMemUsageDebug = false
+}
+
 // fakeChain is used by the pool harness to provide generated test utxos and
 // a current faked chain height to the pool callbacks. This, in turn, allows
 // transactions to appear as though they are spending completely valid utxos.

View file

@@ -5,83 +5,84 @@
 package mempool
 
 import (
+	"fmt"
 	"reflect"
 )
 
+var (
+	dynamicMemUsageAssert   = false
+	dynamicMemUsageDebug    = false
+	dynamicMemUsageMaxDepth = 10
+)
+
 func dynamicMemUsage(v reflect.Value) uintptr {
-	return _dynamicMemUsage(v, false, 0)
+	return dynamicMemUsageCrawl(v, 0)
 }
 
-func _dynamicMemUsage(v reflect.Value, debug bool, level int) uintptr {
+func dynamicMemUsageCrawl(v reflect.Value, depth int) uintptr {
 	t := v.Type()
 	bytes := t.Size()
 
-	if debug {
-		println("[", level, "]", t.Kind().String(), "(", t.String(), ") ->", t.Size())
+	if dynamicMemUsageDebug {
+		println("[", depth, "]", t.Kind().String(), "(", t.String(), ") ->", t.Size())
 	}
 
-	// For complex types, we need to peek inside slices/arrays/structs/maps and chase pointers.
+	if depth >= dynamicMemUsageMaxDepth {
+		if dynamicMemUsageAssert {
+			panic("crawl reached maximum depth")
+		}
+		return bytes
+	}
+
+	// For complex types, we need to peek inside slices/arrays/structs and chase pointers.
 	switch t.Kind() {
 	case reflect.Pointer, reflect.Interface:
 		if !v.IsNil() {
-			bytes += _dynamicMemUsage(v.Elem(), debug, level+1)
+			bytes += dynamicMemUsageCrawl(v.Elem(), depth+1)
 		}
 	case reflect.Array, reflect.Slice:
 		for j := 0; j < v.Len(); j++ {
 			vi := v.Index(j)
 			k := vi.Type().Kind()
-			if debug {
-				println("[", level, "] index:", j, "kind:", k.String())
+			if dynamicMemUsageDebug {
+				println("[", depth, "] index:", j, "kind:", k.String())
 			}
-			elemB := uintptr(0)
+			elemBytes := uintptr(0)
 			if t.Kind() == reflect.Array {
 				if (k == reflect.Pointer || k == reflect.Interface) && !vi.IsNil() {
-					elemB += _dynamicMemUsage(vi.Elem(), debug, level+1)
+					elemBytes += dynamicMemUsageCrawl(vi.Elem(), depth+1)
 				}
 			} else { // slice
-				elemB += _dynamicMemUsage(vi, debug, level+1)
+				elemBytes += dynamicMemUsageCrawl(vi, depth+1)
 			}
 			if k == reflect.Uint8 {
 				// short circuit for byte slice/array
-				bytes += elemB * uintptr(v.Len())
-				if debug {
+				bytes += elemBytes * uintptr(v.Len())
+				if dynamicMemUsageDebug {
 					println("...", v.Len(), "elements")
 				}
 				break
 			}
-			bytes += elemB
-		}
-	case reflect.Map:
-		iter := v.MapRange()
-		for iter.Next() {
-			vk := iter.Key()
-			vv := iter.Value()
-			if debug {
-				println("[", level, "] key:", vk.Type().Kind().String())
-			}
-			bytes += _dynamicMemUsage(vk, debug, level+1)
-			if debug {
-				println("[", level, "] value:", vv.Type().Kind().String())
-			}
-			bytes += _dynamicMemUsage(vv, debug, level+1)
-			if debug {
-				println("...", v.Len(), "map elements")
-			}
-			debug = false
+			bytes += elemBytes
 		}
 	case reflect.Struct:
 		for _, f := range reflect.VisibleFields(t) {
 			vf := v.FieldByIndex(f.Index)
 			k := vf.Type().Kind()
-			if debug {
-				println("[", level, "] field:", f.Name, "kind:", k.String())
+			if dynamicMemUsageDebug {
+				println("[", depth, "] field:", f.Name, "kind:", k.String())
 			}
 			if (k == reflect.Pointer || k == reflect.Interface) && !vf.IsNil() {
-				bytes += _dynamicMemUsage(vf.Elem(), debug, level+1)
+				bytes += dynamicMemUsageCrawl(vf.Elem(), depth+1)
 			} else if k == reflect.Array || k == reflect.Slice {
 				bytes -= vf.Type().Size()
-				bytes += _dynamicMemUsage(vf, debug, level+1)
+				bytes += dynamicMemUsageCrawl(vf, depth+1)
 			}
 		}
+	case reflect.Uint8:
+	default:
+		if dynamicMemUsageAssert {
+			panic(fmt.Sprintf("unsupported kind: %v", t.Kind()))
+		}
 	}
 
 	return bytes
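
With the reflect.Map case gone, a map-typed value now falls through to the new default arm and panics when dynamicMemUsageAssert is enabled. A hedged sketch of a test exercising that path, modeled on the init() toggle this commit adds to mempool_test.go (the test itself is illustrative, not part of this commit):

package mempool

import (
	"reflect"
	"testing"
)

func TestDynamicMemUsageUnsupportedKind(t *testing.T) {
	dynamicMemUsageAssert = true
	defer func() {
		dynamicMemUsageAssert = false
		// The default arm should have panicked on reflect.Map.
		if recover() == nil {
			t.Fatal("expected panic for unsupported kind reflect.Map")
		}
	}()
	dynamicMemUsage(reflect.ValueOf(map[string]int{"x": 1}))
}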