Catchup to python-herald schema. Plus lots of refactoring. #49
16 changed files with 1659 additions and 853 deletions
.gitignore (vendored, 2 changes)
@@ -1 +1,3 @@
 .idea/
+.vscode/
+.DS_Store
db/db.go (12 changes)
@@ -850,8 +850,8 @@ func ReadPrefixN(db *grocksdb.DB, prefix []byte, n int) []*prefixes.PrefixRowKV
         value := it.Value()

         res[i] = &prefixes.PrefixRowKV{
-            Key:   key.Data(),
-            Value: value.Data(),
+            RawKey:   key.Data(),
+            RawValue: value.Data(),
         }

         key.Free()
@@ -908,8 +908,8 @@ func readWriteRawNCF(db *grocksdb.DB, options *IterOptions, out string, n int, f
         if i >= n {
             return
         }
-        key := kv.Key.([]byte)
-        value := kv.Value.([]byte)
+        key := kv.RawKey
+        value := kv.RawValue
         keyHex := hex.EncodeToString(key)
         valueHex := hex.EncodeToString(value)
         //log.Println(keyHex)
@@ -947,8 +947,8 @@ func ReadWriteRawN(db *grocksdb.DB, options *IterOptions, out string, n int) {
         if i >= n {
             return
         }
-        key := kv.Key.([]byte)
-        value := kv.Value.([]byte)
+        key := kv.RawKey
+        value := kv.RawValue
         keyHex := hex.EncodeToString(key)
         valueHex := hex.EncodeToString(value)
         log.Println(keyHex)
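The pattern in this file follows from the new PrefixRowKV shape: raw bytes now travel in the dedicated RawKey/RawValue fields instead of being stuffed into the interface-typed Key/Value, so consumers drop their type assertions. A rough sketch of what a caller looks like after the change (illustrative only, not code from this PR; the db, handle, and somePrefix values are assumed from the surrounding setup):

    options := NewIterateOptions().WithCfHandle(handle).WithPrefix(somePrefix)
    options = options.WithIncludeKey(true).WithIncludeValue(true).WithRawValue(true)
    for kv := range IterCF(db.DB, options) {
        raw := kv.RawValue // raw bytes, no kv.Value.([]byte) assertion needed
        if key, ok := kv.Key.(*prefixes.RepostedKey); ok {
            _ = key // typed key, already unpacked for us
        }
        _ = raw
    }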
db/db_get.go (18 changes)
@@ -94,7 +94,7 @@ func (db *ReadOnlyDBColumnFamily) GetStreamsAndChannelRepostedByChannelHashes(re

     for _, reposterChannelHash := range reposterChannelHashes {
         key := prefixes.NewChannelToClaimKeyWHash(reposterChannelHash)
-        rawKeyPrefix := prefixes.ChannelToClaimKeyPackPartial(key, 1)
+        rawKeyPrefix := key.PartialPack(1)
         options := NewIterateOptions().WithCfHandle(handle).WithPrefix(rawKeyPrefix)
         options = options.WithIncludeKey(false).WithIncludeValue(true)
         ch := IterCF(db.DB, options)
@@ -167,7 +167,7 @@ func (db *ReadOnlyDBColumnFamily) GetShortClaimIdUrl(name string, normalizedName
         partialClaimId := claimId[:j]
         partialKey := prefixes.NewClaimShortIDKey(normalizedName, partialClaimId)
         log.Printf("partialKey: %#v\n", partialKey)
-        keyPrefix := prefixes.ClaimShortIDKeyPackPartial(partialKey, 2)
+        keyPrefix := partialKey.PartialPack(2)
         // Prefix and handle
         options := NewIterateOptions().WithPrefix(prefix).WithCfHandle(handle)
         // Start and stop bounds
@@ -218,7 +218,7 @@ func (db *ReadOnlyDBColumnFamily) GetRepostedCount(claimHash []byte) (int, error
     }

     key := prefixes.NewRepostedKey(claimHash)
-    keyPrefix := prefixes.RepostedKeyPackPartial(key, 1)
+    keyPrefix := key.PartialPack(1)
     // Prefix and handle
     options := NewIterateOptions().WithPrefix(keyPrefix).WithCfHandle(handle)
     // Start and stop bounds
@@ -267,8 +267,8 @@ func (db *ReadOnlyDBColumnFamily) GetActiveAmount(claimHash []byte, txoType uint
     startKey := prefixes.NewActiveAmountKey(claimHash, txoType, 0)
     endKey := prefixes.NewActiveAmountKey(claimHash, txoType, height)

-    startKeyRaw := prefixes.ActiveAmountKeyPackPartial(startKey, 3)
-    endKeyRaw := prefixes.ActiveAmountKeyPackPartial(endKey, 3)
+    startKeyRaw := startKey.PartialPack(3)
+    endKeyRaw := endKey.PartialPack(3)
     // Prefix and handle
     options := NewIterateOptions().WithPrefix([]byte{prefixes.ActiveAmount}).WithCfHandle(handle)
     // Start and stop bounds
@@ -416,7 +416,7 @@ func (db *ReadOnlyDBColumnFamily) ControllingClaimIter() <-chan *prefixes.Prefix

     key := prefixes.NewClaimTakeoverKey("")
     var rawKeyPrefix []byte = nil
-    rawKeyPrefix = prefixes.ClaimTakeoverKeyPackPartial(key, 0)
+    rawKeyPrefix = key.PartialPack(0)
     options := NewIterateOptions().WithCfHandle(handle).WithPrefix(rawKeyPrefix)
     options = options.WithIncludeValue(true) //.WithIncludeStop(true)
     ch := IterCF(db.DB, options)
@@ -527,7 +527,7 @@ func (db *ReadOnlyDBColumnFamily) EffectiveAmountNameIter(normalizedName string)

     key := prefixes.NewEffectiveAmountKey(normalizedName)
     var rawKeyPrefix []byte = nil
-    rawKeyPrefix = prefixes.EffectiveAmountKeyPackPartial(key, 1)
+    rawKeyPrefix = key.PartialPack(1)
     options := NewIterateOptions().WithCfHandle(handle).WithPrefix(rawKeyPrefix)
     options = options.WithIncludeValue(true) //.WithIncludeStop(true)
     ch := IterCF(db.DB, options)
@@ -542,9 +542,9 @@ func (db *ReadOnlyDBColumnFamily) ClaimShortIdIter(normalizedName string, claimI
     key := prefixes.NewClaimShortIDKey(normalizedName, claimId)
     var rawKeyPrefix []byte = nil
     if claimId != "" {
-        rawKeyPrefix = prefixes.ClaimShortIDKeyPackPartial(key, 2)
+        rawKeyPrefix = key.PartialPack(2)
     } else {
-        rawKeyPrefix = prefixes.ClaimShortIDKeyPackPartial(key, 1)
+        rawKeyPrefix = key.PartialPack(1)
     }
     options := NewIterateOptions().WithCfHandle(handle).WithPrefix(rawKeyPrefix)
     options = options.WithIncludeValue(true) //.WithIncludeStop(true)
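Every change in this file is the same mechanical swap: the per-type helpers such as prefixes.RepostedKeyPackPartial(key, n) become a PartialPack(n) method on the key itself, where n is how many leading key fields to serialize; the result is the byte prefix handed to WithPrefix to bound the RocksDB iteration. A small sketch of the property being relied on (a fragment, not code from this PR; the names mirror the code above and the check mirrors the TestIter hunk below):

    key := prefixes.NewRepostedKey(claimHash)
    full := key.PackKey()    // table prefix plus every field
    p1 := key.PartialPack(1) // table prefix plus the first field only
    // p1 is a prefix of full by construction, so WithPrefix(p1) visits
    // every row that shares that leading field.
    _ = bytes.HasPrefix(full, p1) // true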
@@ -323,7 +323,7 @@ func (db *ReadOnlyDBColumnFamily) ResolveClaimInChannel(channelHash []byte, norm
     }

     key := prefixes.NewChannelToClaimKey(channelHash, normalizedName)
-    rawKeyPrefix := prefixes.ChannelToClaimKeyPackPartial(key, 2)
+    rawKeyPrefix := key.PartialPack(2)
     options := NewIterateOptions().WithCfHandle(handle).WithPrefix(rawKeyPrefix)
     options = options.WithIncludeValue(true) //.WithIncludeStop(true)
     ch := IterCF(db.DB, options)
@@ -729,9 +729,9 @@ func TestIter(t *testing.T) {
         // log.Println(kv.Key)
         gotKey := kv.Key.(*prefixes.RepostedKey).PackKey()

-        keyPartial3 := prefixes.RepostedKeyPackPartial(kv.Key.(*prefixes.RepostedKey), 3)
-        keyPartial2 := prefixes.RepostedKeyPackPartial(kv.Key.(*prefixes.RepostedKey), 2)
-        keyPartial1 := prefixes.RepostedKeyPackPartial(kv.Key.(*prefixes.RepostedKey), 1)
+        keyPartial3 := kv.Key.(*prefixes.RepostedKey).PartialPack(3)
+        keyPartial2 := kv.Key.(*prefixes.RepostedKey).PartialPack(2)
+        keyPartial1 := kv.Key.(*prefixes.RepostedKey).PartialPack(1)

         // Check pack partial for sanity
         if !bytes.HasPrefix(gotKey, keyPartial3) {
@@ -24,6 +24,7 @@ type IterOptions struct {
     RawValue   bool
     CfHandle   *grocksdb.ColumnFamilyHandle
     It         *grocksdb.Iterator
+    Serializer *prefixes.SerializationAPI
 }

 // NewIterateOptions creates a defualt options structure for a db iterator.
@@ -41,6 +42,7 @@ func NewIterateOptions() *IterOptions {
         RawValue:   false,
         CfHandle:   nil,
         It:         nil,
+        Serializer: prefixes.ProductionAPI,
     }
 }

@@ -99,6 +101,11 @@ func (o *IterOptions) WithRawValue(rawValue bool) *IterOptions {
     return o
 }

+func (o *IterOptions) WithSerializer(serializer *prefixes.SerializationAPI) *IterOptions {
+    o.Serializer = serializer
+    return o
+}
+
 // ReadRow reads a row from the db, returns nil when no more rows are available.
 func (opts *IterOptions) ReadRow(prevKey *[]byte) *prefixes.PrefixRowKV {
     it := opts.It
@@ -117,8 +124,10 @@ func (opts *IterOptions) ReadRow(prevKey *[]byte) *prefixes.PrefixRowKV {
     valueData := value.Data()
     valueLen := len(valueData)

-    var outKey interface{} = nil
-    var outValue interface{} = nil
+    var outKey prefixes.BaseKey = nil
+    var outValue prefixes.BaseValue = nil
+    var rawOutKey []byte = nil
+    var rawOutValue []byte = nil
     var err error = nil

     log.Trace("keyData:", keyData)
@@ -136,12 +145,12 @@ func (opts *IterOptions) ReadRow(prevKey *[]byte) *prefixes.PrefixRowKV {
         newKeyData := make([]byte, keyLen)
         copy(newKeyData, keyData)
         if opts.IncludeKey && !opts.RawKey {
-            outKey, err = prefixes.UnpackGenericKey(newKeyData)
+            outKey, err = opts.Serializer.UnpackKey(newKeyData)
             if err != nil {
                 log.Error(err)
             }
         } else if opts.IncludeKey {
-            outKey = newKeyData
+            rawOutKey = newKeyData
         }

         // Value could be quite large, so this setting could be important
@@ -150,18 +159,20 @@ func (opts *IterOptions) ReadRow(prevKey *[]byte) *prefixes.PrefixRowKV {
         newValueData := make([]byte, valueLen)
         copy(newValueData, valueData)
         if !opts.RawValue {
-            outValue, err = prefixes.UnpackGenericValue(newKeyData, newValueData)
+            outValue, err = opts.Serializer.UnpackValue(newKeyData, newValueData)
             if err != nil {
                 log.Error(err)
             }
         } else {
-            outValue = newValueData
+            rawOutValue = newValueData
         }
     }

     kv := &prefixes.PrefixRowKV{
         Key:      outKey,
         Value:    outValue,
+        RawKey:   rawOutKey,
+        RawValue: rawOutValue,
     }
     *prevKey = newKeyData

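Serializer defaults to prefixes.ProductionAPI in NewIterateOptions, so existing call sites keep their behavior; WithSerializer exists so tests can substitute the restruct-based regression implementations. A sketch of an iteration configured with an alternate serializer (illustrative fragment; db and handle are assumed to be set up as in the surrounding code):

    options := NewIterateOptions().
        WithCfHandle(handle).
        WithPrefix([]byte{prefixes.ActiveAmount}).
        WithSerializer(prefixes.RegressionAPI_1).
        WithIncludeValue(true)
    for kv := range IterCF(db.DB, options) {
        _ = kv.Key   // unpacked via options.Serializer.UnpackKey
        _ = kv.Value // unpacked via options.Serializer.UnpackValue
    }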
db/prefixes/generic.go (new file, 201 additions)
@@ -0,0 +1,201 @@
package prefixes

import (
    "encoding/binary"
    "fmt"
    "reflect"
    "strings"

    "github.com/go-restruct/restruct"
    "github.com/lbryio/lbcd/chaincfg/chainhash"
)

func init() {
    restruct.EnableExprBeta()
}

// Type OnesComplementEffectiveAmount (uint64) has to be encoded specially
// to get the desired sort ordering.
// Implement the Sizer, Packer, Unpacker interface to handle it manually.

func (amt *OnesComplementEffectiveAmount) SizeOf() int {
    return 8
}

func (amt *OnesComplementEffectiveAmount) Pack(buf []byte, order binary.ByteOrder) ([]byte, error) {
    binary.BigEndian.PutUint64(buf, OnesCompTwiddle64-uint64(*amt))
    return buf[8:], nil
}

func (amt *OnesComplementEffectiveAmount) Unpack(buf []byte, order binary.ByteOrder) ([]byte, error) {
    *amt = OnesComplementEffectiveAmount(OnesCompTwiddle64 - binary.BigEndian.Uint64(buf))
    return buf[8:], nil
}
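The point of the twiddle: subtracting the amount from OnesCompTwiddle64 flips the byte-wise ordering, so larger effective amounts sort first under RocksDB's lexicographic iteration. A standalone illustration of that property (assuming, for the sketch only, that OnesCompTwiddle64 equals math.MaxUint64; this is not code from the PR):

    package main

    import (
        "bytes"
        "encoding/binary"
        "fmt"
        "math"
    )

    // encode mimics the Pack method above under the sketch's assumption
    // that OnesCompTwiddle64 == math.MaxUint64.
    func encode(amt uint64) []byte {
        buf := make([]byte, 8)
        binary.BigEndian.PutUint64(buf, math.MaxUint64-amt)
        return buf
    }

    func main() {
        small, large := encode(100), encode(5000)
        // The larger amount encodes to the lexicographically smaller key,
        // so a forward iteration yields higher amounts first.
        fmt.Println(bytes.Compare(large, small) < 0) // prints: true
    }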
// Struct BlockTxsValue has a field TxHashes of type []*chainhash.Hash.
// I haven't been able to figure out the right annotations to make
// restruct.Pack,Unpack work automagically.
// Implement the Sizer, Packer, Unpacker interface to handle it manually.

func (kv *BlockTxsValue) SizeOf() int {
    return 32 * len(kv.TxHashes)
}

func (kv *BlockTxsValue) Pack(buf []byte, order binary.ByteOrder) ([]byte, error) {
    offset := 0
    for _, h := range kv.TxHashes {
        offset += copy(buf[offset:], h[:])
    }
    return buf[offset:], nil
}

func (kv *BlockTxsValue) Unpack(buf []byte, order binary.ByteOrder) ([]byte, error) {
    offset := 0
    kv.TxHashes = make([]*chainhash.Hash, len(buf)/32)
    for i := range kv.TxHashes {
        kv.TxHashes[i] = (*chainhash.Hash)(buf[offset:32])
        offset += 32
    }
    return buf[offset:], nil
}

func genericNew(prefix []byte, key bool) (interface{}, error) {
    t, ok := prefixRegistry[prefix[0]]
    if !ok {
        panic(fmt.Sprintf("not handled: prefix=%v", prefix))
    }
    if key {
        return t.newKey(), nil
    }
    return t.newValue(), nil
}

func GenericPack(kv interface{}, fields int) ([]byte, error) {
    // Locate the byte offset of the first excluded field.
    offset := 0
    if fields > 0 {
        v := reflect.ValueOf(kv)
        t := v.Type()
        // Handle indirection to reach kind=Struct.
        switch t.Kind() {
        case reflect.Interface, reflect.Pointer:
            v = v.Elem()
            t = v.Type()
        default:
            panic(fmt.Sprintf("not handled: %v", t.Kind()))
        }
        count := 0
        for _, sf := range reflect.VisibleFields(t) {
            if !sf.IsExported() {
                continue
            }
            if sf.Anonymous && strings.HasPrefix(sf.Name, "LengthEncoded") {
                fields += 1 // Skip it but process NameLen and Name instead.
                continue
            }
            if count > fields {
                break
            }
            sz, err := restruct.SizeOf(v.FieldByIndex(sf.Index).Interface())
            if err != nil {
                panic(fmt.Sprintf("not handled: %v: %v", sf.Name, sf.Type.Kind()))
            }
            offset += sz
            count += 1
        }
    }
    // Pack the struct. No ability to partially pack.
    buf, err := restruct.Pack(binary.BigEndian, kv)
    if err != nil {
        panic(fmt.Sprintf("not handled: %v", err))
    }
    // Return a prefix if some fields were excluded.
    if fields > 0 {
        return buf[:offset], nil
    }
    return buf, nil
}

func GenericUnpack(pfx []byte, key bool, buf []byte) (interface{}, error) {
    kv, _ := genericNew(pfx, key)
    err := restruct.Unpack(buf, binary.BigEndian, kv)
    if err != nil {
        panic(fmt.Sprintf("not handled: %v", err))
    }
    return kv, nil
}

func GetSerializationAPI(prefix []byte) *SerializationAPI {
    t, ok := prefixRegistry[prefix[0]]
    if !ok {
        panic(fmt.Sprintf("not handled: prefix=%v", prefix))
    }
    if t.API != nil {
        return t.API
    }
    return ProductionAPI
}

type SerializationAPI struct {
    PackKey        func(key BaseKey) ([]byte, error)
    PackPartialKey func(key BaseKey, fields int) ([]byte, error)
    PackValue      func(value BaseValue) ([]byte, error)
    UnpackKey      func(key []byte) (BaseKey, error)
    UnpackValue    func(prefix []byte, value []byte) (BaseValue, error)
}

var ProductionAPI = &SerializationAPI{
    PackKey:        PackGenericKey,
    PackPartialKey: PackPartialGenericKey,
    PackValue:      PackGenericValue,
    UnpackKey:      UnpackGenericKey,
    UnpackValue:    UnpackGenericValue,
}

var RegressionAPI_1 = &SerializationAPI{
    PackKey: func(key BaseKey) ([]byte, error) {
        return GenericPack(key, -1)
    },
    PackPartialKey: func(key BaseKey, fields int) ([]byte, error) {
        return GenericPack(key, fields)
    },
    PackValue: func(value BaseValue) ([]byte, error) {
        return GenericPack(value, -1)
    },
    UnpackKey:   UnpackGenericKey,
    UnpackValue: UnpackGenericValue,
}

var RegressionAPI_2 = &SerializationAPI{
    PackKey:        PackGenericKey,
    PackPartialKey: PackPartialGenericKey,
    PackValue:      PackGenericValue,
    UnpackKey: func(key []byte) (BaseKey, error) {
        k, err := GenericUnpack(key, true, key)
        return k.(BaseKey), err
    },
    UnpackValue: func(prefix []byte, value []byte) (BaseValue, error) {
        k, err := GenericUnpack(prefix, false, value)
        return k.(BaseValue), err
    },
}

var RegressionAPI_3 = &SerializationAPI{
    PackKey: func(key BaseKey) ([]byte, error) {
        return GenericPack(key, -1)
    },
    PackPartialKey: func(key BaseKey, fields int) ([]byte, error) {
        return GenericPack(key, fields)
    },
    PackValue: func(value BaseValue) ([]byte, error) {
        return GenericPack(value, -1)
    },
    UnpackKey: func(key []byte) (BaseKey, error) {
        k, err := GenericUnpack(key, true, key)
        return k.(BaseKey), err
    },
    UnpackValue: func(prefix []byte, value []byte) (BaseValue, error) {
        k, err := GenericUnpack(prefix, false, value)
        return k.(BaseValue), err
    },
}
File diff suppressed because it is too large
@@ -2,11 +2,15 @@ package prefixes_test

 import (
     "bytes"
+    "crypto/rand"
     "encoding/csv"
     "encoding/hex"
     "fmt"
     "log"
+    "math"
+    "math/big"
     "os"
+    "sort"
     "testing"

     dbpkg "github.com/lbryio/herald.go/db"
@@ -14,6 +18,14 @@ import (
     "github.com/linxGnu/grocksdb"
 )

+func TestPrefixRegistry(t *testing.T) {
+    for _, prefix := range prefixes.GetPrefixes() {
+        if prefixes.GetSerializationAPI(prefix) == nil {
+            t.Errorf("prefix %c not registered", prefix)
+        }
+    }
+}
+
 func testInit(filePath string) (*grocksdb.DB, [][]string, func(), *grocksdb.ColumnFamilyHandle) {
     log.Println(filePath)
     file, err := os.Open(filePath)
@@ -28,12 +40,25 @@ func testInit(filePath string) (*grocksdb.DB, [][]string, func(), *grocksdb.Colu
     columnFamily := records[0][0]
     records = records[1:]

+    cleanupFiles := func() {
+        err = os.RemoveAll("./tmp")
+        if err != nil {
+            log.Println(err)
+        }
+    }
+
     // wOpts := grocksdb.NewDefaultWriteOptions()
     opts := grocksdb.NewDefaultOptions()
     opts.SetCreateIfMissing(true)
     db, err := grocksdb.OpenDb(opts, "tmp")
     if err != nil {
         log.Println(err)
+        // Garbage might have been left behind by a prior crash.
+        cleanupFiles()
+        db, err = grocksdb.OpenDb(opts, "tmp")
+        if err != nil {
+            log.Println(err)
+        }
     }
     handle, err := db.CreateColumnFamily(opts, columnFamily)
     if err != nil {
@@ -41,16 +66,30 @@
     }
     toDefer := func() {
         db.Close()
-        err = os.RemoveAll("./tmp")
-        if err != nil {
-            log.Println(err)
-        }
+        cleanupFiles()
     }

     return db, records, toDefer, handle
 }

 func testGeneric(filePath string, prefix byte, numPartials int) func(*testing.T) {
     return func(t *testing.T) {
+        APIs := []*prefixes.SerializationAPI{
+            prefixes.GetSerializationAPI([]byte{prefix}),
+            // Verify combinations of production vs. "restruct" implementations of
+            // serialization API (e.g production Pack() with "restruct" Unpack()).
+            prefixes.RegressionAPI_1,
+            prefixes.RegressionAPI_2,
+            prefixes.RegressionAPI_3,
+        }
+        for _, api := range APIs {
+            opts := dbpkg.NewIterateOptions().WithPrefix([]byte{prefix}).WithSerializer(api).WithIncludeValue(true)
+            testGenericOptions(opts, filePath, prefix, numPartials)(t)
+        }
+    }
+}
+
+func testGenericOptions(options *dbpkg.IterOptions, filePath string, prefix byte, numPartials int) func(*testing.T) {
+    return func(t *testing.T) {

         wOpts := grocksdb.NewDefaultWriteOptions()
@@ -69,26 +108,34 @@ func testGeneric(filePath string, prefix byte, numPartials int) func(*testing.T)
             db.PutCF(wOpts, handle, key, val)
         }
         // test prefix
-        options := dbpkg.NewIterateOptions().WithPrefix([]byte{prefix}).WithIncludeValue(true)
         options = options.WithCfHandle(handle)
         ch := dbpkg.IterCF(db, options)
         var i = 0
         for kv := range ch {
             // log.Println(kv.Key)
-            gotKey, err := prefixes.PackGenericKey(prefix, kv.Key)
+            gotKey, err := options.Serializer.PackKey(kv.Key)
             if err != nil {
                 log.Println(err)
             }

+            if numPartials != kv.Key.NumFields() {
+                t.Errorf("key reports %v fields but %v expected", kv.Key.NumFields(), numPartials)
+            }
             for j := 1; j <= numPartials; j++ {
-                keyPartial, _ := prefixes.PackPartialGenericKey(prefix, kv.Key, j)
+                keyPartial, _ := options.Serializer.PackPartialKey(kv.Key, j)
                 // Check pack partial for sanity
-                if !bytes.HasPrefix(gotKey, keyPartial) {
+                if j < numPartials {
+                    if !bytes.HasPrefix(gotKey, keyPartial) || (len(keyPartial) >= len(gotKey)) {
                         t.Errorf("%+v should be prefix of %+v\n", keyPartial, gotKey)
                     }
+                } else {
+                    if !bytes.Equal(gotKey, keyPartial) {
+                        t.Errorf("%+v should be equal to %+v\n", keyPartial, gotKey)
+                    }
+                }
             }

-            got, err := prefixes.PackGenericValue(prefix, kv.Value)
+            got, err := options.Serializer.PackValue(kv.Value)
             if err != nil {
                 log.Println(err)
             }
@@ -101,7 +148,7 @@ func testGeneric(filePath string, prefix byte, numPartials int) func(*testing.T)
                 log.Println(err)
             }
             if !bytes.Equal(gotKey, wantKey) {
-                t.Errorf("gotKey: %+v, wantKey: %+v\n", got, want)
+                t.Errorf("gotKey: %+v, wantKey: %+v\n", gotKey, wantKey)
             }
             if !bytes.Equal(got, want) {
                 t.Errorf("got: %+v, want: %+v\n", got, want)
@@ -123,12 +170,12 @@ func testGeneric(filePath string, prefix byte, numPartials int) func(*testing.T)
         if err != nil {
             log.Println(err)
         }
-        options2 := dbpkg.NewIterateOptions().WithStart(start).WithStop(stop).WithIncludeValue(true)
+        options2 := dbpkg.NewIterateOptions().WithSerializer(options.Serializer).WithStart(start).WithStop(stop).WithIncludeValue(true)
         options2 = options2.WithCfHandle(handle)
         ch2 := dbpkg.IterCF(db, options2)
         i = 0
         for kv := range ch2 {
-            got, err := prefixes.PackGenericValue(prefix, kv.Value)
+            got, err := options2.Serializer.PackValue(kv.Value)
             if err != nil {
                 log.Println(err)
             }
@@ -216,7 +263,7 @@ func TestTXOToClaim(t *testing.T) {

 func TestClaimShortID(t *testing.T) {
     filePath := fmt.Sprintf("../../testdata/%c.csv", prefixes.ClaimShortIdPrefix)
-    testGeneric(filePath, prefixes.ClaimShortIdPrefix, 3)(t)
+    testGeneric(filePath, prefixes.ClaimShortIdPrefix, 4)(t)
 }

 func TestClaimToChannel(t *testing.T) {
@@ -286,7 +333,7 @@ func TestClaimDiff(t *testing.T) {

 func TestUTXO(t *testing.T) {
     filePath := fmt.Sprintf("../../testdata/%c.csv", prefixes.UTXO)
-    testGeneric(filePath, prefixes.UTXO, 1)(t)
+    testGeneric(filePath, prefixes.UTXO, 3)(t)
 }

 func TestHashXUTXO(t *testing.T) {
@@ -330,3 +377,175 @@ func TestUTXOKey_String(t *testing.T) {
         })
     }
 }
+
+func TestTrendingNotifications(t *testing.T) {
+    prefix := byte(prefixes.TrendingNotifications)
+    filePath := fmt.Sprintf("../../testdata/%c.csv", prefix)
+    //synthesizeTestData([]byte{prefix}, filePath, []int{4, 20}, []int{8, 8}, [][3]int{})
+    key := &prefixes.TrendingNotificationKey{}
+    testGeneric(filePath, prefix, key.NumFields())(t)
+}
+
+func TestMempoolTx(t *testing.T) {
+    prefix := byte(prefixes.MempoolTx)
+    filePath := fmt.Sprintf("../../testdata/%c.csv", prefix)
+    //synthesizeTestData([]byte{prefix}, filePath, []int{32}, []int{}, [][3]int{{20, 100, 1}})
+    key := &prefixes.MempoolTxKey{}
+    testGeneric(filePath, prefix, key.NumFields())(t)
+}
+
+func TestTouchedHashX(t *testing.T) {
+    prefix := byte(prefixes.TouchedHashX)
+    filePath := fmt.Sprintf("../../testdata/%c.csv", prefix)
+    //synthesizeTestData([]byte{prefix}, filePath, []int{4}, []int{}, [][3]int{{1, 5, 11}})
+    key := &prefixes.TouchedHashXKey{}
+    testGeneric(filePath, prefix, key.NumFields())(t)
+}
+
+func TestHashXStatus(t *testing.T) {
+    prefix := byte(prefixes.HashXStatus)
+    filePath := fmt.Sprintf("../../testdata/%c.csv", prefix)
+    //synthesizeTestData([]byte{prefix}, filePath, []int{20}, []int{32}, [][3]int{})
+    key := &prefixes.HashXStatusKey{}
+    testGeneric(filePath, prefix, key.NumFields())(t)
+}
+
+func TestHashXMempoolStatus(t *testing.T) {
+    prefix := byte(prefixes.HashXMempoolStatus)
+    filePath := fmt.Sprintf("../../testdata/%c.csv", prefix)
+    //synthesizeTestData([]byte{prefix}, filePath, []int{20}, []int{32}, [][3]int{})
+    key := &prefixes.HashXMempoolStatusKey{}
+    testGeneric(filePath, prefix, key.NumFields())(t)
+}
+
+func synthesizeTestData(prefix []byte, filePath string, keyFixed, valFixed []int, valVariable [][3]int) {
+    file, err := os.OpenFile(filePath, os.O_CREATE|os.O_TRUNC|os.O_RDWR, 0644)
+    if err != nil {
+        panic(err)
+    }
+    defer file.Close()
+
+    records := make([][2][]byte, 0, 20)
+    for r := 0; r < 20; r++ {
+        key := make([]byte, 0, 1000)
+        key = append(key, prefix...)
+        val := make([]byte, 0, 1000)
+        // Handle fixed columns of key.
+        for _, width := range keyFixed {
+            v := make([]byte, width)
+            rand.Read(v)
+            key = append(key, v...)
+        }
+        // Handle fixed columns of value.
+        for _, width := range valFixed {
+            v := make([]byte, width)
+            rand.Read(v)
+            val = append(val, v...)
+        }
+        // Handle variable length array in value. Each element is "chunk" size.
+        for _, w := range valVariable {
+            low, high, chunk := w[0], w[1], w[2]
+            n, _ := rand.Int(rand.Reader, big.NewInt(int64(high-low)))
+            v := make([]byte, chunk*(low+int(n.Int64())))
+            rand.Read(v)
+            val = append(val, v...)
+        }
+        records = append(records, [2][]byte{key, val})
+    }
+
+    sort.Slice(records, func(i, j int) bool { return bytes.Compare(records[i][0], records[j][0]) == -1 })
+
+    wr := csv.NewWriter(file)
+    wr.Write([]string{string(prefix), ""}) // column headers
+    for _, rec := range records {
+        encoded := []string{hex.EncodeToString(rec[0]), hex.EncodeToString(rec[1])}
+        err := wr.Write(encoded)
+        if err != nil {
+            panic(err)
+        }
+    }
+    wr.Flush()
+}
+
+// Fuzz tests for various Key and Value types (EXPERIMENTAL)
+
+func FuzzTouchedHashXKey(f *testing.F) {
+    kvs := []prefixes.TouchedHashXKey{
+        {
+            Prefix: []byte{prefixes.TouchedHashX},
+            Height: 0,
+        },
+        {
+            Prefix: []byte{prefixes.TouchedHashX},
+            Height: 1,
+        },
+        {
+            Prefix: []byte{prefixes.TouchedHashX},
+            Height: math.MaxUint32,
+        },
+    }
+
+    for _, kv := range kvs {
+        seed := make([]byte, 0, 200)
+        seed = append(seed, kv.PackKey()...)
+        f.Add(seed)
+    }
+
+    f.Fuzz(func(t *testing.T, in []byte) {
+        t.Logf("testing: %+v", in)
+        out := make([]byte, 0, 200)
+        var kv prefixes.TouchedHashXKey
+        kv.UnpackKey(in)
+        out = append(out, kv.PackKey()...)
+        if len(in) >= 5 {
+            if !bytes.HasPrefix(in, out) {
+                t.Fatalf("%v: not equal after round trip: %v", in, out)
+            }
+        }
+    })
+}
+
+func FuzzTouchedHashXValue(f *testing.F) {
+    kvs := []prefixes.TouchedHashXValue{
+        {
+            TouchedHashXs: [][]byte{},
+        },
+        {
+            TouchedHashXs: [][]byte{
+                {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
+            },
+        },
+        {
+            TouchedHashXs: [][]byte{
+                {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+                {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
+            },
+        },
+        {
+            TouchedHashXs: [][]byte{
+                {0xff, 0xff, 2, 3, 4, 5, 6, 7, 8, 9, 10},
+                {0, 1, 0xff, 0xff, 4, 5, 6, 7, 8, 9, 10},
+                {0, 1, 2, 3, 0xff, 0xff, 6, 7, 8, 9, 10},
+            },
+        },
+    }
+
+    for _, kv := range kvs {
+        seed := make([]byte, 0, 200)
+        seed = append(seed, kv.PackValue()...)
+        f.Add(seed)
+    }
+
+    f.Fuzz(func(t *testing.T, in []byte) {
+        t.Logf("testing: %+v", in)
+        out := make([]byte, 0, 200)
+        var kv prefixes.TouchedHashXValue
+        kv.UnpackValue(in)
+        out = append(out, kv.PackValue()...)
+        if len(in) >= 5 {
+            if !bytes.HasPrefix(in, out) {
+                t.Fatalf("%v: not equal after round trip: %v", in, out)
+            }
+        }
+    })
+}
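The new fuzz targets run under Go's native fuzzing (Go 1.18+); the package path below is an assumption about where these tests live:

    go test -fuzz=FuzzTouchedHashXKey -fuzztime=30s ./db/prefixes/
    go test -fuzz=FuzzTouchedHashXValue -fuzztime=30s ./db/prefixes/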
go.mod (5 changes)
@@ -19,7 +19,10 @@ require (
 	gopkg.in/karalabe/cookiejar.v1 v1.0.0-20141109175019-e1490cae028c
 )

-require golang.org/x/crypto v0.0.0-20211209193657-4570a0811e8b // indirect
+require (
+	github.com/go-restruct/restruct v1.2.0-alpha // indirect
+	golang.org/x/crypto v0.0.0-20211209193657-4570a0811e8b // indirect
+)

 require (
 	github.com/beorn7/perks v1.0.1 // indirect
go.sum (4 changes)
@@ -179,6 +179,8 @@ github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTg
 github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8=
 github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
 github.com/go-ozzo/ozzo-validation v3.6.0+incompatible/go.mod h1:gsEKFIVnabGBt6mXmxK0MoFy+cZoTJY6mu5Ll3LVLBU=
+github.com/go-restruct/restruct v1.2.0-alpha h1:2Lp474S/9660+SJjpVxoKuWX09JsXHSrdV7Nv3/gkvc=
+github.com/go-restruct/restruct v1.2.0-alpha/go.mod h1:KqrpKpn4M8OLznErihXTGLlsXFGeLxHUrLRRI/1YjGk=
 github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
 github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
@@ -362,6 +364,8 @@ github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-b
 github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4=
 github.com/linxGnu/grocksdb v1.6.42 h1:nJLoXFuzwBwQQQrXTUgRGRz1QRm7y8pR6CNV/gwrbqs=
 github.com/linxGnu/grocksdb v1.6.42/go.mod h1:JcMMDBFaDNhRXFYcYXmgQwb/RarSld1PulTI7UzE+w0=
+github.com/linxGnu/grocksdb v1.7.0 h1:UyFDykX0CUfxDN10cqlFho/rwt9K6KoDaLXL9Ej5z9g=
+github.com/linxGnu/grocksdb v1.7.0/go.mod h1:JcMMDBFaDNhRXFYcYXmgQwb/RarSld1PulTI7UzE+w0=
 github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ=
 github.com/lyoshenka/bencode v0.0.0-20180323155644-b7abd7672df5/go.mod h1:H0aPCWffGOaDcjkw1iB7W9DVLp6GXmfcJY/7YZCWPA4=
 github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
testdata/c.csv (vendored, new file, 21 additions)
@@ -0,0 +1,21 @@
c,
631457da9061c90a8fd211994ba8e3701a76c43fa66937673f,e41d47b10d8b768793c75e4b2bb35784
632de81b0213a1b6e390e4c1859dba94b2b0e9a74e360a2b9e,1326b6b9eb9ad8ecc591aa54c365dafa
63325af388b77d3ed3df8a5b1483b83fb0b5153ad51de15ac0,b3985bb638840f1c0c7aadaa32848fc1
6339c7574004d908068b73e2f898a241dceaa19d2e4f5fd2c6,b55b277d1598b93cad3cbcdbdc796c04
6363d895c26d023913ae5c84680d8acbf0c4b2dd6fa1842a2c,9c33af364b69814cc868fadc48547ef9
637d45cd2b29ba27353f889660780d2c5edd0d490058c06dd1,6597a63fa0de8aaf717e031029830cc1
637e1d5b825273eaf7457f40d97fc18ab2f99e25552e14e185,2c9e0e7145297d8eaee06f36567a529c
638151f59e498873ef82ef0271186f0b60b9ceeaa10aec120e,9b64b4276a1059e4ecf19560b566d503
6384e22f9b0fc6f63c9a221786091ecf02b0df2925895b8132,e12f4a8a130f1419ff4ae3a9bb8a31ee
63a92ad4fe7abbf72db94f49092764329c4d9b5cf30115eb2a,152300368cecfaf42debe1e7cccba9cc
63ab7cc5574087640b78b46e9548cfbefabc581e479883eb70,1f8e2f0abf79e263c3bd3fa29085f454
63b7cceb793d1e8a3729c9f9bc7a580b7d3d1b42a3c13c5e99,fb5b20d556d3362da5e4e880b8feec7a
63b9b943c661dfad86644fdf34d956273996a261692227d6a9,8b4aeb0ad6f6275025df1fb2a173c5a7
63bba32a7015a47db0da6381c30f95200858637fb82cf367ee,83841279d3c9a345e87f02ba431479fe
63beea81eeec6fadf422df5800d013278ccd351dc77cabf363,d3ea0bcc5e7a5453855d96220fc02e97
63bf6872e4541eaa7ffe0659e11eff43520a6571a634576c56,d01ae01321c2617c17767446f624a348
63cce2b1651ed5575052abbb75747d059b5a54e09c7a330b56,46a4dbf4d155da400b30038a0ccd3bdc
63d5165b6b9c42249409c8e616fc17481bd296f69d0b4564f2,a18bff62b8cbe7aea8a46aa2e83432a3
63e616d85d1425ea0686aa58438ff416db5176da015cef2eb3,8c1e763b02f9f3f1b4c6f0e5dd18cb19
63f5476e70301ba6fdd6d0317b2c03d678e2623ee66fd4110a,f04df6c132e1d2d14feeb17ca34b65f3
testdata/d.csv (vendored, new file, 21 additions)
@@ -0,0 +1,21 @@
d,
64188d8e8e56c823919ba5eea5b60d0e2a27b313b314a83cd79ec882e042ba47d1,27f60d5852ab8e9538b5c35891ebd915c14b02a679607b01ae33e040a816685fba36f7e9918136dba9999c13cc
64254b85d06da94e2c7723699a684dfcf38664bcadb4e6aa35541cd5b2975bbcb9,fbc9d8e21a2192182aba69c73a6e3f7f56ba2fac8a634ef1f0b16625a12db3757c27dbddd74c3e598005a7c529f13410d4ff3a02456164e973040dec661f78106441
642984b5855a4a1894d881f82d3703f184e6c1b380daa5d09147c98c1b71bee9ea,3ff17d6d132128a85f8262399a6ee09401672ec20e668ff70fe63024753d8b9ecd915720e2fc4b52d857034b066c2e316ab2d2d3c77d20649bfdd1e86d7f0ffa1b44302989e1f103470aebbaf4
64299c1c1b5dabf41bd83f3c91efce9eb5c0acd635dc6e669b42c3bf27cc4dc418,144ab7485a18bdfc8ed9543e1d5783941d602f9b012441da55f028b37d679f046173b4ab1c10e424
6435d0497f800004c1a23d3471242dbcf8012eb45792621e2185d675b1c3a21021,a03bf241d35ac46c51aad53c83b2f445fc8e97654e843b0d83b0ba85b0d8130c9e7c7b13bb4d6157f5f73df8c80e4f4851d29c0501e8fcba518d3dbd80c0e87e94ec1bc781e0f6092fd0d4749c418afd
644515ee2686c2e0410a965fae5a8ff3e707bab2ba3969d9557ab529aa219da650,662ce7d0284408744733f63ea84cb9db34413f261913c3fce59933a196458b3a1e9b52a636af1fb778a0edaedae51be1aedb09b9d605e1e7ef8c0da3e8eba9b99d723a9c1635473554b0bf45db5fb790a110f0d3f89cbe
6458f48aa991fc0a2c6f79f138fcc758646b025fce9d02525ee077dbbb56c64043,a48b7d67a08ebf8a9298c7b6576a1daae2e0b8fcc35fc95bd7097c54fed39df5bab602e389e1378523688109525e8be4b23d
645b00b38d41e9e74d7af8b88c6840deacd9af74a25de3f352440b0087a111af2e,0d6b55f6eae73445f41335666b345be2afc15989331f8478efd86f7c420d7f71cd6a23723a25c1da963dce93e5993a74529a4cddced9ca3a6ede21b597ba2c26d2
645c00301ef63070ab0912e3378b2d59d19953a74143b584d686e59638ede0250c,16fa8a614ee7bc188c92772bd8f41311e518ea04a4063eae2e3f0ac6c86fcb34a821afe711c4cabe6a6b4245dec139
645c241e29e0a3e406f4a908faa7d39df87c91190fb3e073b006d22f6695735873,84b2dd6db4cdd508d31f4fa0ca561f90d0cdffdb958cf8a5d297260d
6468c52a1fbf769451bcd1c99021ee0b309ae67bb5f03e83ab50674bb959e5845c,ae39e4716dc15ece68c57794720d787193b28632e13dea5050e95f1f251674370ef3aa64
646acbb4b11cfa5ead5a2c38515ace8f4fc87d39c3cf8866401900ee822e8ce238,c31db7d0ce2537e1fe0c6fc9cd4e84d5c9f73df537425f1035938fa49fb0f9334f86be59b8
6478d257a7fd6779ad36b351e88cc9f34e55cf8d200bc3f095505168d31dafc21c,f8e3051555b19ecc5af92ba46f7db73190d9e1e0ecf84c259cad97371480ea3c7c5036157fad5c1d0d008bf1ab4ae558b78f4426a9303cc53401b9085b5c23966f48fbb1d76809ea3376e3d08a6d10b048d06da6a5ff32
64b099e855102c54d054907e42637536b93f2b5c8482795a4d89bd420dff876fe3,19bfabe9d9633c1741bf051db2ba9b0d0b265a66ac9869ce
64b567cd2cb2d61062b66aeb2364f7bf3fc706f67ecf34674fdfc0b793587c6e3b,ccfc02a82b2e0f925a53aff5c040e610af1eee11f2aba92a9ce57e975c1937fb7888e9da98712bc5be906f0ed4946077f4ecb7d5c2fd167d892a67
64bfd045aaaeded94be7a756ca44bf3c3b1825c32ce8df02023ba5349aab3cae4e,2a890e23f7282e5d38f5575e83d72b369c365a4772b0f109ce
64c3fbfe842cf0e183d79b9340da544ac8afeee1351f4d67ba407afd0db8dc20b7,df3b8fc3e4b169c0cbeeb701ddc8a50ea4dab3ce5a32553bc5be28e5cd1c65a76669fa71c141c639965f8a7d71ef93f2a193cf9025a67509ac7bae8152a6e36a3c283e3186dc35ed11de23810a1cbe13b0889f465b8e70dfc96671821a4504c0
64c610888ad1cb913b13be9f52e51269bfa664862b213d102838cfa04350eb3431,7a065900bc937ec5426525b13375ccc7f07b1230a3369eb6a107ba5a253182a2660ebe7f45
64d41e007768c674b134ff3f75b7c682a08fe673929673a445cd2e176b63d5aff5,9fd9c6ceee853474dbd77c73640befc524d8e3f3
64ee07557244e772cf9384d37ace73921388c05a8cadcab8aa17e82935bd5b95a7,4f396aef717bd3b9f57ca99af6db26114794c059472b8951dfe0cf588f35c7c74a91dbbac4f26faa565c18fb5b7d0ddbef53ae92945bf74e3f81a453d6b16b03208dbf5ae310f0
testdata/e.csv (vendored, new file, 21 additions)
@@ -0,0 +1,21 @@
e,
6500f23ec1,7b471b15ac811403113bf4
654b6af788,7c38d58c240503b936f4c1204a4ed317680f6fbc09c95c4d6ab2598f31d3e09e9a
654dceae45,2b36ece4081037b0ec8136d4a41a667f9736548ff85892fb178ed0008ea17fe7582985b489d9d3c455d23b1b
65673f9cef,8cc057ce0c7190316c9269a6e2807e63417637b5f82eef1f94762e584191166662f6a446199ab950a6b96a98
656845f85a,4ef94f090853d39618c561f4d6b1dab800b3fd46b95c56641079f36f8e3d8c3d24126ef86be8d456e93a5d4c
656fd477dc,08e664da615c0dd584b91e210848ea2949dc60c555bc
6575c86b58,421fb2a0f544ae76b850b45af8749b65eb5880fca17f6ba9b70cc9f6746cf04632
6585892310,c2043f7e7ff3b392d46c381682da2f60baf85c34ed6e9f5a2a5cced6f972b9847b
659459b414,8f8a3713c0abe3c94ef3aa4b449693df448683aa6192395d4bd61c66ef71f69e89
659839e3bd,6baddd761d7c6b8bbc8dce4f7a0240f4db5bbe19b9eb0874ff3b8c1d0fd5ba48ff
65a0e881ac,c7ccd582382f46df2095dff1d484af80f40fff68a3a92397d413a9818260e18cd40d2d35b4072dea89eb0d08
65b4164cd2,6b8bcfd57d29fb94128767b24e4b09f3f6fbf1773785
65b8989fc8,7e712054cbb6dc0e292684
65b9996832,997ed9e6c10df1c78f3e1f
65d805f1ba,3af5fcf80e392d3daec547de5d9171d9c24a79c5e3cc5551ea432377c277f58aa0
65edc9cdf2,7e37479e9bb38fc69e1b0d
65ef0d9209,c88ffcfba33856508b4ba58c82b65cf60927ffaa45faf1f671b27965ab7e87fc4e
65f2b2764b,2a5cc7a625a03a55170954202ba6a95675acbb79897a79256c6913deeb583918198769fe1e2e4c2802623315
65f72d65f3,77ef24d0a1a6d1c17580a8612cccd8398148834ff341
65ffbd56f8,2a015033fd5beb3320f748a4589a5eb81d9a5241ab3c561341f1ae2de993957dc29a273e6056c5676e5ebabc
testdata/f.csv (vendored, new file, 21 additions)
@@ -0,0 +1,21 @@
f,
660d649ba1defa4ab5ab71f8a977d7f7cedb11056e,919be5811844077f4660af66afa9a59a5ad17cf5c541524e780fe2137bfa250c
6623c6895027f70a5330bbcb1153d635abcb4d5224,8dadcde1a6f676d4004eacd399f825006ddf136d1e92b1c92113377b3e1741b4
664f095b24484ebce8f31fbf008e63cc4aa163d401,c0c4a751f569c1f9c01531f57ba674b2ad2338d9c08f9e9fc85b0209d15466b2
665201a38de7d7243df717c9f9279cdd30105f0f77,d9293577cc0d51fe3a5bee78fea9b2b2222e6c2aa0d26a4ef4bfb7dd095587e8
665328b2449e537b0ca4733f87ac5ebcdf033c5ebd,624f80a361e47c7eb1b815e8714a40f67b4f642a5546547a3fcb5bf5593d8fab
665ec882021f55b1fbaa5fad00df5c5d07633b7af3,1e917fbc04385290d654f711bdef12773dd54b6b5ea26fe2a9d58ed051f2cb7f
6671c131cd433750ba6d3908150ca4910841164b74,a2ebfbdf7a23024c340a45f201645aa46f48bc1fdd8d34ed83fcffbf1ee90523
667fb93d9ae877ba11f337f21422b0679852580802,4710649e06619e13250754937e9c17c20b07434751171aac2f2f78b184aa0146
668ed5f39a5db059dc3261377f2a47728f7a357d33,8dd8ca749b87f43e290904749a546fe319c9d53e765f065bb8beb234a117655e
66951782f6ba94f2b71e46d0cc4a2411b14d81eb70,4f5c9434dd0886c57c2530991cebd973e1b50d5ba8fcfc019e54561217a49bbb
66970565dfe2b01cad49b73a085a3c3f7a3be61c4c,f6ca0ae18c896d9bc97c5a9d0c3a06256485f59c77fb91780b213f933b80f48b
669f6a30a6712062da0cc27181845c04d7430abf73,5c6604bfd63b871daceb7893dd618850458974fe4108871c1a1323fb8ae34e4e
66a9a7b89b78553592acf3dfc417c1d7654dab3273,0561f28c3a5ea0027ecb3c53fa068772a6b7cb73d23104a14f9aba8cd1f070a2
66aba81567ba48f001f843f01354d575c2e2687847,b0f6ae2c1db8263f7e11fc79423109e718d1f3c30bd123c4243401b5e4f1fee6
66b569cc3d28be4466fb28d147f66d6d8769598964,ecee392ad8217f325508ba38d280436fb0a520b79a9627e5e18197bf55540885
66d4662cd100d66055917d6342d48f49d948fcc255,5762a8ac767fa30d2ca76db7081f8a2e4f5da4f0bf92d29e1322da9a154cc3d6
66d6fa6ac71d0255dd3f185de6480d5b4316b6b050,5fc193e5e51b3bd8e95f4eb9df63236da7abf678fc47c0b339ceb5c127d0f488
66e5b6c7c231a02a32eedd8383a5750fd135244a03,58c70ffbfada12550f24bf7931cee06eb2e267dec3560e2e46843e383415f163
66e673cce02c2163f756491ef05d7535ceb578e215,b8db43d1f6e62361e2e3b8fa765f79c08ddfb3035caa06f8250d6d1b063a7140
66fc4ad75184e6029c805d9494eed4e81be770c002,fc7ac5e785f73732d95183d6bdc3423d41a074fc3f04b1304bae1efa652edde1
testdata/g.csv (vendored, new file, 21 additions)
@@ -0,0 +1,21 @@
g,
6702c124856d5168381a32971d8933440a1728fc41,575696fd653a4de2f9a8c1f580cf0c229631b0f5d95fceb354cda133e2eb2d34
6707f1511e3a2cb28493f91b85e9e4a9d9d07c86a5,ba368e0f859ee36da8701df1c0b52cbf0c0f8a4b1a91f6d0db83a408f5a937d1
6707fd4213cae8d5342a98ba49b255fa80b2a9a6e4,bd3a44d30f66444f8732119bc7e0cf0bb47f8f0ab2840987fc06b629f3e6d3f4
6710294a5693224a6222404ba45fd38eb2e77979a4,de35a8ea0a26d17445e2f509db23188961b5cd1229b96d2411565adf63731b5c
6716a9f84e02143b50d9034aec126b12d7f2708cc4,5823640ae4529f8df2dab20386c887d0a1ba1ffa4583b99dff761c01f670c2fa
672e51bc65c9b97d482b0b720e6cb673c41fe7b5c5,0687df449bd8cb8d8f526f4189973d084d786ab0927d81c127f56b03c61aa955
67682620db65932047689e5eaf392d6b85be801864,b262d40758edb28d1c04fa3a24d8268990516de6846ad94d002ce55640866239
676e8c320dbbf5eebc2969a93fbc51dd7f6062a7d1,c9e2a8e7181a70e2a488b884c8baadb4043a075c6876cb012c67fbec5aa9f615
6772e2ac48891ee3c2c727835702a374ad0cb70fd6,985a9c9ee7a0626d78dab431e663289762ce6959be314f91f7b08b1466097fd6
67847dd1dac117b85d1e20d93580cdf42f00001a77,62e6b1b8c2961703a90276dcde6dad182b2d14e23f27dccc927cca7770b9890e
678f49948c72b7295f12092a24d300eeff894f1dd7,2e7c456dac5206c5627736924e96ac016a09a88ec5f4835fbe0cf9e294611c88
67948b9633ab2ec07d7525936254e66f8c957d026c,66b5c54b3a685de3ea18f9e69254eec065eb3207ac1f93494fdcd585e9a267a0
679674c162db8d3bb57c434fe87825625c4d4daf63,05425880d80258f7441859b3494415a3fd7398c9e209a19674abd48372b283c6
67a8d3f17df85502bd644a364721e6364d61635b73,1efce69a3a05c505e9f9cc5c2241d02099c043d934389b430fd8b185e6dfe6cb
67bad7f4fb3c6828b6fc4624d43786fc8f55d6eb0f,04a1c0a7ffe7acbf974ca18cf3debbd8e1be3d6703f842f57ef14af6d4c336d3
67c13fb0c65acca5520bc2f59bd91ca3482dbec156,7fdc6989cd778baad45cd98358ea060237b169a4aeaeb14da6ac4686b7858c9f
67d4314588b4424b0ee026536b9bd7857f11cab2ee,c63fd7a85a533b8591577bab805104708ba5458fab0e343d46b3e24a28b92cb5
67d734244f85f32a58e34e2d9cadf225a56973d32f,d19a6307c24470b3973973319770bdb896218bb58d1f2d07c7226266075057d0
67d9c159c5d5e407e6b0a4cacf9d6fe62a55b0fedc,89cbdb903fdfe0b44e74b0a69eed3de7029f18c28f77e5509f8ace766ab86610
67fafc73d674250f11e559ab08b287f5714e531761,1752ffbf9807bb2e4e480bf045b4bacc472befe755287384b5a526065a58c065