From c3871df113c71c61bd6c339121bb146d320ec768 Mon Sep 17 00:00:00 2001
From: Jeffrey Picard
Date: Fri, 14 Jan 2022 14:31:07 -0500
Subject: [PATCH] ClaimShortID

---
 db/prefixes/prefixes.go             | 114 ++++++++++++++++++++++++++--
 db/prefixes/prefixes_test.go        |  84 ++++++++++++++++++++
 main.go                             |   4 +-
 resources/claim_short_id_prefix.csv |  10 +++
 4 files changed, 204 insertions(+), 8 deletions(-)
 create mode 100644 resources/claim_short_id_prefix.csv

diff --git a/db/prefixes/prefixes.go b/db/prefixes/prefixes.go
index 9ef0548..1973bed 100644
--- a/db/prefixes/prefixes.go
+++ b/db/prefixes/prefixes.go
@@ -346,13 +346,115 @@ type ClaimShortIDKey struct {
 	Prefix         []byte `json:"prefix"`
 	NormalizedName string `json:"normalized_name"`
 	PartialClaimId string `json:"partial_claim_id"`
-	RootTxNum      int32  `json:"root_tx_num"`
-	RootPosition   int32  `json:"root_position"`
+	RootTxNum      uint32 `json:"root_tx_num"`
+	RootPosition   uint16 `json:"root_position"`
 }
 
 type ClaimShortIDValue struct {
-	TxNum    int32 `json:"tx_num"`
-	Position int32 `json:"position"`
+	TxNum    uint32 `json:"tx_num"`
+	Position uint16 `json:"position"`
+}
+
+// PackKey serializes the key as:
+// prefix | uint16 name len | name | uint8 partial id len | partial id |
+// uint32 root tx num | uint16 root position (big endian throughout).
+func (k *ClaimShortIDKey) PackKey() []byte {
+	prefixLen := 1
+	nameLen := len(k.NormalizedName)
+	partialClaimLen := len(k.PartialClaimId)
+	n := prefixLen + 2 + nameLen + 1 + partialClaimLen + 4 + 2
+	key := make([]byte, n)
+	copy(key, k.Prefix)
+	binary.BigEndian.PutUint16(key[prefixLen:], uint16(nameLen))
+	copy(key[prefixLen+2:], []byte(k.NormalizedName))
+	key[prefixLen+2+nameLen] = uint8(partialClaimLen)
+	copy(key[prefixLen+2+nameLen+1:], []byte(k.PartialClaimId))
+	binary.BigEndian.PutUint32(key[prefixLen+2+nameLen+1+partialClaimLen:], k.RootTxNum)
+	binary.BigEndian.PutUint16(key[prefixLen+2+nameLen+1+partialClaimLen+4:], k.RootPosition)
+
+	return key
+}
+
+func (v *ClaimShortIDValue) PackValue() []byte {
+	value := make([]byte, 6)
+	binary.BigEndian.PutUint32(value, v.TxNum)
+	binary.BigEndian.PutUint16(value[4:], v.Position)
+
+	return value
+}
+
+func ClaimShortIDKeyPackPartialNFields(nFields int) func(*ClaimShortIDKey) []byte {
+	return func(u *ClaimShortIDKey) []byte {
+		return ClaimShortIDKeyPackPartial(u, nFields)
+	}
+}
+
+func ClaimShortIDKeyPackPartial(k *ClaimShortIDKey, nFields int) []byte {
+	// Limit nFields between 0 and number of fields, we always at least need
+	// the prefix, and we never need to iterate past the number of fields.
+	if nFields > 4 {
+		nFields = 4
+	}
+	if nFields < 0 {
+		nFields = 0
+	}
+
+	// b'>4sLH'
+	prefixLen := 1
+	nameLen := len(k.NormalizedName)
+	partialClaimLen := len(k.PartialClaimId)
+
+	var n = prefixLen
+	for i := 0; i <= nFields; i++ {
+		switch i {
+		case 1:
+			n += 2 + nameLen
+		case 2:
+			n += 1 + partialClaimLen
+		case 3:
+			n += 4
+		case 4:
+			n += 2
+		}
+	}
+
+	key := make([]byte, n)
+
+	for i := 0; i <= nFields; i++ {
+		switch i {
+		case 0:
+			copy(key, k.Prefix)
+		case 1:
+			binary.BigEndian.PutUint16(key[prefixLen:], uint16(nameLen))
+			copy(key[prefixLen+2:], []byte(k.NormalizedName))
+		case 2:
+			key[prefixLen+2+nameLen] = uint8(partialClaimLen)
+			copy(key[prefixLen+2+nameLen+1:], []byte(k.PartialClaimId))
+		case 3:
+			binary.BigEndian.PutUint32(key[prefixLen+2+nameLen+1+partialClaimLen:], k.RootTxNum)
+		case 4:
+			binary.BigEndian.PutUint16(key[prefixLen+2+nameLen+1+partialClaimLen+4:], k.RootPosition)
+		}
+	}
+
+	return key
+}
+
+func ClaimShortIDKeyUnpack(key []byte) *ClaimShortIDKey {
+	prefixLen := 1
+	nameLen := int(binary.BigEndian.Uint16(key[prefixLen:]))
+	partialClaimLen := int(uint8(key[prefixLen+2+nameLen]))
+	return &ClaimShortIDKey{
+		Prefix:         key[:prefixLen],
+		NormalizedName: string(key[prefixLen+2 : prefixLen+2+nameLen]),
+		PartialClaimId: string(key[prefixLen+2+nameLen+1 : prefixLen+2+nameLen+1+partialClaimLen]),
+		RootTxNum:      binary.BigEndian.Uint32(key[prefixLen+2+nameLen+1+partialClaimLen:]),
+		RootPosition:   binary.BigEndian.Uint16(key[prefixLen+2+nameLen+1+partialClaimLen+4:]),
+	}
+}
+
+func ClaimShortIDValueUnpack(value []byte) *ClaimShortIDValue {
+	return &ClaimShortIDValue{
+		TxNum:    binary.BigEndian.Uint32(value),
+		Position: binary.BigEndian.Uint16(value[4:]),
+	}
 }
 
 /*
@@ -2213,7 +2315,7 @@ func UnpackGenericKey(key []byte) (byte, interface{}, error) {
 		return ChannelToClaim, ChannelToClaimKeyUnpack(key), nil
 
 	case ClaimShortIdPrefix:
-		return 0x0, nil, errors.Base("key unpack function for %v not implemented", firstByte)
+		return ClaimShortIdPrefix, ClaimShortIDKeyUnpack(key), nil
 	case EffectiveAmount:
 		return EffectiveAmount, EffectiveAmountKeyUnpack(key), nil
 	case ClaimExpiration:
@@ -2283,7 +2385,7 @@ func UnpackGenericValue(key, value []byte) (byte, interface{}, error) {
 		return ChannelToClaim, ChannelToClaimValueUnpack(value), nil
 
 	case ClaimShortIdPrefix:
-		return 0x0, nil, errors.Base("value unpack not implemented for key %v", key)
+		return ClaimShortIdPrefix, ClaimShortIDValueUnpack(value), nil
 	case EffectiveAmount:
 		return EffectiveAmount, EffectiveAmountValueUnpack(value), nil
 	case ClaimExpiration:
diff --git a/db/prefixes/prefixes_test.go b/db/prefixes/prefixes_test.go
index 515fc59..11ba64d 100644
--- a/db/prefixes/prefixes_test.go
+++ b/db/prefixes/prefixes_test.go
@@ -44,6 +44,90 @@ func testInit(filePath string) (*grocksdb.DB, [][]string, func()) {
 	return db, records, toDefer
 }
 
+func TestClaimShortID(t *testing.T) {
+
+	filePath := "../../resources/claim_short_id_prefix.csv"
+
+	wOpts := grocksdb.NewDefaultWriteOptions()
+	db, records, toDefer := testInit(filePath)
+	defer toDefer()
+	for _, record := range records {
+		key, err := hex.DecodeString(record[0])
+		if err != nil {
+			log.Println(err)
+		}
+		val, err := hex.DecodeString(record[1])
+		if err != nil {
+			log.Println(err)
+		}
+		db.Put(wOpts, key, val)
+	}
+	// test prefix
+	options := dbpkg.NewIterateOptions().WithPrefix([]byte{prefixes.ClaimShortIdPrefix}).WithIncludeValue(true)
+	ch := dbpkg.Iter(db, options)
+	var i = 0
+	for kv := range ch {
+		// log.Println(kv.Key)
+		gotKey := kv.Key.(*prefixes.ClaimShortIDKey).PackKey()
+
+		keyPartial1 := prefixes.ClaimShortIDKeyPackPartial(kv.Key.(*prefixes.ClaimShortIDKey), 1)
+		keyPartial2 := prefixes.ClaimShortIDKeyPackPartial(kv.Key.(*prefixes.ClaimShortIDKey), 2)
+		keyPartial3 := prefixes.ClaimShortIDKeyPackPartial(kv.Key.(*prefixes.ClaimShortIDKey), 3)
+
+		// Check pack partial for sanity
+		if !bytes.HasPrefix(gotKey, keyPartial1) {
+			t.Errorf("%+v should be prefix of %+v\n", keyPartial1, gotKey)
+		}
+		if !bytes.HasPrefix(gotKey, keyPartial2) {
+			t.Errorf("%+v should be prefix of %+v\n", keyPartial2, gotKey)
+		}
+		if !bytes.HasPrefix(gotKey, keyPartial3) {
+			t.Errorf("%+v should be prefix of %+v\n", keyPartial3, gotKey)
+		}
+
+		got := kv.Value.(*prefixes.ClaimShortIDValue).PackValue()
+		wantKey, err := hex.DecodeString(records[i][0])
+		if err != nil {
+			log.Println(err)
+		}
+		want, err := hex.DecodeString(records[i][1])
+		if err != nil {
+			log.Println(err)
+		}
+		if !bytes.Equal(gotKey, wantKey) {
+			t.Errorf("gotKey: %+v, wantKey: %+v\n", gotKey, wantKey)
+		}
+		if !bytes.Equal(got, want) {
+			t.Errorf("got: %+v, want: %+v\n", got, want)
+		}
+		i++
+	}
+
+	// Test start / stop
+	start, err := hex.DecodeString(records[0][0])
+	if err != nil {
+		log.Println(err)
+	}
+	stop, err := hex.DecodeString(records[9][0])
+	if err != nil {
+		log.Println(err)
+	}
+	options2 := dbpkg.NewIterateOptions().WithStart(start).WithStop(stop).WithIncludeValue(true)
+	ch2 := dbpkg.Iter(db, options2)
+	i = 0
+	for kv := range ch2 {
+		got := kv.Value.(*prefixes.ClaimShortIDValue).PackValue()
+		want, err := hex.DecodeString(records[i][1])
+		if err != nil {
+			log.Println(err)
+		}
+		if !bytes.Equal(got, want) {
+			t.Errorf("got: %+v, want: %+v\n", got, want)
+		}
+		i++
+	}
+}
+
 func TestClaimToChannel(t *testing.T) {
 
 	filePath := "../../resources/claim_to_channel.csv"
diff --git a/main.go b/main.go
index 86dbea0..149f730 100644
--- a/main.go
+++ b/main.go
@@ -38,7 +38,7 @@ func main() {
 
 		options := &db.IterOptions{
 			FillCache:    false,
-			Prefix:       []byte{prefixes.ClaimToChannel},
+			Prefix:       []byte{prefixes.ClaimShortIdPrefix},
 			Start:        nil,
 			Stop:         nil,
 			IncludeStart: true,
@@ -49,7 +49,7 @@ func main() {
 			RawValue:     true,
 		}
 
-		db.ReadWriteRawN(dbVal, options, "./resources/claim_to_channel.csv", 10)
+		db.ReadWriteRawN(dbVal, options, "./resources/claim_short_id_prefix.csv", 10)
 
 		return
 	}
diff --git a/resources/claim_short_id_prefix.csv b/resources/claim_short_id_prefix.csv
new file mode 100644
index 0000000..fe322c6
--- /dev/null
+++ b/resources/claim_short_id_prefix.csv
@@ -0,0 +1,10 @@
+460001000161002741130000,002741130000
+46000100026135002741130000,002741130000
+4600010003613531002741130000,002741130000
+460001000461353164002741130000,002741130000
+46000100056135316435002741130000,002741130000
+4600010006613531643563002741130000,002741130000
+460001000761353164356335002741130000,002741130000
+46000100086135316435633536002741130000,002741130000
+4600010009613531643563353637002741130000,002741130000
+460001000a61353164356335363734002741130000,002741130000