BlockHeader
This commit is contained in:
parent
ee4369c3bc
commit
6283d9672e
4 changed files with 168 additions and 4 deletions
|
@ -245,13 +245,85 @@ class BlockHeaderValue(NamedTuple):
|
|||
|
||||
// BlockHeaderKey is the RocksDB key for a block header record: a
// one-byte table prefix followed by the big-endian block height
// (Python struct format b'>L').
//
// BUG FIX: the diff rendering left both the old `Height int32` and the
// new `Height uint32` fields in place; a struct cannot declare the same
// field twice, so only the uint32 (post-change) field is kept.
type BlockHeaderKey struct {
	Prefix []byte `json:"prefix"`
	Height uint32 `json:"height"`
}
|
||||
|
||||
// BlockHeaderValue is the RocksDB value for a block header record:
// the raw serialized header bytes (packed/unpacked as a fixed
// 112-byte blob by PackValue/BlockHeaderValueUnpack).
type BlockHeaderValue struct {
	Header []byte `json:"header"`
}
|
||||
|
||||
func (k *BlockHeaderKey) PackKey() []byte {
|
||||
prefixLen := 1
|
||||
// b'>L'
|
||||
n := prefixLen + 4
|
||||
key := make([]byte, n)
|
||||
copy(key, k.Prefix)
|
||||
binary.BigEndian.PutUint32(key[prefixLen:], k.Height)
|
||||
|
||||
return key
|
||||
}
|
||||
|
||||
func (v *BlockHeaderValue) PackValue() []byte {
|
||||
value := make([]byte, 112)
|
||||
copy(value, v.Header)
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
func BlockHeaderKeyPackPartialNFields(nFields int) func(*BlockHeaderKey) []byte {
|
||||
return func(u *BlockHeaderKey) []byte {
|
||||
return BlockHeaderKeyPackPartial(u, nFields)
|
||||
}
|
||||
}
|
||||
|
||||
func BlockHeaderKeyPackPartial(k *BlockHeaderKey, nFields int) []byte {
|
||||
// Limit nFields between 0 and number of fields, we always at least need
|
||||
// the prefix, and we never need to iterate past the number of fields.
|
||||
if nFields > 1 {
|
||||
nFields = 1
|
||||
}
|
||||
if nFields < 0 {
|
||||
nFields = 0
|
||||
}
|
||||
|
||||
// b'>4sLH'
|
||||
prefixLen := 1
|
||||
var n = prefixLen
|
||||
for i := 0; i <= nFields; i++ {
|
||||
switch i {
|
||||
case 1:
|
||||
n += 4
|
||||
}
|
||||
}
|
||||
|
||||
key := make([]byte, n)
|
||||
|
||||
for i := 0; i <= nFields; i++ {
|
||||
switch i {
|
||||
case 0:
|
||||
copy(key, k.Prefix)
|
||||
case 1:
|
||||
binary.BigEndian.PutUint32(key[prefixLen:], k.Height)
|
||||
}
|
||||
}
|
||||
|
||||
return key
|
||||
}
|
||||
|
||||
func BlockHeaderKeyUnpack(key []byte) *BlockHeaderKey {
|
||||
prefixLen := 1
|
||||
return &BlockHeaderKey{
|
||||
Prefix: key[:prefixLen],
|
||||
Height: binary.BigEndian.Uint32(key[prefixLen:]),
|
||||
}
|
||||
}
|
||||
|
||||
func BlockHeaderValueUnpack(value []byte) *BlockHeaderValue {
|
||||
return &BlockHeaderValue{
|
||||
Header: value[:112],
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
class ClaimToTXOKey(typing.NamedTuple):
|
||||
|
@ -295,7 +367,7 @@ type ClaimToTXOValue struct {
|
|||
}
|
||||
|
||||
func (v *ClaimToTXOValue) NormalizedName() string {
|
||||
//TODO implement
|
||||
//TODO implement? Might not need to do anything.
|
||||
return v.Name
|
||||
}
|
||||
|
||||
|
@ -2519,8 +2591,11 @@ func UnpackGenericKey(key []byte) (byte, interface{}, error) {
|
|||
|
||||
case Tx:
|
||||
case BlockHash:
|
||||
return 0x0, nil, errors.Base("key unpack function for %v not implemented", firstByte)
|
||||
case Header:
|
||||
return Header, BlockHeaderKeyUnpack(key), nil
|
||||
case TxNum:
|
||||
return 0x0, nil, errors.Base("key unpack function for %v not implemented", firstByte)
|
||||
case TxCount:
|
||||
case TxHash:
|
||||
return 0x0, nil, errors.Base("key unpack function for %v not implemented", firstByte)
|
||||
|
@ -2590,8 +2665,11 @@ func UnpackGenericValue(key, value []byte) (byte, interface{}, error) {
|
|||
|
||||
case Tx:
|
||||
case BlockHash:
|
||||
return 0x0, nil, errors.Base("value unpack not implemented for key %v", key)
|
||||
case Header:
|
||||
return Header, BlockHeaderValueUnpack(value), nil
|
||||
case TxNum:
|
||||
return 0x0, nil, errors.Base("value unpack not implemented for key %v", key)
|
||||
case TxCount:
|
||||
case TxHash:
|
||||
return 0x0, nil, errors.Base("value unpack not implemented for key %v", key)
|
||||
|
|
|
@ -44,6 +44,82 @@ func testInit(filePath string) (*grocksdb.DB, [][]string, func()) {
|
|||
return db, records, toDefer
|
||||
}
|
||||
|
||||
func TestBlockHeader(t *testing.T) {
|
||||
|
||||
filePath := "../../resources/header.csv"
|
||||
|
||||
wOpts := grocksdb.NewDefaultWriteOptions()
|
||||
db, records, toDefer := testInit(filePath)
|
||||
defer toDefer()
|
||||
for _, record := range records {
|
||||
key, err := hex.DecodeString(record[0])
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
val, err := hex.DecodeString(record[1])
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
db.Put(wOpts, key, val)
|
||||
}
|
||||
// test prefix
|
||||
options := dbpkg.NewIterateOptions().WithPrefix([]byte{prefixes.Header}).WithIncludeValue(true)
|
||||
ch := dbpkg.Iter(db, options)
|
||||
var i = 0
|
||||
for kv := range ch {
|
||||
// log.Println(kv.Key)
|
||||
gotKey := kv.Key.(*prefixes.BlockHeaderKey).PackKey()
|
||||
|
||||
keyPartial1 := prefixes.BlockHeaderKeyPackPartial(kv.Key.(*prefixes.BlockHeaderKey), 1)
|
||||
|
||||
// Check pack partial for sanity
|
||||
if !bytes.HasPrefix(gotKey, keyPartial1) {
|
||||
t.Errorf("%+v should be prefix of %+v\n", keyPartial1, gotKey)
|
||||
}
|
||||
|
||||
got := kv.Value.(*prefixes.BlockHeaderValue).PackValue()
|
||||
wantKey, err := hex.DecodeString(records[i][0])
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
want, err := hex.DecodeString(records[i][1])
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
if !bytes.Equal(gotKey, wantKey) {
|
||||
t.Errorf("gotKey: %+v, wantKey: %+v\n", got, want)
|
||||
}
|
||||
if !bytes.Equal(got, want) {
|
||||
t.Errorf("got: %+v, want: %+v\n", got, want)
|
||||
}
|
||||
i++
|
||||
}
|
||||
|
||||
// Test start / stop
|
||||
start, err := hex.DecodeString(records[0][0])
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
stop, err := hex.DecodeString(records[9][0])
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
options2 := dbpkg.NewIterateOptions().WithStart(start).WithStop(stop).WithIncludeValue(true)
|
||||
ch2 := dbpkg.Iter(db, options2)
|
||||
i = 0
|
||||
for kv := range ch2 {
|
||||
got := kv.Value.(*prefixes.BlockHeaderValue).PackValue()
|
||||
want, err := hex.DecodeString(records[i][1])
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
if !bytes.Equal(got, want) {
|
||||
t.Errorf("got: %+v, want: %+v\n", got, want)
|
||||
}
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
func TestClaimToTXO(t *testing.T) {
|
||||
|
||||
filePath := "../../resources/claim_to_txo.csv"
|
||||
|
|
4
main.go
4
main.go
|
@ -38,7 +38,7 @@ func main() {
|
|||
|
||||
options := &db.IterOptions{
|
||||
FillCache: false,
|
||||
Prefix: []byte{prefixes.ClaimToTXO},
|
||||
Prefix: []byte{prefixes.Header},
|
||||
Start: nil,
|
||||
Stop: nil,
|
||||
IncludeStart: true,
|
||||
|
@ -49,7 +49,7 @@ func main() {
|
|||
RawValue: true,
|
||||
}
|
||||
|
||||
db.ReadWriteRawN(dbVal, options, "./resources/claim_to_txo.csv", 10)
|
||||
db.ReadWriteRawN(dbVal, options, "./resources/header.csv", 10)
|
||||
|
||||
return
|
||||
}
|
||||
|
|
10
resources/header.csv
Normal file
10
resources/header.csv
Normal file
|
@ -0,0 +1,10 @@
|
|||
4800000000,010000000000000000000000000000000000000000000000000000000000000000000000cc59e59ff97ac092b55e423aa5495151ed6fb80570a5bb78cd5bd1c3821c21b8010000000000000000000000000000000000000000000000000000000000000033193156ffff001f07050000
|
||||
4800000001,0000002063f4346a4db34fdfce29a70f5e8d11f065f6b91602b7036c7f22f3a03b28899cba888e2f9c037f831046f8ad09f6d378f79c728d003b177a64d29621f481da5d01000000000000000000000000000000000000000000000000000000000000003c406b5746e1001f5b4f0000
|
||||
4800000002,00000020246cb85843ac936d55388f2ff288b011add5b1b20cca9cfd19a403ca2c9ecbde09d8734d81b5f2eb1b653caf17491544ddfbc72f2f4c0c3f22a3362db5ba9d4701000000000000000000000000000000000000000000000000000000000000003d406b57ffff001f4ff20000
|
||||
4800000003,000000200044e1258b865d262587c28ff98853bc52bb31266230c1c648cc9004047a5428e285dbf24334585b9a924536a717160ee185a86d1eeb7b19684538685eca761a01000000000000000000000000000000000000000000000000000000000000003d406b5746e1001fce9c0100
|
||||
4800000004,00000020bbf8980e3f7604896821203bf62f97f311124da1fbb95bf523fcfdb356ad19c9d83cf1408debbd631950b7a95b0c940772119cd8a615a3d44601568713fec80c01000000000000000000000000000000000000000000000000000000000000003e406b573dc6001fec7b0000
|
||||
4800000005,000000201a650b9b7b9d132e257ff6b336ba7cd96b1796357c4fc8dd7d0bd1ff1de057d547638e54178dbdddf2e81a3b7566860e5264df6066755f9760a893f5caecc57901000000000000000000000000000000000000000000000000000000000000003e406b5773ae001fcf770000
|
||||
4800000006,000000206d694b93a2bb5ac23a13ed6749a789ca751cf73d5982c459e0cd9d5d303da74cec91627e0dba856b933983425d7f72958e8f974682632a0fa2acee9cfd81940101000000000000000000000000000000000000000000000000000000000000003e406b578399001f225c0100
|
||||
4800000007,00000020b57808c188b7315583cf120fe89de923583bc7a8ebff03189145b86bf859b21ba3c4a19948a1263722c45c5601fd10a7aea7cf73bfa45e060508f109155e80ab01000000000000000000000000000000000000000000000000000000000000003f406b571787001f08160700
|
||||
4800000008,00000020a6a5b330e816242d54c8586ba9b6d63c19d921171ef3d4525b8ffc635742e83a0fc2da46cf0de0057c1b9fc93d997105ff6cf2c8c43269b446c1dbf5ac18be8c010000000000000000000000000000000000000000000000000000000000000040406b570ae1761edd8f0300
|
||||
4800000009,00000020b8447f415279dffe8a09afe6f6d5e335a2f6911fce8e1d1866723d5e5e8a53067356a733f87e592ea133328792dd9d676ed83771c8ff0f519928ce752f159ba6010000000000000000000000000000000000000000000000000000000000000040406b57139d681ed40d0000
|
|
Loading…
Reference in a new issue