Merge pull request #1045 from davecgh/multi_utxoentry_outpoints

multi: Rework utxoset/view to use outpoints.
Olaoluwa Osuntokun 2018-05-30 19:59:44 -07:00 committed by GitHub
commit 86fed78113
23 changed files with 1533 additions and 1363 deletions


@ -399,7 +399,7 @@ func (b *BlockChain) calcSequenceLock(node *blockNode, tx *btcutil.Tx, utxoView
nextHeight := node.height + 1 nextHeight := node.height + 1
for txInIndex, txIn := range mTx.TxIn { for txInIndex, txIn := range mTx.TxIn {
utxo := utxoView.LookupEntry(&txIn.PreviousOutPoint.Hash) utxo := utxoView.LookupEntry(txIn.PreviousOutPoint)
if utxo == nil { if utxo == nil {
str := fmt.Sprintf("output %v referenced from "+ str := fmt.Sprintf("output %v referenced from "+
"transaction %s:%d either does not exist or "+ "transaction %s:%d either does not exist or "+
@ -848,7 +848,7 @@ func (b *BlockChain) reorganizeChain(detachNodes, attachNodes *list.List) error
// journal. // journal.
var stxos []spentTxOut var stxos []spentTxOut
err = b.db.View(func(dbTx database.Tx) error { err = b.db.View(func(dbTx database.Tx) error {
stxos, err = dbFetchSpendJournalEntry(dbTx, block, view) stxos, err = dbFetchSpendJournalEntry(dbTx, block)
return err return err
}) })
if err != nil { if err != nil {
@ -859,7 +859,7 @@ func (b *BlockChain) reorganizeChain(detachNodes, attachNodes *list.List) error
detachBlocks = append(detachBlocks, block) detachBlocks = append(detachBlocks, block)
detachSpentTxOuts = append(detachSpentTxOuts, stxos) detachSpentTxOuts = append(detachSpentTxOuts, stxos)
err = view.disconnectTransactions(block, stxos) err = view.disconnectTransactions(b.db, block, stxos)
if err != nil { if err != nil {
return err return err
} }
@ -961,7 +961,8 @@ func (b *BlockChain) reorganizeChain(detachNodes, attachNodes *list.List) error
// Update the view to unspend all of the spent txos and remove // Update the view to unspend all of the spent txos and remove
// the utxos created by the block. // the utxos created by the block.
err = view.disconnectTransactions(block, detachSpentTxOuts[i]) err = view.disconnectTransactions(b.db, block,
detachSpentTxOuts[i])
if err != nil { if err != nil {
return err return err
} }
@ -1702,6 +1703,11 @@ func New(config *Config) (*BlockChain, error) {
return nil, err return nil, err
} }
// Perform any upgrades to the various chain-specific buckets as needed.
if err := b.maybeUpgradeDbBuckets(config.Interrupt); err != nil {
return nil, err
}
// Initialize and catch up all of the currently active optional indexes // Initialize and catch up all of the currently active optional indexes
// as needed. // as needed.
if config.IndexManager != nil { if config.IndexManager != nil {


@ -9,7 +9,7 @@ import (
"encoding/binary" "encoding/binary"
"fmt" "fmt"
"math/big" "math/big"
"sort" "sync"
"time" "time"
"github.com/btcsuite/btcd/chaincfg/chainhash" "github.com/btcsuite/btcd/chaincfg/chainhash"
@ -23,6 +23,15 @@ const (
// constant from wire and is only provided here for convenience since // constant from wire and is only provided here for convenience since
// wire.MaxBlockHeaderPayload is quite long. // wire.MaxBlockHeaderPayload is quite long.
blockHdrSize = wire.MaxBlockHeaderPayload blockHdrSize = wire.MaxBlockHeaderPayload
// latestUtxoSetBucketVersion is the current version of the utxo set
// bucket that is used to track all unspent outputs.
latestUtxoSetBucketVersion = 2
// latestSpendJournalBucketVersion is the current version of the spend
// journal bucket that is used to track all spent transactions for use
// in reorgs.
latestSpendJournalBucketVersion = 1
) )
var ( var (
@ -42,13 +51,21 @@ var (
// chain state. // chain state.
chainStateKeyName = []byte("chainstate") chainStateKeyName = []byte("chainstate")
// spendJournalVersionKeyName is the name of the db key used to store
// the version of the spend journal currently in the database.
spendJournalVersionKeyName = []byte("spendjournalversion")
// spendJournalBucketName is the name of the db bucket used to house // spendJournalBucketName is the name of the db bucket used to house
// transaction outputs that are spent in each block. // transaction outputs that are spent in each block.
spendJournalBucketName = []byte("spendjournal") spendJournalBucketName = []byte("spendjournal")
// utxoSetVersionKeyName is the name of the db key used to store the
// version of the utxo set currently in the database.
utxoSetVersionKeyName = []byte("utxosetversion")
// utxoSetBucketName is the name of the db bucket used to house the // utxoSetBucketName is the name of the db bucket used to house the
// unspent transaction output set. // unspent transaction output set.
utxoSetBucketName = []byte("utxoset") utxoSetBucketName = []byte("utxosetv2")
// byteOrder is the preferred byte order used for serializing numeric // byteOrder is the preferred byte order used for serializing numeric
// fields for storage in the database. // fields for storage in the database.
@ -94,6 +111,45 @@ func isDbBucketNotFoundErr(err error) bool {
return ok && dbErr.ErrorCode == database.ErrBucketNotFound return ok && dbErr.ErrorCode == database.ErrBucketNotFound
} }
// dbFetchVersion fetches an individual version with the given key from the
// metadata bucket. It is primarily used to track versions on entities such as
// buckets. It returns zero if the provided key does not exist.
func dbFetchVersion(dbTx database.Tx, key []byte) uint32 {
serialized := dbTx.Metadata().Get(key)
if serialized == nil {
return 0
}
return byteOrder.Uint32(serialized[:])
}
// dbPutVersion uses an existing database transaction to update the provided
// key in the metadata bucket to the given version. It is primarily used to
// track versions on entities such as buckets.
func dbPutVersion(dbTx database.Tx, key []byte, version uint32) error {
var serialized [4]byte
byteOrder.PutUint32(serialized[:], version)
return dbTx.Metadata().Put(key, serialized[:])
}
// dbFetchOrCreateVersion uses an existing database transaction to attempt to
// fetch the provided key from the metadata bucket as a version and in the case
// it doesn't exist, it adds the entry with the provided default version and
// returns that. This is useful during upgrades to automatically handle loading
// and adding version keys as necessary.
func dbFetchOrCreateVersion(dbTx database.Tx, key []byte, defaultVersion uint32) (uint32, error) {
version := dbFetchVersion(dbTx, key)
if version == 0 {
version = defaultVersion
err := dbPutVersion(dbTx, key, version)
if err != nil {
return 0, err
}
}
return version, nil
}
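
The version helpers above are meant to run inside a database transaction during chain initialization so the bucket version keys are created on first use and can be checked before any migration. A minimal usage sketch, assuming an open database db from the btcd database package and the key/version constants defined in this file (the upgrade branch is only illustrative):

	// Hypothetical sketch: ensure the utxo set bucket version key exists and
	// detect whether a migration is required.
	err := db.Update(func(dbTx database.Tx) error {
		version, err := dbFetchOrCreateVersion(dbTx, utxoSetVersionKeyName,
			latestUtxoSetBucketVersion)
		if err != nil {
			return err
		}
		if version < latestUtxoSetBucketVersion {
			// An older bucket would need to be migrated here (see the
			// maybeUpgradeDbBuckets call added to New above).
		}
		return nil
	})
	if err != nil {
		// Handle the error.
	}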
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// The transaction spend journal consists of an entry for each block connected // The transaction spend journal consists of an entry for each block connected
// to the main chain which contains the transaction outputs the block spends // to the main chain which contains the transaction outputs the block spends
@ -110,18 +166,23 @@ func isDbBucketNotFoundErr(err error) bool {
// //
// NOTE: This format is NOT self describing. The additional details such as // NOTE: This format is NOT self describing. The additional details such as
// the number of entries (transaction inputs) are expected to come from the // the number of entries (transaction inputs) are expected to come from the
// block itself and the utxo set. The rationale in doing this is to save a // block itself and the utxo set (for legacy entries). The rationale in doing
// significant amount of space. This is also the reason the spent outputs are // this is to save space. This is also the reason the spent outputs are
// serialized in the reverse order they are spent because later transactions // serialized in the reverse order they are spent because later transactions are
// are allowed to spend outputs from earlier ones in the same block. // allowed to spend outputs from earlier ones in the same block.
//
// The reserved field below used to keep track of the version of the containing
// transaction when the height in the header code was non-zero, however the
// height is always non-zero now, but keeping the extra reserved field allows
// backwards compatibility.
// //
// The serialized format is: // The serialized format is:
// //
// [<header code><version><compressed txout>],... // [<header code><reserved><compressed txout>],...
// //
// Field Type Size // Field Type Size
// header code VLQ variable // header code VLQ variable
// version VLQ variable // reserved byte 1
// compressed txout // compressed txout
// compressed amount VLQ variable // compressed amount VLQ variable
// compressed script []byte variable // compressed script []byte variable
@ -130,23 +191,17 @@ func isDbBucketNotFoundErr(err error) bool {
// bit 0 - containing transaction is a coinbase // bit 0 - containing transaction is a coinbase
// bits 1-x - height of the block that contains the spent txout // bits 1-x - height of the block that contains the spent txout
// //
// NOTE: The header code and version are only encoded when the spent txout was
// the final unspent output of the containing transaction. Otherwise, the
// header code will be 0 and the version is not serialized at all. This is
// done because that information is only needed when the utxo set no longer
// has it.
//
// Example 1: // Example 1:
// From block 170 in main blockchain. // From block 170 in main blockchain.
// //
// 1301320511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5c // 1300320511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5c
// <><><------------------------------------------------------------------> // <><><------------------------------------------------------------------>
// | | | // | | |
// | version compressed txout // | reserved compressed txout
// header code // header code
// //
// - header code: 0x13 (coinbase, height 9) // - header code: 0x13 (coinbase, height 9)
// - transaction version: 1 // - reserved: 0x00
// - compressed txout 0: // - compressed txout 0:
// - 0x32: VLQ-encoded compressed amount for 5000000000 (50 BTC) // - 0x32: VLQ-encoded compressed amount for 5000000000 (50 BTC)
// - 0x05: special script type pay-to-pubkey // - 0x05: special script type pay-to-pubkey
@ -155,22 +210,22 @@ func isDbBucketNotFoundErr(err error) bool {
// Example 2: // Example 2:
// Adapted from block 100025 in main blockchain. // Adapted from block 100025 in main blockchain.
// //
// 0091f20f006edbc6c4d31bae9f1ccc38538a114bf42de65e868b99700186c64700b2fb57eadf61e106a100a7445a8c3f67898841ec // 8b99700091f20f006edbc6c4d31bae9f1ccc38538a114bf42de65e868b99700086c64700b2fb57eadf61e106a100a7445a8c3f67898841ec
// <><----------------------------------------------><----><><----------------------------------------------> // <----><><----------------------------------------------><----><><---------------------------------------------->
// | | | | | // | | | | | |
// | compressed txout | version compressed txout // | reserved compressed txout | reserved compressed txout
// header code header code // header code header code
// //
// - Last spent output: // - Last spent output:
// - header code: 0x00 (was not the final unspent output for containing tx) // - header code: 0x8b9970 (not coinbase, height 100024)
// - transaction version: Nothing since header code is 0 // - reserved: 0x00
// - compressed txout: // - compressed txout:
// - 0x91f20f: VLQ-encoded compressed amount for 34405000000 (344.05 BTC) // - 0x91f20f: VLQ-encoded compressed amount for 34405000000 (344.05 BTC)
// - 0x00: special script type pay-to-pubkey-hash // - 0x00: special script type pay-to-pubkey-hash
// - 0x6e...86: pubkey hash // - 0x6e...86: pubkey hash
// - Second to last spent output: // - Second to last spent output:
// - header code: 0x8b9970 (not coinbase, height 100024) // - header code: 0x8b9970 (not coinbase, height 100024)
// - transaction version: 1 // - reserved: 0x00
// - compressed txout: // - compressed txout:
// - 0x86c647: VLQ-encoded compressed amount for 13761000000 (137.61 BTC) // - 0x86c647: VLQ-encoded compressed amount for 13761000000 (137.61 BTC)
// - 0x00: special script type pay-to-pubkey-hash // - 0x00: special script type pay-to-pubkey-hash
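
As a cross-check of the header codes in the examples above: the code is just the creating block height shifted left by one bit with the coinbase flag in bit 0, and it is stored using the MSB base-128 VLQ described earlier. The following self-contained sketch mirrors that scheme (my reading of the VLQ variant used here) and reproduces both example codes:

	package main

	import "fmt"

	// decodeVLQ decodes an MSB base-128 variable length quantity (sketch of
	// the scheme described in the format comments above).
	func decodeVLQ(serialized []byte) (uint64, int) {
		var n uint64
		var size int
		for _, val := range serialized {
			size++
			n = (n << 7) | uint64(val&0x7f)
			if val&0x80 != 0x80 {
				break
			}
			n++
		}
		return n, size
	}

	func main() {
		// Example 1: 0x13 -> coinbase, height 9.
		// Example 2: 0x8b9970 -> not coinbase, height 100024.
		for _, encoded := range [][]byte{{0x13}, {0x8b, 0x99, 0x70}} {
			code, _ := decodeVLQ(encoded)
			fmt.Printf("coinbase=%v height=%d\n", code&0x01 != 0, code>>1)
		}
	}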
@ -185,25 +240,15 @@ func isDbBucketNotFoundErr(err error) bool {
// when this spent txout is spending the last unspent output of the containing // when this spent txout is spending the last unspent output of the containing
// transaction. // transaction.
type spentTxOut struct { type spentTxOut struct {
compressed bool // The amount and public key script are compressed.
version int32 // The version of creating tx.
amount int64 // The amount of the output. amount int64 // The amount of the output.
pkScript []byte // The public key script for the output. pkScript []byte // The public key script for the output.
height int32 // Height of the block containing the creating tx.
// These fields are only set when this is spending the final output of isCoinBase bool // Whether creating tx is a coinbase.
// the creating tx.
height int32 // Height of the block containing the creating tx.
isCoinBase bool // Whether creating tx is a coinbase.
} }
// spentTxOutHeaderCode returns the calculated header code to be used when // spentTxOutHeaderCode returns the calculated header code to be used when
// serializing the provided stxo entry. // serializing the provided stxo entry.
func spentTxOutHeaderCode(stxo *spentTxOut) uint64 { func spentTxOutHeaderCode(stxo *spentTxOut) uint64 {
// The header code is 0 when there is no height set for the stxo.
if stxo.height == 0 {
return 0
}
// As described in the serialization format comments, the header code // As described in the serialization format comments, the header code
// encodes the height shifted over one bit and the coinbase flag in the // encodes the height shifted over one bit and the coinbase flag in the
// lowest bit. // lowest bit.
@ -218,13 +263,14 @@ func spentTxOutHeaderCode(stxo *spentTxOut) uint64 {
// spentTxOutSerializeSize returns the number of bytes it would take to // spentTxOutSerializeSize returns the number of bytes it would take to
// serialize the passed stxo according to the format described above. // serialize the passed stxo according to the format described above.
func spentTxOutSerializeSize(stxo *spentTxOut) int { func spentTxOutSerializeSize(stxo *spentTxOut) int {
headerCode := spentTxOutHeaderCode(stxo) size := serializeSizeVLQ(spentTxOutHeaderCode(stxo))
size := serializeSizeVLQ(headerCode) if stxo.height > 0 {
if headerCode != 0 { // The legacy v1 spend journal format conditionally tracked the
size += serializeSizeVLQ(uint64(stxo.version)) // containing transaction version when the height was non-zero,
// so this is required for backwards compat.
size += serializeSizeVLQ(0)
} }
return size + compressedTxOutSize(uint64(stxo.amount), stxo.pkScript, return size + compressedTxOutSize(uint64(stxo.amount), stxo.pkScript)
stxo.version, stxo.compressed)
} }
// putSpentTxOut serializes the passed stxo according to the format described // putSpentTxOut serializes the passed stxo according to the format described
@ -234,26 +280,20 @@ func spentTxOutSerializeSize(stxo *spentTxOut) int {
func putSpentTxOut(target []byte, stxo *spentTxOut) int { func putSpentTxOut(target []byte, stxo *spentTxOut) int {
headerCode := spentTxOutHeaderCode(stxo) headerCode := spentTxOutHeaderCode(stxo)
offset := putVLQ(target, headerCode) offset := putVLQ(target, headerCode)
if headerCode != 0 { if stxo.height > 0 {
offset += putVLQ(target[offset:], uint64(stxo.version)) // The legacy v1 spend journal format conditionally tracked the
// containing transaction version when the height was non-zero,
// so this is required for backwards compat.
offset += putVLQ(target[offset:], 0)
} }
return offset + putCompressedTxOut(target[offset:], uint64(stxo.amount), return offset + putCompressedTxOut(target[offset:], uint64(stxo.amount),
stxo.pkScript, stxo.version, stxo.compressed) stxo.pkScript)
} }
// decodeSpentTxOut decodes the passed serialized stxo entry, possibly followed // decodeSpentTxOut decodes the passed serialized stxo entry, possibly followed
// by other data, into the passed stxo struct. It returns the number of bytes // by other data, into the passed stxo struct. It returns the number of bytes
// read. // read.
// func decodeSpentTxOut(serialized []byte, stxo *spentTxOut) (int, error) {
// Since the serialized stxo entry does not contain the height, version, or
// coinbase flag of the containing transaction when it still has utxos, the
// caller is responsible for passing in the containing transaction version in
// that case. The provided version is ignore when it is serialized as a part of
// the stxo.
//
// An error will be returned if the version is not serialized as a part of the
// stxo and is also not provided to the function.
func decodeSpentTxOut(serialized []byte, stxo *spentTxOut, txVersion int32) (int, error) {
// Ensure there are bytes to decode. // Ensure there are bytes to decode.
if len(serialized) == 0 { if len(serialized) == 0 {
return 0, errDeserialize("no serialized bytes") return 0, errDeserialize("no serialized bytes")
@ -266,47 +306,34 @@ func decodeSpentTxOut(serialized []byte, stxo *spentTxOut, txVersion int32) (int
"header code") "header code")
} }
// Decode the header code and deserialize the containing transaction // Decode the header code.
// version if needed.
// //
// Bit 0 indicates containing transaction is a coinbase. // Bit 0 indicates containing transaction is a coinbase.
// Bits 1-x encode height of containing transaction. // Bits 1-x encode height of containing transaction.
if code != 0 { stxo.isCoinBase = code&0x01 != 0
version, bytesRead := deserializeVLQ(serialized[offset:]) stxo.height = int32(code >> 1)
if stxo.height > 0 {
// The legacy v1 spend journal format conditionally tracked the
// containing transaction version when the height was non-zero,
// so this is required for backwards compat.
_, bytesRead := deserializeVLQ(serialized[offset:])
offset += bytesRead offset += bytesRead
if offset >= len(serialized) { if offset >= len(serialized) {
return offset, errDeserialize("unexpected end of data " + return offset, errDeserialize("unexpected end of data " +
"after version") "after reserved")
} }
stxo.isCoinBase = code&0x01 != 0
stxo.height = int32(code >> 1)
stxo.version = int32(version)
} else {
// Ensure a tx version was specified if the stxo did not encode
// it. This should never happen unless there is database
// corruption or this function is being called without the
// proper state.
if txVersion == -1 {
return offset, AssertError("decodeSpentTxOut called " +
"without a containing tx version when the " +
"serialized stxo that does not encode the " +
"version")
}
stxo.version = txVersion
} }
// Decode the compressed txout. // Decode the compressed txout.
compAmount, compScript, bytesRead, err := decodeCompressedTxOut( amount, pkScript, bytesRead, err := decodeCompressedTxOut(
serialized[offset:], stxo.version) serialized[offset:])
offset += bytesRead offset += bytesRead
if err != nil { if err != nil {
return offset, errDeserialize(fmt.Sprintf("unable to decode "+ return offset, errDeserialize(fmt.Sprintf("unable to decode "+
"txout: %v", err)) "txout: %v", err))
} }
stxo.amount = int64(compAmount) stxo.amount = int64(amount)
stxo.pkScript = compScript stxo.pkScript = pkScript
stxo.compressed = true
return offset, nil return offset, nil
} }
@ -315,9 +342,8 @@ func decodeSpentTxOut(serialized []byte, stxo *spentTxOut, txVersion int32) (int
// //
// Since the serialization format is not self describing, as noted in the // Since the serialization format is not self describing, as noted in the
// format comments, this function also requires the transactions that spend the // format comments, this function also requires the transactions that spend the
// txouts and a utxo view that contains any remaining existing utxos in the // txouts.
// transactions referenced by the inputs to the passed transactions. func deserializeSpendJournalEntry(serialized []byte, txns []*wire.MsgTx) ([]spentTxOut, error) {
func deserializeSpendJournalEntry(serialized []byte, txns []*wire.MsgTx, view *UtxoViewpoint) ([]spentTxOut, error) {
// Calculate the total number of stxos. // Calculate the total number of stxos.
var numStxos int var numStxos int
for _, tx := range txns { for _, tx := range txns {
@ -341,7 +367,6 @@ func deserializeSpendJournalEntry(serialized []byte, txns []*wire.MsgTx, view *U
// Loop backwards through all transactions so everything is read in // Loop backwards through all transactions so everything is read in
// reverse order to match the serialization order. // reverse order to match the serialization order.
stxoIdx := numStxos - 1 stxoIdx := numStxos - 1
stxoInFlight := make(map[chainhash.Hash]int)
offset := 0 offset := 0
stxos := make([]spentTxOut, numStxos) stxos := make([]spentTxOut, numStxos)
for txIdx := len(txns) - 1; txIdx > -1; txIdx-- { for txIdx := len(txns) - 1; txIdx > -1; txIdx-- {
@ -354,36 +379,7 @@ func deserializeSpendJournalEntry(serialized []byte, txns []*wire.MsgTx, view *U
stxo := &stxos[stxoIdx] stxo := &stxos[stxoIdx]
stxoIdx-- stxoIdx--
// Get the transaction version for the stxo based on n, err := decodeSpentTxOut(serialized[offset:], stxo)
// whether or not it should be serialized as a part of
// the stxo. Recall that it is only serialized when the
// stxo spends the final utxo of a transaction. Since
// they are deserialized in reverse order, this means
// the first time an entry for a given containing tx is
// encountered that is not already in the utxo view it
// must have been the final spend and thus the extra
// data will be serialized with the stxo. Otherwise,
// the version must be pulled from the utxo entry.
//
// Since the view is not actually modified as the stxos
// are read here and it's possible later entries
// reference earlier ones, an inflight map is maintained
// to detect this case and pull the tx version from the
// entry that contains the version information as just
// described.
txVersion := int32(-1)
originHash := &txIn.PreviousOutPoint.Hash
entry := view.LookupEntry(originHash)
if entry != nil {
txVersion = entry.Version()
} else if idx, ok := stxoInFlight[*originHash]; ok {
txVersion = stxos[idx].version
} else {
stxoInFlight[*originHash] = stxoIdx + 1
}
n, err := decodeSpentTxOut(serialized[offset:], stxo,
txVersion)
offset += n offset += n
if err != nil { if err != nil {
return nil, errDeserialize(fmt.Sprintf("unable "+ return nil, errDeserialize(fmt.Sprintf("unable "+
@ -420,17 +416,18 @@ func serializeSpendJournalEntry(stxos []spentTxOut) []byte {
return serialized return serialized
} }
// dbFetchSpendJournalEntry fetches the spend journal entry for the passed // dbFetchSpendJournalEntry fetches the spend journal entry for the passed block
// block and deserializes it into a slice of spent txout entries. The provided // and deserializes it into a slice of spent txout entries.
// view MUST have the utxos referenced by all of the transactions available for //
// the passed block since that information is required to reconstruct the spent // NOTE: Legacy entries will not have the coinbase flag or height set unless it
// txouts. // was the final output spend in the containing transaction. It is up to the
func dbFetchSpendJournalEntry(dbTx database.Tx, block *btcutil.Block, view *UtxoViewpoint) ([]spentTxOut, error) { // caller to handle this properly by looking the information up in the utxo set.
func dbFetchSpendJournalEntry(dbTx database.Tx, block *btcutil.Block) ([]spentTxOut, error) {
// Exclude the coinbase transaction since it can't spend anything. // Exclude the coinbase transaction since it can't spend anything.
spendBucket := dbTx.Metadata().Bucket(spendJournalBucketName) spendBucket := dbTx.Metadata().Bucket(spendJournalBucketName)
serialized := spendBucket.Get(block.Hash()[:]) serialized := spendBucket.Get(block.Hash()[:])
blockTxns := block.MsgBlock().Transactions[1:] blockTxns := block.MsgBlock().Transactions[1:]
stxos, err := deserializeSpendJournalEntry(serialized, blockTxns, view) stxos, err := deserializeSpendJournalEntry(serialized, blockTxns)
if err != nil { if err != nil {
// Ensure any deserialization errors are returned as database // Ensure any deserialization errors are returned as database
// corruption errors. // corruption errors.
@ -468,219 +465,161 @@ func dbRemoveSpendJournalEntry(dbTx database.Tx, blockHash *chainhash.Hash) erro
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// The unspent transaction output (utxo) set consists of an entry for each // The unspent transaction output (utxo) set consists of an entry for each
// transaction which contains a utxo serialized using a format that is highly // unspent output using a format that is optimized to reduce space using domain
// optimized to reduce space using domain specific compression algorithms. This // specific compression algorithms. This format is a slightly modified version
// format is a slightly modified version of the format used in Bitcoin Core. // of the format used in Bitcoin Core.
// //
// The serialized format is: // Each entry is keyed by an outpoint as specified below. It is important to
// note that the key encoding uses a VLQ, which employs an MSB encoding so
// iteration of utxos when doing byte-wise comparisons will produce them in
// order.
// //
// <version><height><header code><unspentness bitmap>[<compressed txouts>,...] // The serialized key format is:
// <hash><output index>
//
// Field Type Size
// hash chainhash.Hash chainhash.HashSize
// output index VLQ variable
//
// The serialized value format is:
//
// <header code><compressed txout>
// //
// Field Type Size // Field Type Size
// version VLQ variable
// block height VLQ variable
// header code VLQ variable // header code VLQ variable
// unspentness bitmap []byte variable // compressed txout
// compressed txouts
// compressed amount VLQ variable // compressed amount VLQ variable
// compressed script []byte variable // compressed script []byte variable
// //
// The serialized header code format is: // The serialized header code format is:
// bit 0 - containing transaction is a coinbase // bit 0 - containing transaction is a coinbase
// bit 1 - output zero is unspent // bits 1-x - height of the block that contains the unspent txout
// bit 2 - output one is unspent
// bits 3-x - number of bytes in unspentness bitmap. When both bits 1 and 2
// are unset, it encodes N-1 since there must be at least one unspent
// output.
//
// The rationale for the header code scheme is as follows:
// - Transactions which only pay to a single output and a change output are
// extremely common, thus an extra byte for the unspentness bitmap can be
// avoided for them by encoding those two outputs in the low order bits.
// - Given it is encoded as a VLQ which can encode values up to 127 with a
// single byte, that leaves 4 bits to represent the number of bytes in the
// unspentness bitmap while still only consuming a single byte for the
// header code. In other words, an unspentness bitmap with up to 120
// transaction outputs can be encoded with a single-byte header code.
// This covers the vast majority of transactions.
// - Encoding N-1 bytes when both bits 1 and 2 are unset allows an additional
// 8 outpoints to be encoded before causing the header code to require an
// additional byte.
// //
// Example 1: // Example 1:
// From tx in main blockchain: // From tx in main blockchain:
// Blk 1, 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098 // Blk 1, 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098:0
// //
// 010103320496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52 // 03320496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52
// <><><><------------------------------------------------------------------> // <><------------------------------------------------------------------>
// | | \--------\ | // | |
// | height | compressed txout 0 // header code compressed txout
// version header code
// //
// - version: 1 // - header code: 0x03 (coinbase, height 1)
// - height: 1 // - compressed txout:
// - header code: 0x03 (coinbase, output zero unspent, 0 bytes of unspentness)
// - unspentness: Nothing since it is zero bytes
// - compressed txout 0:
// - 0x32: VLQ-encoded compressed amount for 5000000000 (50 BTC) // - 0x32: VLQ-encoded compressed amount for 5000000000 (50 BTC)
// - 0x04: special script type pay-to-pubkey // - 0x04: special script type pay-to-pubkey
// - 0x96...52: x-coordinate of the pubkey // - 0x96...52: x-coordinate of the pubkey
// //
// Example 2: // Example 2:
// From tx in main blockchain: // From tx in main blockchain:
// Blk 113931, 4a16969aa4764dd7507fc1de7f0baa4850a246de90c45e59a3207f9a26b5036f // Blk 113931, 4a16969aa4764dd7507fc1de7f0baa4850a246de90c45e59a3207f9a26b5036f:2
// //
// 0185f90b0a011200e2ccd6ec7c6e2e581349c77e067385fa8236bf8a800900b8025be1b3efc63b0ad48e7f9f10e87544528d58 // 8cf316800900b8025be1b3efc63b0ad48e7f9f10e87544528d58
// <><----><><><------------------------------------------><--------------------------------------------> // <----><------------------------------------------>
// | | | \-------------------\ | | // | |
// version | \--------\ unspentness | compressed txout 2 // header code compressed txout
// height header code compressed txout 0
// //
// - version: 1 // - header code: 0x8cf316 (not coinbase, height 113931)
// - height: 113931 // - compressed txout:
// - header code: 0x0a (output zero unspent, 1 byte in unspentness bitmap)
// - unspentness: [0x01] (bit 0 is set, so output 0+2 = 2 is unspent)
// NOTE: It's +2 since the first two outputs are encoded in the header code
// - compressed txout 0:
// - 0x12: VLQ-encoded compressed amount for 20000000 (0.2 BTC)
// - 0x00: special script type pay-to-pubkey-hash
// - 0xe2...8a: pubkey hash
// - compressed txout 2:
// - 0x8009: VLQ-encoded compressed amount for 15000000 (0.15 BTC) // - 0x8009: VLQ-encoded compressed amount for 15000000 (0.15 BTC)
// - 0x00: special script type pay-to-pubkey-hash // - 0x00: special script type pay-to-pubkey-hash
// - 0xb8...58: pubkey hash // - 0xb8...58: pubkey hash
// //
// Example 3: // Example 3:
// From tx in main blockchain: // From tx in main blockchain:
// Blk 338156, 1b02d1c8cfef60a189017b9a420c682cf4a0028175f2f563209e4ff61c8c3620 // Blk 338156, 1b02d1c8cfef60a189017b9a420c682cf4a0028175f2f563209e4ff61c8c3620:22
// //
// 0193d06c100000108ba5b9e763011dd46a006572d820e448e12d2bbb38640bc718e6 // a8a2588ba5b9e763011dd46a006572d820e448e12d2bbb38640bc718e6
// <><----><><----><--------------------------------------------------> // <----><-------------------------------------------------->
// | | | \-----------------\ | // | |
// version | \--------\ unspentness | // header code compressed txout
// height header code compressed txout 22
// //
// - version: 1 // - header code: 0xa8a258 (not coinbase, height 338156)
// - height: 338156 // - compressed txout:
// - header code: 0x10 (2+1 = 3 bytes in unspentness bitmap)
// NOTE: It's +1 since neither bit 1 nor 2 are set, so N-1 is encoded.
// - unspentness: [0x00 0x00 0x10] (bit 20 is set, so output 20+2 = 22 is unspent)
// NOTE: It's +2 since the first two outputs are encoded in the header code
// - compressed txout 22:
// - 0x8ba5b9e763: VLQ-encoded compressed amount for 366875659 (3.66875659 BTC) // - 0x8ba5b9e763: VLQ-encoded compressed amount for 366875659 (3.66875659 BTC)
// - 0x01: special script type pay-to-script-hash // - 0x01: special script type pay-to-script-hash
// - 0x1d...e6: script hash // - 0x1d...e6: script hash
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
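
The ordering claim above (byte-wise iteration over the keys yields outpoints in order) follows from the MSB property of the VLQ used for the output index: the encoding of a larger index never compares lower than that of a smaller one. A short sketch of the key layout and the ordering property, using my reading of the same VLQ scheme (not the package's exact implementation):

	package main

	import (
		"bytes"
		"fmt"
	)

	// encodeVLQ encodes n using the MSB base-128 scheme described above and
	// returns the encoded bytes (illustrative sketch only).
	func encodeVLQ(n uint64) []byte {
		var out []byte
		for highBit := false; ; highBit = true {
			b := byte(n & 0x7f)
			if highBit {
				b |= 0x80
			}
			out = append([]byte{b}, out...)
			if n <= 0x7f {
				break
			}
			n = (n >> 7) - 1
		}
		return out
	}

	func main() {
		var txid [32]byte // zero hash stands in for a real transaction hash
		// Keys are <32-byte hash><VLQ(output index)>.
		key0 := append(txid[:], encodeVLQ(0)...)     // index 0   -> 0x00
		key22 := append(txid[:], encodeVLQ(22)...)   // index 22  -> 0x16
		key128 := append(txid[:], encodeVLQ(128)...) // index 128 -> 0x80 0x00
		fmt.Println(bytes.Compare(key0, key22) < 0)   // true
		fmt.Println(bytes.Compare(key22, key128) < 0) // true
	}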
// maxUint32VLQSerializeSize is the maximum number of bytes a max uint32 takes
// to serialize as a VLQ.
var maxUint32VLQSerializeSize = serializeSizeVLQ(1<<32 - 1)
// outpointKeyPool defines a concurrent safe free list of byte slices used to
// provide temporary buffers for outpoint database keys.
var outpointKeyPool = sync.Pool{
New: func() interface{} {
b := make([]byte, chainhash.HashSize+maxUint32VLQSerializeSize)
return &b // Pointer to slice to avoid boxing alloc.
},
}
// outpointKey returns a key suitable for use as a database key in the utxo set
// while making use of a free list. A new buffer is allocated if there are not
// already any available on the free list. The returned byte slice should be
// returned to the free list by using the recycleOutpointKey function when the
// caller is done with it _unless_ the slice will need to live for longer than
// the caller can calculate such as when used to write to the database.
func outpointKey(outpoint wire.OutPoint) *[]byte {
// A VLQ employs an MSB encoding, so they are useful not only to reduce
// the amount of storage space, but also so iteration of utxos when
// doing byte-wise comparisons will produce them in order.
key := outpointKeyPool.Get().(*[]byte)
idx := uint64(outpoint.Index)
*key = (*key)[:chainhash.HashSize+serializeSizeVLQ(idx)]
copy(*key, outpoint.Hash[:])
putVLQ((*key)[chainhash.HashSize:], idx)
return key
}
// recycleOutpointKey puts the provided byte slice, which should have been
// obtained via the outpointKey function, back on the free list.
func recycleOutpointKey(key *[]byte) {
outpointKeyPool.Put(key)
}
// utxoEntryHeaderCode returns the calculated header code to be used when // utxoEntryHeaderCode returns the calculated header code to be used when
// serializing the provided utxo entry and the number of bytes needed to encode // serializing the provided utxo entry.
// the unspentness bitmap. func utxoEntryHeaderCode(entry *UtxoEntry) (uint64, error) {
func utxoEntryHeaderCode(entry *UtxoEntry, highestOutputIndex uint32) (uint64, int, error) { if entry.IsSpent() {
// The first two outputs are encoded separately, so offset the index return 0, AssertError("attempt to serialize spent utxo header")
// accordingly to calculate the correct number of bytes needed to encode
// up to the highest unspent output index.
numBitmapBytes := int((highestOutputIndex + 6) / 8)
// As previously described, one less than the number of bytes is encoded
// when both output 0 and 1 are spent because there must be at least one
// unspent output. Adjust the number of bytes to encode accordingly and
// encode the value by shifting it over 3 bits.
output0Unspent := !entry.IsOutputSpent(0)
output1Unspent := !entry.IsOutputSpent(1)
var numBitmapBytesAdjustment int
if !output0Unspent && !output1Unspent {
if numBitmapBytes == 0 {
return 0, 0, AssertError("attempt to serialize utxo " +
"header for fully spent transaction")
}
numBitmapBytesAdjustment = 1
}
headerCode := uint64(numBitmapBytes-numBitmapBytesAdjustment) << 3
// Set the coinbase, output 0, and output 1 bits in the header code
// accordingly.
if entry.isCoinBase {
headerCode |= 0x01 // bit 0
}
if output0Unspent {
headerCode |= 0x02 // bit 1
}
if output1Unspent {
headerCode |= 0x04 // bit 2
} }
return headerCode, numBitmapBytes, nil // As described in the serialization format comments, the header code
// encodes the height shifted over one bit and the coinbase flag in the
// lowest bit.
headerCode := uint64(entry.BlockHeight()) << 1
if entry.IsCoinBase() {
headerCode |= 0x01
}
return headerCode, nil
} }
// serializeUtxoEntry returns the entry serialized to a format that is suitable // serializeUtxoEntry returns the entry serialized to a format that is suitable
// for long-term storage. The format is described in detail above. // for long-term storage. The format is described in detail above.
func serializeUtxoEntry(entry *UtxoEntry) ([]byte, error) { func serializeUtxoEntry(entry *UtxoEntry) ([]byte, error) {
// Fully spent entries have no serialization. // Spent outputs have no serialization.
if entry.IsFullySpent() { if entry.IsSpent() {
return nil, nil return nil, nil
} }
// Determine the output order by sorting the sparse output index keys. // Encode the header code.
outputOrder := make([]int, 0, len(entry.sparseOutputs)) headerCode, err := utxoEntryHeaderCode(entry)
for outputIndex := range entry.sparseOutputs {
outputOrder = append(outputOrder, int(outputIndex))
}
sort.Ints(outputOrder)
// Encode the header code and determine the number of bytes the
// unspentness bitmap needs.
highIndex := uint32(outputOrder[len(outputOrder)-1])
headerCode, numBitmapBytes, err := utxoEntryHeaderCode(entry, highIndex)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// Calculate the size needed to serialize the entry. // Calculate the size needed to serialize the entry.
size := serializeSizeVLQ(uint64(entry.version)) + size := serializeSizeVLQ(headerCode) +
serializeSizeVLQ(uint64(entry.blockHeight)) + compressedTxOutSize(uint64(entry.Amount()), entry.PkScript())
serializeSizeVLQ(headerCode) + numBitmapBytes
for _, outputIndex := range outputOrder {
out := entry.sparseOutputs[uint32(outputIndex)]
if out.spent {
continue
}
size += compressedTxOutSize(uint64(out.amount), out.pkScript,
entry.version, out.compressed)
}
// Serialize the version, block height of the containing transaction, // Serialize the header code followed by the compressed unspent
// and header code. // transaction output.
serialized := make([]byte, size) serialized := make([]byte, size)
offset := putVLQ(serialized, uint64(entry.version)) offset := putVLQ(serialized, headerCode)
offset += putVLQ(serialized[offset:], uint64(entry.blockHeight)) offset += putCompressedTxOut(serialized[offset:], uint64(entry.Amount()),
offset += putVLQ(serialized[offset:], headerCode) entry.PkScript())
// Serialize the unspentness bitmap.
for i := uint32(0); i < uint32(numBitmapBytes); i++ {
unspentBits := byte(0)
for j := uint32(0); j < 8; j++ {
// The first 2 outputs are encoded via the header code,
// so adjust the output index accordingly.
if !entry.IsOutputSpent(2 + i*8 + j) {
unspentBits |= 1 << uint8(j)
}
}
serialized[offset] = unspentBits
offset++
}
// Serialize the compressed unspent transaction outputs. Outputs that
// are already compressed are serialized without modifications.
for _, outputIndex := range outputOrder {
out := entry.sparseOutputs[uint32(outputIndex)]
if out.spent {
continue
}
offset += putCompressedTxOut(serialized[offset:],
uint64(out.amount), out.pkScript, entry.version,
out.compressed)
}
return serialized, nil return serialized, nil
} }
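
With the reworked format, serializeUtxoEntry and the deserializeUtxoEntry function below are straightforward inverses for an unspent output, so a round trip should preserve the amount, script, height, and coinbase flag. A hedged, test-style sketch that assumes it lives inside the blockchain package (it reuses the unexported UtxoEntry fields shown in this change and the hexToBytes test helper):

	// Sketch only: round trip one unspent P2PKH output through the new
	// per-output serialization.
	entry := &UtxoEntry{
		amount:      13761000000,
		pkScript:    hexToBytes("76a914b2fb57eadf61e106a100a7445a8c3f67898841ec88ac"),
		blockHeight: 100024,
		packedFlags: 0, // not a coinbase output
	}
	serialized, err := serializeUtxoEntry(entry)
	if err != nil {
		// Handle the error.
	}
	got, err := deserializeUtxoEntry(serialized)
	if err != nil {
		// Handle the error.
	}
	// got.Amount(), got.BlockHeight(), got.IsCoinBase(), and got.PkScript()
	// should now match the values the entry was created with.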
@ -689,23 +628,8 @@ func serializeUtxoEntry(entry *UtxoEntry) ([]byte, error) {
// slice into a new UtxoEntry using a format that is suitable for long-term // slice into a new UtxoEntry using a format that is suitable for long-term
// storage. The format is described in detail above. // storage. The format is described in detail above.
func deserializeUtxoEntry(serialized []byte) (*UtxoEntry, error) { func deserializeUtxoEntry(serialized []byte) (*UtxoEntry, error) {
// Deserialize the version.
version, bytesRead := deserializeVLQ(serialized)
offset := bytesRead
if offset >= len(serialized) {
return nil, errDeserialize("unexpected end of data after version")
}
// Deserialize the block height.
blockHeight, bytesRead := deserializeVLQ(serialized[offset:])
offset += bytesRead
if offset >= len(serialized) {
return nil, errDeserialize("unexpected end of data after height")
}
// Deserialize the header code. // Deserialize the header code.
code, bytesRead := deserializeVLQ(serialized[offset:]) code, offset := deserializeVLQ(serialized)
offset += bytesRead
if offset >= len(serialized) { if offset >= len(serialized) {
return nil, errDeserialize("unexpected end of data after header") return nil, errDeserialize("unexpected end of data after header")
} }
@ -713,101 +637,83 @@ func deserializeUtxoEntry(serialized []byte) (*UtxoEntry, error) {
// Decode the header code. // Decode the header code.
// //
// Bit 0 indicates whether the containing transaction is a coinbase. // Bit 0 indicates whether the containing transaction is a coinbase.
// Bit 1 indicates output 0 is unspent. // Bits 1-x encode height of containing transaction.
// Bit 2 indicates output 1 is unspent.
// Bits 3-x encodes the number of non-zero unspentness bitmap bytes that
// follow. When both output 0 and 1 are spent, it encodes N-1.
isCoinBase := code&0x01 != 0 isCoinBase := code&0x01 != 0
output0Unspent := code&0x02 != 0 blockHeight := int32(code >> 1)
output1Unspent := code&0x04 != 0
numBitmapBytes := code >> 3 // Decode the compressed unspent transaction output.
if !output0Unspent && !output1Unspent { amount, pkScript, _, err := decodeCompressedTxOut(serialized[offset:])
numBitmapBytes++ if err != nil {
return nil, errDeserialize(fmt.Sprintf("unable to decode "+
"utxo: %v", err))
} }
// Ensure there are enough bytes left to deserialize the unspentness entry := &UtxoEntry{
// bitmap. amount: int64(amount),
if uint64(len(serialized[offset:])) < numBitmapBytes { pkScript: pkScript,
return nil, errDeserialize("unexpected end of data for " + blockHeight: blockHeight,
"unspentness bitmap") packedFlags: 0,
} }
if isCoinBase {
// Create a new utxo entry with the details deserialized above to house entry.packedFlags |= tfCoinBase
// all of the utxos.
entry := newUtxoEntry(int32(version), isCoinBase, int32(blockHeight))
// Add sparse output for unspent outputs 0 and 1 as needed based on the
// details provided by the header code.
var outputIndexes []uint32
if output0Unspent {
outputIndexes = append(outputIndexes, 0)
}
if output1Unspent {
outputIndexes = append(outputIndexes, 1)
}
// Decode the unspentness bitmap adding a sparse output for each unspent
// output.
for i := uint32(0); i < uint32(numBitmapBytes); i++ {
unspentBits := serialized[offset]
for j := uint32(0); j < 8; j++ {
if unspentBits&0x01 != 0 {
// The first 2 outputs are encoded via the
// header code, so adjust the output number
// accordingly.
outputNum := 2 + i*8 + j
outputIndexes = append(outputIndexes, outputNum)
}
unspentBits >>= 1
}
offset++
}
// Decode and add all of the utxos.
for i, outputIndex := range outputIndexes {
// Decode the next utxo. The script and amount fields of the
// utxo output are left compressed so decompression can be
// avoided on those that are not accessed. This is done since
// it is quite common for a redeeming transaction to only
// reference a single utxo from a referenced transaction.
compAmount, compScript, bytesRead, err := decodeCompressedTxOut(
serialized[offset:], int32(version))
if err != nil {
return nil, errDeserialize(fmt.Sprintf("unable to "+
"decode utxo at index %d: %v", i, err))
}
offset += bytesRead
entry.sparseOutputs[outputIndex] = &utxoOutput{
spent: false,
compressed: true,
pkScript: compScript,
amount: int64(compAmount),
}
} }
return entry, nil return entry, nil
} }
// dbFetchUtxoEntry uses an existing database transaction to fetch all unspent // dbFetchUtxoEntryByHash attempts to find and fetch a utxo for the given hash.
// outputs for the provided Bitcoin transaction hash from the utxo set. // It uses a cursor and seek to try and do this as efficiently as possible.
// //
// When there is no entry for the provided hash, nil will be returned for // When there are no entries for the provided hash, nil will be returned for
// both the entry and the error. // both the entry and the error.
func dbFetchUtxoEntry(dbTx database.Tx, hash *chainhash.Hash) (*UtxoEntry, error) { func dbFetchUtxoEntryByHash(dbTx database.Tx, hash *chainhash.Hash) (*UtxoEntry, error) {
// Attempt to find an entry by seeking for the hash along with a zero
// index. Due to the fact the keys are serialized as <hash><index>,
// where the index uses an MSB encoding, if there are any entries for
// the hash at all, one will be found.
cursor := dbTx.Metadata().Bucket(utxoSetBucketName).Cursor()
key := outpointKey(wire.OutPoint{Hash: *hash, Index: 0})
ok := cursor.Seek(*key)
recycleOutpointKey(key)
if !ok {
return nil, nil
}
// An entry was found, but it could just be an entry with the next
// highest hash after the requested one, so make sure the hashes
// actually match.
cursorKey := cursor.Key()
if len(cursorKey) < chainhash.HashSize {
return nil, nil
}
if !bytes.Equal(hash[:], cursorKey[:chainhash.HashSize]) {
return nil, nil
}
return deserializeUtxoEntry(cursor.Value())
}
// dbFetchUtxoEntry uses an existing database transaction to fetch the specified
// transaction output from the utxo set.
//
// When there is no entry for the provided output, nil will be returned for both
// the entry and the error.
func dbFetchUtxoEntry(dbTx database.Tx, outpoint wire.OutPoint) (*UtxoEntry, error) {
// Fetch the unspent transaction output information for the passed // Fetch the unspent transaction output information for the passed
// transaction hash. Return now when there is no entry. // transaction output. Return now when there is no entry.
key := outpointKey(outpoint)
utxoBucket := dbTx.Metadata().Bucket(utxoSetBucketName) utxoBucket := dbTx.Metadata().Bucket(utxoSetBucketName)
serializedUtxo := utxoBucket.Get(hash[:]) serializedUtxo := utxoBucket.Get(*key)
recycleOutpointKey(key)
if serializedUtxo == nil { if serializedUtxo == nil {
return nil, nil return nil, nil
} }
// A non-nil zero-length entry means there is an entry in the database // A non-nil zero-length entry means there is an entry in the database
// for a fully spent transaction which should never be the case. // for a spent transaction output which should never be the case.
if len(serializedUtxo) == 0 { if len(serializedUtxo) == 0 {
return nil, AssertError(fmt.Sprintf("database contains entry "+ return nil, AssertError(fmt.Sprintf("database contains entry "+
"for fully spent tx %v", hash)) "for spent tx output %v", outpoint))
} }
// Deserialize the utxo entry and return it. // Deserialize the utxo entry and return it.
@ -819,7 +725,7 @@ func dbFetchUtxoEntry(dbTx database.Tx, hash *chainhash.Hash) (*UtxoEntry, error
return nil, database.Error{ return nil, database.Error{
ErrorCode: database.ErrCorruption, ErrorCode: database.ErrCorruption,
Description: fmt.Sprintf("corrupt utxo entry "+ Description: fmt.Sprintf("corrupt utxo entry "+
"for %v: %v", hash, err), "for %v: %v", outpoint, err),
} }
} }
@ -835,36 +741,35 @@ func dbFetchUtxoEntry(dbTx database.Tx, hash *chainhash.Hash) (*UtxoEntry, error
// to the database. // to the database.
func dbPutUtxoView(dbTx database.Tx, view *UtxoViewpoint) error { func dbPutUtxoView(dbTx database.Tx, view *UtxoViewpoint) error {
utxoBucket := dbTx.Metadata().Bucket(utxoSetBucketName) utxoBucket := dbTx.Metadata().Bucket(utxoSetBucketName)
for txHashIter, entry := range view.entries { for outpoint, entry := range view.entries {
// No need to update the database if the entry was not modified. // No need to update the database if the entry was not modified.
if entry == nil || !entry.modified { if entry == nil || !entry.isModified() {
continue continue
} }
// Serialize the utxo entry without any entries that have been // Remove the utxo entry if it is spent.
// spent. if entry.IsSpent() {
serialized, err := serializeUtxoEntry(entry) key := outpointKey(outpoint)
if err != nil { err := utxoBucket.Delete(*key)
return err recycleOutpointKey(key)
} if err != nil {
// Make a copy of the hash because the iterator changes on each
// loop iteration and thus slicing it directly would cause the
// data to change out from under the put/delete funcs below.
txHash := txHashIter
// Remove the utxo entry if it is now fully spent.
if serialized == nil {
if err := utxoBucket.Delete(txHash[:]); err != nil {
return err return err
} }
continue continue
} }
// At this point the utxo entry is not fully spent, so store its // Serialize and store the utxo entry.
// serialization in the database. serialized, err := serializeUtxoEntry(entry)
err = utxoBucket.Put(txHash[:], serialized) if err != nil {
return err
}
key := outpointKey(outpoint)
err = utxoBucket.Put(*key, serialized)
// NOTE: The key is intentionally not recycled here since the
// database interface contract prohibits modifications. It will
// be garbage collected normally when the database is done with
// it.
if err != nil { if err != nil {
return err return err
} }
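
For context, dbPutUtxoView is intended to be called with a writable database transaction once a view has been modified by connecting or disconnecting a block, so spent entries are deleted and new or changed unspent entries are written atomically. A minimal, hypothetical usage sketch assuming an open database db and a modified *UtxoViewpoint named view:

	// Flush every modified entry of the view in one atomic transaction.
	err := db.Update(func(dbTx database.Tx) error {
		return dbPutUtxoView(dbTx, view)
	})
	if err != nil {
		// Handle the error.
	}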
@ -1111,19 +1016,31 @@ func (b *BlockChain) createChainState() error {
return err return err
} }
// Create the bucket that houses the spend journal data. // Create the bucket that houses the spend journal data and
// store its version.
_, err = meta.CreateBucket(spendJournalBucketName) _, err = meta.CreateBucket(spendJournalBucketName)
if err != nil { if err != nil {
return err return err
} }
err = dbPutVersion(dbTx, utxoSetVersionKeyName,
latestUtxoSetBucketVersion)
if err != nil {
return err
}
// Create the bucket that houses the utxo set. Note that the // Create the bucket that houses the utxo set and store its
// genesis block coinbase transaction is intentionally not // version. Note that the genesis block coinbase transaction is
// inserted here since it is not spendable by consensus rules. // intentionally not inserted here since it is not spendable by
// consensus rules.
_, err = meta.CreateBucket(utxoSetBucketName) _, err = meta.CreateBucket(utxoSetBucketName)
if err != nil { if err != nil {
return err return err
} }
err = dbPutVersion(dbTx, spendJournalVersionKeyName,
latestSpendJournalBucketVersion)
if err != nil {
return err
}
// Save the genesis block to the block index database. // Save the genesis block to the block index database.
err = dbStoreBlockNode(dbTx, node) err = dbStoreBlockNode(dbTx, node)


@ -11,7 +11,6 @@ import (
"reflect" "reflect"
"testing" "testing"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database" "github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/wire" "github.com/btcsuite/btcd/wire"
) )
@ -38,19 +37,6 @@ func TestErrNotInMainChain(t *testing.T) {
} }
} }
// maybeDecompress decompresses the amount and public key script fields of the
// stxo and marks it decompressed if needed.
func (o *spentTxOut) maybeDecompress(version int32) {
// Nothing to do if it's not compressed.
if !o.compressed {
return
}
o.amount = int64(decompressTxOutAmount(uint64(o.amount)))
o.pkScript = decompressScript(o.pkScript, version)
o.compressed = false
}
// TestStxoSerialization ensures serializing and deserializing spent transaction // TestStxoSerialization ensures serializing and deserializing spent transaction
// output entries works as expected. // output entries works as expected.
func TestStxoSerialization(t *testing.T) { func TestStxoSerialization(t *testing.T) {
@ -59,7 +45,6 @@ func TestStxoSerialization(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
stxo spentTxOut stxo spentTxOut
txVersion int32 // When the txout is not fully spent.
serialized []byte serialized []byte
}{ }{
// From block 170 in main blockchain. // From block 170 in main blockchain.
@ -70,9 +55,8 @@ func TestStxoSerialization(t *testing.T) {
pkScript: hexToBytes("410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac"), pkScript: hexToBytes("410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac"),
isCoinBase: true, isCoinBase: true,
height: 9, height: 9,
version: 1,
}, },
serialized: hexToBytes("1301320511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5c"), serialized: hexToBytes("1300320511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5c"),
}, },
// Adapted from block 100025 in main blockchain. // Adapted from block 100025 in main blockchain.
{ {
@ -82,19 +66,16 @@ func TestStxoSerialization(t *testing.T) {
pkScript: hexToBytes("76a914b2fb57eadf61e106a100a7445a8c3f67898841ec88ac"), pkScript: hexToBytes("76a914b2fb57eadf61e106a100a7445a8c3f67898841ec88ac"),
isCoinBase: false, isCoinBase: false,
height: 100024, height: 100024,
version: 1,
}, },
serialized: hexToBytes("8b99700186c64700b2fb57eadf61e106a100a7445a8c3f67898841ec"), serialized: hexToBytes("8b99700086c64700b2fb57eadf61e106a100a7445a8c3f67898841ec"),
}, },
// Adapted from block 100025 in main blockchain. // Adapted from block 100025 in main blockchain.
{ {
name: "Does not spend last output", name: "Does not spend last output, legacy format",
stxo: spentTxOut{ stxo: spentTxOut{
amount: 34405000000, amount: 34405000000,
pkScript: hexToBytes("76a9146edbc6c4d31bae9f1ccc38538a114bf42de65e8688ac"), pkScript: hexToBytes("76a9146edbc6c4d31bae9f1ccc38538a114bf42de65e8688ac"),
version: 1,
}, },
txVersion: 1,
serialized: hexToBytes("0091f20f006edbc6c4d31bae9f1ccc38538a114bf42de65e86"), serialized: hexToBytes("0091f20f006edbc6c4d31bae9f1ccc38538a114bf42de65e86"),
}, },
} }
@ -130,14 +111,12 @@ func TestStxoSerialization(t *testing.T) {
// Ensure the serialized bytes are decoded back to the expected // Ensure the serialized bytes are decoded back to the expected
// stxo. // stxo.
var gotStxo spentTxOut var gotStxo spentTxOut
gotBytesRead, err := decodeSpentTxOut(test.serialized, &gotStxo, gotBytesRead, err := decodeSpentTxOut(test.serialized, &gotStxo)
test.txVersion)
if err != nil { if err != nil {
t.Errorf("decodeSpentTxOut (%s): unexpected error: %v", t.Errorf("decodeSpentTxOut (%s): unexpected error: %v",
test.name, err) test.name, err)
continue continue
} }
gotStxo.maybeDecompress(test.stxo.version)
if !reflect.DeepEqual(gotStxo, test.stxo) { if !reflect.DeepEqual(gotStxo, test.stxo) {
t.Errorf("decodeSpentTxOut (%s) mismatched entries - "+ t.Errorf("decodeSpentTxOut (%s) mismatched entries - "+
"got %v, want %v", test.name, gotStxo, test.stxo) "got %v, want %v", test.name, gotStxo, test.stxo)
@ -160,7 +139,6 @@ func TestStxoDecodeErrors(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
stxo spentTxOut stxo spentTxOut
txVersion int32 // When the txout is not fully spent.
serialized []byte serialized []byte
bytesRead int // Expected number of bytes read. bytesRead int // Expected number of bytes read.
errType error errType error
@ -173,39 +151,30 @@ func TestStxoDecodeErrors(t *testing.T) {
bytesRead: 0, bytesRead: 0,
}, },
{ {
name: "no data after header code w/o version", name: "no data after header code w/o reserved",
stxo: spentTxOut{}, stxo: spentTxOut{},
serialized: hexToBytes("00"), serialized: hexToBytes("00"),
errType: errDeserialize(""), errType: errDeserialize(""),
bytesRead: 1, bytesRead: 1,
}, },
{ {
name: "no data after header code with version", name: "no data after header code with reserved",
stxo: spentTxOut{}, stxo: spentTxOut{},
serialized: hexToBytes("13"), serialized: hexToBytes("13"),
errType: errDeserialize(""), errType: errDeserialize(""),
bytesRead: 1, bytesRead: 1,
}, },
{ {
name: "no data after version", name: "no data after reserved",
stxo: spentTxOut{}, stxo: spentTxOut{},
serialized: hexToBytes("1301"), serialized: hexToBytes("1300"),
errType: errDeserialize(""), errType: errDeserialize(""),
bytesRead: 2, bytesRead: 2,
}, },
{
name: "no serialized tx version and passed -1",
stxo: spentTxOut{},
txVersion: -1,
serialized: hexToBytes("003205"),
errType: AssertError(""),
bytesRead: 1,
},
{ {
name: "incomplete compressed txout", name: "incomplete compressed txout",
stxo: spentTxOut{}, stxo: spentTxOut{},
txVersion: 1, serialized: hexToBytes("1332"),
serialized: hexToBytes("0032"),
errType: errDeserialize(""), errType: errDeserialize(""),
bytesRead: 2, bytesRead: 2,
}, },
@ -214,7 +183,7 @@ func TestStxoDecodeErrors(t *testing.T) {
for _, test := range tests { for _, test := range tests {
// Ensure the expected error type is returned. // Ensure the expected error type is returned.
gotBytesRead, err := decodeSpentTxOut(test.serialized, gotBytesRead, err := decodeSpentTxOut(test.serialized,
&test.stxo, test.txVersion) &test.stxo)
if reflect.TypeOf(err) != reflect.TypeOf(test.errType) { if reflect.TypeOf(err) != reflect.TypeOf(test.errType) {
t.Errorf("decodeSpentTxOut (%s): expected error type "+ t.Errorf("decodeSpentTxOut (%s): expected error type "+
"does not match - got %T, want %T", test.name, "does not match - got %T, want %T", test.name,
@ -241,7 +210,6 @@ func TestSpendJournalSerialization(t *testing.T) {
name string name string
entry []spentTxOut entry []spentTxOut
blockTxns []*wire.MsgTx blockTxns []*wire.MsgTx
utxoView *UtxoViewpoint
serialized []byte serialized []byte
}{ }{
// From block 2 in main blockchain. // From block 2 in main blockchain.
@ -249,7 +217,6 @@ func TestSpendJournalSerialization(t *testing.T) {
name: "No spends", name: "No spends",
entry: nil, entry: nil,
blockTxns: nil, blockTxns: nil,
utxoView: NewUtxoViewpoint(),
serialized: nil, serialized: nil,
}, },
// From block 170 in main blockchain. // From block 170 in main blockchain.
@ -260,7 +227,6 @@ func TestSpendJournalSerialization(t *testing.T) {
pkScript: hexToBytes("410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac"), pkScript: hexToBytes("410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac"),
isCoinBase: true, isCoinBase: true,
height: 9, height: 9,
version: 1,
}}, }},
blockTxns: []*wire.MsgTx{{ // Coinbase omitted. blockTxns: []*wire.MsgTx{{ // Coinbase omitted.
Version: 1, Version: 1,
@ -281,22 +247,21 @@ func TestSpendJournalSerialization(t *testing.T) {
}}, }},
LockTime: 0, LockTime: 0,
}}, }},
utxoView: NewUtxoViewpoint(), serialized: hexToBytes("1300320511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5c"),
serialized: hexToBytes("1301320511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5c"),
}, },
// Adapted from block 100025 in main blockchain. // Adapted from block 100025 in main blockchain.
{ {
name: "Two txns when one spends last output, one doesn't", name: "Two txns when one spends last output, one doesn't",
entry: []spentTxOut{{ entry: []spentTxOut{{
amount: 34405000000, amount: 34405000000,
pkScript: hexToBytes("76a9146edbc6c4d31bae9f1ccc38538a114bf42de65e8688ac"), pkScript: hexToBytes("76a9146edbc6c4d31bae9f1ccc38538a114bf42de65e8688ac"),
version: 1, isCoinBase: false,
height: 100024,
}, { }, {
amount: 13761000000, amount: 13761000000,
pkScript: hexToBytes("76a914b2fb57eadf61e106a100a7445a8c3f67898841ec88ac"), pkScript: hexToBytes("76a914b2fb57eadf61e106a100a7445a8c3f67898841ec88ac"),
isCoinBase: false, isCoinBase: false,
height: 100024, height: 100024,
version: 1,
}}, }},
blockTxns: []*wire.MsgTx{{ // Coinbase omitted. blockTxns: []*wire.MsgTx{{ // Coinbase omitted.
Version: 1, Version: 1,
@ -335,73 +300,7 @@ func TestSpendJournalSerialization(t *testing.T) {
}}, }},
LockTime: 0, LockTime: 0,
}}, }},
utxoView: &UtxoViewpoint{entries: map[chainhash.Hash]*UtxoEntry{ serialized: hexToBytes("8b99700086c64700b2fb57eadf61e106a100a7445a8c3f67898841ec8b99700091f20f006edbc6c4d31bae9f1ccc38538a114bf42de65e86"),
*newHashFromStr("c0ed017828e59ad5ed3cf70ee7c6fb0f426433047462477dc7a5d470f987a537"): {
version: 1,
isCoinBase: false,
blockHeight: 100024,
sparseOutputs: map[uint32]*utxoOutput{
1: {
amount: 34405000000,
pkScript: hexToBytes("76a9142084541c3931677527a7eafe56fd90207c344eb088ac"),
},
},
},
}},
serialized: hexToBytes("8b99700186c64700b2fb57eadf61e106a100a7445a8c3f67898841ec0091f20f006edbc6c4d31bae9f1ccc38538a114bf42de65e86"),
},
// Hand crafted.
{
name: "One tx, two inputs from same tx, neither spend last output",
entry: []spentTxOut{{
amount: 165125632,
pkScript: hexToBytes("51"),
version: 1,
}, {
amount: 154370000,
pkScript: hexToBytes("51"),
version: 1,
}},
blockTxns: []*wire.MsgTx{{ // Coinbase omitted.
Version: 1,
TxIn: []*wire.TxIn{{
PreviousOutPoint: wire.OutPoint{
Hash: *newHashFromStr("c0ed017828e59ad5ed3cf70ee7c6fb0f426433047462477dc7a5d470f987a537"),
Index: 1,
},
SignatureScript: hexToBytes(""),
Sequence: 0xffffffff,
}, {
PreviousOutPoint: wire.OutPoint{
Hash: *newHashFromStr("c0ed017828e59ad5ed3cf70ee7c6fb0f426433047462477dc7a5d470f987a537"),
Index: 2,
},
SignatureScript: hexToBytes(""),
Sequence: 0xffffffff,
}},
TxOut: []*wire.TxOut{{
Value: 165125632,
PkScript: hexToBytes("51"),
}, {
Value: 154370000,
PkScript: hexToBytes("51"),
}},
LockTime: 0,
}},
utxoView: &UtxoViewpoint{entries: map[chainhash.Hash]*UtxoEntry{
*newHashFromStr("c0ed017828e59ad5ed3cf70ee7c6fb0f426433047462477dc7a5d470f987a537"): {
version: 1,
isCoinBase: false,
blockHeight: 100000,
sparseOutputs: map[uint32]*utxoOutput{
0: {
amount: 165712179,
pkScript: hexToBytes("51"),
},
},
},
}},
serialized: hexToBytes("0087bc3707510084c3d19a790751"),
}, },
} }
@ -417,16 +316,12 @@ func TestSpendJournalSerialization(t *testing.T) {
// Deserialize to a spend journal entry. // Deserialize to a spend journal entry.
gotEntry, err := deserializeSpendJournalEntry(test.serialized, gotEntry, err := deserializeSpendJournalEntry(test.serialized,
test.blockTxns, test.utxoView) test.blockTxns)
if err != nil { if err != nil {
t.Errorf("deserializeSpendJournalEntry #%d (%s) "+ t.Errorf("deserializeSpendJournalEntry #%d (%s) "+
"unexpected error: %v", i, test.name, err) "unexpected error: %v", i, test.name, err)
continue continue
} }
for stxoIdx := range gotEntry {
stxo := &gotEntry[stxoIdx]
stxo.maybeDecompress(test.entry[stxoIdx].version)
}
// Ensure that the deserialized spend journal entry has the // Ensure that the deserialized spend journal entry has the
// correct properties. // correct properties.
@ -447,7 +342,6 @@ func TestSpendJournalErrors(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
blockTxns []*wire.MsgTx blockTxns []*wire.MsgTx
utxoView *UtxoViewpoint
serialized []byte serialized []byte
errType error errType error
}{ }{
@ -466,7 +360,6 @@ func TestSpendJournalErrors(t *testing.T) {
}}, }},
LockTime: 0, LockTime: 0,
}}, }},
utxoView: NewUtxoViewpoint(),
serialized: hexToBytes(""), serialized: hexToBytes(""),
errType: AssertError(""), errType: AssertError(""),
}, },
@ -484,7 +377,6 @@ func TestSpendJournalErrors(t *testing.T) {
}}, }},
LockTime: 0, LockTime: 0,
}}, }},
utxoView: NewUtxoViewpoint(),
serialized: hexToBytes("1301320511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a"), serialized: hexToBytes("1301320511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a"),
errType: errDeserialize(""), errType: errDeserialize(""),
}, },
@ -494,7 +386,7 @@ func TestSpendJournalErrors(t *testing.T) {
// Ensure the expected error type is returned and the returned // Ensure the expected error type is returned and the returned
// slice is nil. // slice is nil.
stxos, err := deserializeSpendJournalEntry(test.serialized, stxos, err := deserializeSpendJournalEntry(test.serialized,
test.blockTxns, test.utxoView) test.blockTxns)
if reflect.TypeOf(err) != reflect.TypeOf(test.errType) { if reflect.TypeOf(err) != reflect.TypeOf(test.errType) {
t.Errorf("deserializeSpendJournalEntry (%s): expected "+ t.Errorf("deserializeSpendJournalEntry (%s): expected "+
"error type does not match - got %T, want %T", "error type does not match - got %T, want %T",
@ -521,186 +413,52 @@ func TestUtxoSerialization(t *testing.T) {
serialized []byte serialized []byte
}{ }{
// From tx in main blockchain: // From tx in main blockchain:
// 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098 // 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098:0
{ {
name: "Only output 0, coinbase", name: "height 1, coinbase",
entry: &UtxoEntry{ entry: &UtxoEntry{
version: 1, amount: 5000000000,
isCoinBase: true, pkScript: hexToBytes("410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac"),
blockHeight: 1, blockHeight: 1,
sparseOutputs: map[uint32]*utxoOutput{ packedFlags: tfCoinBase,
0: {
amount: 5000000000,
pkScript: hexToBytes("410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac"),
},
},
}, },
serialized: hexToBytes("010103320496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52"), serialized: hexToBytes("03320496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52"),
}, },
// From tx in main blockchain: // From tx in main blockchain:
// 8131ffb0a2c945ecaf9b9063e59558784f9c3a74741ce6ae2a18d0571dac15bb // 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098:0
{ {
name: "Only output 1, not coinbase", name: "height 1, coinbase, spent",
entry: &UtxoEntry{ entry: &UtxoEntry{
version: 1, amount: 5000000000,
isCoinBase: false, pkScript: hexToBytes("410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac"),
blockHeight: 100001,
sparseOutputs: map[uint32]*utxoOutput{
1: {
amount: 1000000,
pkScript: hexToBytes("76a914ee8bd501094a7d5ca318da2506de35e1cb025ddc88ac"),
},
},
},
serialized: hexToBytes("01858c21040700ee8bd501094a7d5ca318da2506de35e1cb025ddc"),
},
// Adapted from tx in main blockchain:
// df3f3f442d9699857f7f49de4ff0b5d0f3448bec31cdc7b5bf6d25f2abd637d5
{
name: "Only output 2, coinbase",
entry: &UtxoEntry{
version: 1,
isCoinBase: true,
blockHeight: 99004,
sparseOutputs: map[uint32]*utxoOutput{
2: {
amount: 100937281,
pkScript: hexToBytes("76a914da33f77cee27c2a975ed5124d7e4f7f97513510188ac"),
},
},
},
serialized: hexToBytes("0185843c010182b095bf4100da33f77cee27c2a975ed5124d7e4f7f975135101"),
},
// Adapted from tx in main blockchain:
// 4a16969aa4764dd7507fc1de7f0baa4850a246de90c45e59a3207f9a26b5036f
{
name: "outputs 0 and 2 not coinbase",
entry: &UtxoEntry{
version: 1,
isCoinBase: false,
blockHeight: 113931,
sparseOutputs: map[uint32]*utxoOutput{
0: {
amount: 20000000,
pkScript: hexToBytes("76a914e2ccd6ec7c6e2e581349c77e067385fa8236bf8a88ac"),
},
2: {
amount: 15000000,
pkScript: hexToBytes("76a914b8025be1b3efc63b0ad48e7f9f10e87544528d5888ac"),
},
},
},
serialized: hexToBytes("0185f90b0a011200e2ccd6ec7c6e2e581349c77e067385fa8236bf8a800900b8025be1b3efc63b0ad48e7f9f10e87544528d58"),
},
// Adapted from tx in main blockchain:
// 4a16969aa4764dd7507fc1de7f0baa4850a246de90c45e59a3207f9a26b5036f
{
name: "outputs 0 and 2, not coinbase, 1 marked spent",
entry: &UtxoEntry{
version: 1,
isCoinBase: false,
blockHeight: 113931,
sparseOutputs: map[uint32]*utxoOutput{
0: {
amount: 20000000,
pkScript: hexToBytes("76a914e2ccd6ec7c6e2e581349c77e067385fa8236bf8a88ac"),
},
1: { // This won't be serialized.
spent: true,
amount: 1000000,
pkScript: hexToBytes("76a914e43031c3e46f20bf1ccee9553ce815de5a48467588ac"),
},
2: {
amount: 15000000,
pkScript: hexToBytes("76a914b8025be1b3efc63b0ad48e7f9f10e87544528d5888ac"),
},
},
},
serialized: hexToBytes("0185f90b0a011200e2ccd6ec7c6e2e581349c77e067385fa8236bf8a800900b8025be1b3efc63b0ad48e7f9f10e87544528d58"),
},
// Adapted from tx in main blockchain:
// 4a16969aa4764dd7507fc1de7f0baa4850a246de90c45e59a3207f9a26b5036f
{
name: "outputs 0 and 2, not coinbase, output 2 compressed",
entry: &UtxoEntry{
version: 1,
isCoinBase: false,
blockHeight: 113931,
sparseOutputs: map[uint32]*utxoOutput{
0: {
amount: 20000000,
pkScript: hexToBytes("76a914e2ccd6ec7c6e2e581349c77e067385fa8236bf8a88ac"),
},
2: {
// Uncompressed Amount: 15000000
// Uncompressed PkScript: 76a914b8025be1b3efc63b0ad48e7f9f10e87544528d5888ac
compressed: true,
amount: 137,
pkScript: hexToBytes("00b8025be1b3efc63b0ad48e7f9f10e87544528d58"),
},
},
},
serialized: hexToBytes("0185f90b0a011200e2ccd6ec7c6e2e581349c77e067385fa8236bf8a800900b8025be1b3efc63b0ad48e7f9f10e87544528d58"),
},
// Adapted from tx in main blockchain:
// 4a16969aa4764dd7507fc1de7f0baa4850a246de90c45e59a3207f9a26b5036f
{
name: "outputs 0 and 2, not coinbase, output 2 compressed, packed indexes reversed",
entry: &UtxoEntry{
version: 1,
isCoinBase: false,
blockHeight: 113931,
sparseOutputs: map[uint32]*utxoOutput{
0: {
amount: 20000000,
pkScript: hexToBytes("76a914e2ccd6ec7c6e2e581349c77e067385fa8236bf8a88ac"),
},
2: {
// Uncompressed Amount: 15000000
// Uncompressed PkScript: 76a914b8025be1b3efc63b0ad48e7f9f10e87544528d5888ac
compressed: true,
amount: 137,
pkScript: hexToBytes("00b8025be1b3efc63b0ad48e7f9f10e87544528d58"),
},
},
},
serialized: hexToBytes("0185f90b0a011200e2ccd6ec7c6e2e581349c77e067385fa8236bf8a800900b8025be1b3efc63b0ad48e7f9f10e87544528d58"),
},
// From tx in main blockchain:
// 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098
{
name: "Only output 0, coinbase, fully spent",
entry: &UtxoEntry{
version: 1,
isCoinBase: true,
blockHeight: 1, blockHeight: 1,
sparseOutputs: map[uint32]*utxoOutput{ packedFlags: tfCoinBase | tfSpent,
0: {
spent: true,
amount: 5000000000,
pkScript: hexToBytes("410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac"),
},
},
}, },
serialized: nil, serialized: nil,
}, },
// Adapted from tx in main blockchain: // From tx in main blockchain:
// 1b02d1c8cfef60a189017b9a420c682cf4a0028175f2f563209e4ff61c8c3620 // 8131ffb0a2c945ecaf9b9063e59558784f9c3a74741ce6ae2a18d0571dac15bb:1
{ {
name: "Only output 22, not coinbase", name: "height 100001, not coinbase",
entry: &UtxoEntry{ entry: &UtxoEntry{
version: 1, amount: 1000000,
isCoinBase: false, pkScript: hexToBytes("76a914ee8bd501094a7d5ca318da2506de35e1cb025ddc88ac"),
blockHeight: 338156, blockHeight: 100001,
sparseOutputs: map[uint32]*utxoOutput{ packedFlags: 0,
22: {
spent: false,
amount: 366875659,
pkScript: hexToBytes("a9141dd46a006572d820e448e12d2bbb38640bc718e687"),
},
},
}, },
serialized: hexToBytes("0193d06c100000108ba5b9e763011dd46a006572d820e448e12d2bbb38640bc718e6"), serialized: hexToBytes("8b99420700ee8bd501094a7d5ca318da2506de35e1cb025ddc"),
},
// From tx in main blockchain:
// 8131ffb0a2c945ecaf9b9063e59558784f9c3a74741ce6ae2a18d0571dac15bb:1
{
name: "height 100001, not coinbase, spent",
entry: &UtxoEntry{
amount: 1000000,
pkScript: hexToBytes("76a914ee8bd501094a7d5ca318da2506de35e1cb025ddc88ac"),
blockHeight: 100001,
packedFlags: tfSpent,
},
serialized: nil,
}, },
} }
@ -719,9 +477,9 @@ func TestUtxoSerialization(t *testing.T) {
continue continue
} }
// Don't try to deserialize if the test entry was fully spent // Don't try to deserialize if the test entry was spent since it
// since it will have a nil serialization. // will have a nil serialization.
if test.entry.IsFullySpent() { if test.entry.IsSpent() {
continue continue
} }
@ -733,12 +491,33 @@ func TestUtxoSerialization(t *testing.T) {
continue continue
} }
// Ensure that the deserialized utxo entry has the same // The deserialized entry must not be marked spent since unspent
// properties for the containing transaction and block height. // entries are not serialized.
if utxoEntry.Version() != test.entry.Version() { if utxoEntry.IsSpent() {
t.Errorf("deserializeUtxoEntry #%d (%s) output should "+
"not be marked spent", i, test.name)
continue
}
// Ensure the deserialized entry has the same properties as the
// ones in the test entry.
if utxoEntry.Amount() != test.entry.Amount() {
t.Errorf("deserializeUtxoEntry #%d (%s) mismatched "+ t.Errorf("deserializeUtxoEntry #%d (%s) mismatched "+
"version: got %d, want %d", i, test.name, "amounts: got %d, want %d", i, test.name,
utxoEntry.Version(), test.entry.Version()) utxoEntry.Amount(), test.entry.Amount())
continue
}
if !bytes.Equal(utxoEntry.PkScript(), test.entry.PkScript()) {
t.Errorf("deserializeUtxoEntry #%d (%s) mismatched "+
"scripts: got %x, want %x", i, test.name,
utxoEntry.PkScript(), test.entry.PkScript())
continue
}
if utxoEntry.BlockHeight() != test.entry.BlockHeight() {
t.Errorf("deserializeUtxoEntry #%d (%s) mismatched "+
"block height: got %d, want %d", i, test.name,
utxoEntry.BlockHeight(), test.entry.BlockHeight())
continue continue
} }
if utxoEntry.IsCoinBase() != test.entry.IsCoinBase() { if utxoEntry.IsCoinBase() != test.entry.IsCoinBase() {
@ -747,71 +526,6 @@ func TestUtxoSerialization(t *testing.T) {
utxoEntry.IsCoinBase(), test.entry.IsCoinBase()) utxoEntry.IsCoinBase(), test.entry.IsCoinBase())
continue continue
} }
if utxoEntry.BlockHeight() != test.entry.BlockHeight() {
t.Errorf("deserializeUtxoEntry #%d (%s) mismatched "+
"block height: got %d, want %d", i, test.name,
utxoEntry.BlockHeight(),
test.entry.BlockHeight())
continue
}
if utxoEntry.IsFullySpent() != test.entry.IsFullySpent() {
t.Errorf("deserializeUtxoEntry #%d (%s) mismatched "+
"fully spent: got %v, want %v", i, test.name,
utxoEntry.IsFullySpent(),
test.entry.IsFullySpent())
continue
}
// Ensure all of the outputs in the test entry match the
// spentness of the output in the deserialized entry and the
// deserialized entry does not contain any additional utxos.
var numUnspent int
for outputIndex := range test.entry.sparseOutputs {
gotSpent := utxoEntry.IsOutputSpent(outputIndex)
wantSpent := test.entry.IsOutputSpent(outputIndex)
if !wantSpent {
numUnspent++
}
if gotSpent != wantSpent {
t.Errorf("deserializeUtxoEntry #%d (%s) output "+
"#%d: mismatched spent: got %v, want "+
"%v", i, test.name, outputIndex,
gotSpent, wantSpent)
continue
}
}
if len(utxoEntry.sparseOutputs) != numUnspent {
t.Errorf("deserializeUtxoEntry #%d (%s): mismatched "+
"number of unspent outputs: got %d, want %d", i,
test.name, len(utxoEntry.sparseOutputs),
numUnspent)
continue
}
// Ensure all of the amounts and scripts of the utxos in the
// deserialized entry match the ones in the test entry.
for outputIndex := range utxoEntry.sparseOutputs {
gotAmount := utxoEntry.AmountByIndex(outputIndex)
wantAmount := test.entry.AmountByIndex(outputIndex)
if gotAmount != wantAmount {
t.Errorf("deserializeUtxoEntry #%d (%s) "+
"output #%d: mismatched amounts: got "+
"%d, want %d", i, test.name,
outputIndex, gotAmount, wantAmount)
continue
}
gotPkScript := utxoEntry.PkScriptByIndex(outputIndex)
wantPkScript := test.entry.PkScriptByIndex(outputIndex)
if !bytes.Equal(gotPkScript, wantPkScript) {
t.Errorf("deserializeUtxoEntry #%d (%s) "+
"output #%d mismatched scripts: got "+
"%x, want %x", i, test.name,
outputIndex, gotPkScript, wantPkScript)
continue
}
}
} }
} }
@ -821,23 +535,21 @@ func TestUtxoEntryHeaderCodeErrors(t *testing.T) {
t.Parallel() t.Parallel()
tests := []struct { tests := []struct {
name string name string
entry *UtxoEntry entry *UtxoEntry
code uint64 code uint64
bytesRead int // Expected number of bytes read. errType error
errType error
}{ }{
{ {
name: "Force assertion due to fully spent tx", name: "Force assertion due to spent output",
entry: &UtxoEntry{}, entry: &UtxoEntry{packedFlags: tfSpent},
errType: AssertError(""), errType: AssertError(""),
bytesRead: 0,
}, },
} }
for _, test := range tests { for _, test := range tests {
// Ensure the expected error type is returned and the code is 0. // Ensure the expected error type is returned and the code is 0.
code, gotBytesRead, err := utxoEntryHeaderCode(test.entry, 0) code, err := utxoEntryHeaderCode(test.entry)
if reflect.TypeOf(err) != reflect.TypeOf(test.errType) { if reflect.TypeOf(err) != reflect.TypeOf(test.errType) {
t.Errorf("utxoEntryHeaderCode (%s): expected error "+ t.Errorf("utxoEntryHeaderCode (%s): expected error "+
"type does not match - got %T, want %T", "type does not match - got %T, want %T",
@ -849,14 +561,6 @@ func TestUtxoEntryHeaderCodeErrors(t *testing.T) {
"on error - got %d, want 0", test.name, code) "on error - got %d, want 0", test.name, code)
continue continue
} }
// Ensure the expected number of bytes read is returned.
if gotBytesRead != test.bytesRead {
t.Errorf("utxoEntryHeaderCode (%s): unexpected number "+
"of bytes read - got %d, want %d", test.name,
gotBytesRead, test.bytesRead)
continue
}
} }
} }
@ -870,29 +574,14 @@ func TestUtxoEntryDeserializeErrors(t *testing.T) {
serialized []byte serialized []byte
errType error errType error
}{ }{
{
name: "no data after version",
serialized: hexToBytes("01"),
errType: errDeserialize(""),
},
{
name: "no data after block height",
serialized: hexToBytes("0101"),
errType: errDeserialize(""),
},
{ {
name: "no data after header code", name: "no data after header code",
serialized: hexToBytes("010102"), serialized: hexToBytes("02"),
errType: errDeserialize(""),
},
{
name: "not enough bytes for unspentness bitmap",
serialized: hexToBytes("01017800"),
errType: errDeserialize(""), errType: errDeserialize(""),
}, },
{ {
name: "incomplete compressed txout", name: "incomplete compressed txout",
serialized: hexToBytes("01010232"), serialized: hexToBytes("0232"),
errType: errDeserialize(""), errType: errDeserialize(""),
}, },
} }


@ -190,10 +190,10 @@ func chainSetup(dbName string, params *chaincfg.Params) (*BlockChain, func(), er
// loadUtxoView returns a utxo view loaded from a file. // loadUtxoView returns a utxo view loaded from a file.
func loadUtxoView(filename string) (*UtxoViewpoint, error) { func loadUtxoView(filename string) (*UtxoViewpoint, error) {
// The utxostore file format is: // The utxostore file format is:
// <tx hash><serialized utxo len><serialized utxo> // <tx hash><output index><serialized utxo len><serialized utxo>
// //
// The serialized utxo len is a little endian uint32 and the serialized // The output index and serialized utxo len are little endian uint32s
// utxo uses the format described in chainio.go. // and the serialized utxo uses the format described in chainio.go.
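// As a concrete illustration (hypothetical numbers, not real test data): a
// utxo at output index 1 whose serialized form is 34 bytes long occupies
// 32 bytes of tx hash, the little endian uint32 value 1, the little endian
// uint32 value 34, and finally the 34 serialized utxo bytes.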
filename = filepath.Join("testdata", filename) filename = filepath.Join("testdata", filename)
fi, err := os.Open(filename) fi, err := os.Open(filename)
@ -223,7 +223,14 @@ func loadUtxoView(filename string) (*UtxoViewpoint, error) {
return nil, err return nil, err
} }
// Num of serialized utxo entry bytes. // Output index of the utxo entry.
var index uint32
err = binary.Read(r, binary.LittleEndian, &index)
if err != nil {
return nil, err
}
// Num of serialized utxo entry bytes.
var numBytes uint32 var numBytes uint32
err = binary.Read(r, binary.LittleEndian, &numBytes) err = binary.Read(r, binary.LittleEndian, &numBytes)
if err != nil { if err != nil {
@ -238,16 +245,98 @@ func loadUtxoView(filename string) (*UtxoViewpoint, error) {
} }
// Deserialize it and add it to the view. // Deserialize it and add it to the view.
utxoEntry, err := deserializeUtxoEntry(serialized) entry, err := deserializeUtxoEntry(serialized)
if err != nil { if err != nil {
return nil, err return nil, err
} }
view.Entries()[hash] = utxoEntry view.Entries()[wire.OutPoint{Hash: hash, Index: index}] = entry
} }
return view, nil return view, nil
} }
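The detail worth calling out above is that view entries are now keyed by the full previous outpoint instead of the transaction hash alone, so every unspent output gets its own entry. A minimal, self-contained sketch of the difference, using a simplified stand-in struct rather than the real UtxoEntry:

package main

import (
	"fmt"

	"github.com/btcsuite/btcd/chaincfg/chainhash"
	"github.com/btcsuite/btcd/wire"
)

// utxo is a simplified stand-in for the blockchain package's UtxoEntry.
type utxo struct {
	amount int64
}

func main() {
	var txHash chainhash.Hash // zero hash, purely for illustration

	// Legacy keying: one entry per transaction, every output packed inside.
	byTxHash := map[chainhash.Hash]*utxo{
		txHash: {amount: 50e8},
	}

	// Reworked keying: one entry per individual output, keyed by outpoint,
	// which is exactly what loadUtxoView populates above.
	byOutPoint := map[wire.OutPoint]*utxo{
		{Hash: txHash, Index: 0}: {amount: 50e8},
		{Hash: txHash, Index: 1}: {amount: 25e8},
	}

	fmt.Println(len(byTxHash), len(byOutPoint))
}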
// convertUtxoStore reads a utxostore from the legacy format and writes it back
// out using the latest format. It is only useful for converting utxostore data
// used in the tests, which has already been done. However, the code is left
// available for future reference.
func convertUtxoStore(r io.Reader, w io.Writer) error {
// The old utxostore file format was:
// <tx hash><serialized utxo len><serialized utxo>
//
// The serialized utxo len was a little endian uint32 and the serialized
// utxo uses the format described in upgrade.go.
littleEndian := binary.LittleEndian
for {
// Hash of the utxo entry.
var hash chainhash.Hash
_, err := io.ReadAtLeast(r, hash[:], len(hash[:]))
if err != nil {
// Expected EOF at the right offset.
if err == io.EOF {
break
}
return err
}
// Num of serialized utxo entry bytes.
var numBytes uint32
err = binary.Read(r, littleEndian, &numBytes)
if err != nil {
return err
}
// Serialized utxo entry.
serialized := make([]byte, numBytes)
_, err = io.ReadAtLeast(r, serialized, int(numBytes))
if err != nil {
return err
}
// Deserialize the entry.
entries, err := deserializeUtxoEntryV0(serialized)
if err != nil {
return err
}
// Loop through all of the utxos and write them out in the new
// format.
for outputIdx, entry := range entries {
// Reserialize the entries using the new format.
serialized, err := serializeUtxoEntry(entry)
if err != nil {
return err
}
// Write the hash of the utxo entry.
_, err = w.Write(hash[:])
if err != nil {
return err
}
// Write the output index of the utxo entry.
err = binary.Write(w, littleEndian, outputIdx)
if err != nil {
return err
}
// Write num of serialized utxo entry bytes.
err = binary.Write(w, littleEndian, uint32(len(serialized)))
if err != nil {
return err
}
// Write the serialized utxo.
_, err = w.Write(serialized)
if err != nil {
return err
}
}
}
return nil
}
// TstSetCoinbaseMaturity makes the ability to set the coinbase maturity // TstSetCoinbaseMaturity makes the ability to set the coinbase maturity
// available when running tests. // available when running tests.
func (b *BlockChain) TstSetCoinbaseMaturity(maturity uint16) { func (b *BlockChain) TstSetCoinbaseMaturity(maturity uint16) {


@ -241,7 +241,7 @@ func isPubKey(script []byte) (bool, []byte) {
// compressedScriptSize returns the number of bytes the passed script would take // compressedScriptSize returns the number of bytes the passed script would take
// when encoded with the domain specific compression algorithm described above. // when encoded with the domain specific compression algorithm described above.
func compressedScriptSize(pkScript []byte, version int32) int { func compressedScriptSize(pkScript []byte) int {
// Pay-to-pubkey-hash script. // Pay-to-pubkey-hash script.
if valid, _ := isPubKeyHash(pkScript); valid { if valid, _ := isPubKeyHash(pkScript); valid {
return 21 return 21
@ -268,7 +268,7 @@ func compressedScriptSize(pkScript []byte, version int32) int {
// script, possibly followed by other data, and returns the number of bytes it // script, possibly followed by other data, and returns the number of bytes it
// occupies taking into account the special encoding of the script size by the // occupies taking into account the special encoding of the script size by the
// domain specific compression algorithm described above. // domain specific compression algorithm described above.
func decodeCompressedScriptSize(serialized []byte, version int32) int { func decodeCompressedScriptSize(serialized []byte) int {
scriptSize, bytesRead := deserializeVLQ(serialized) scriptSize, bytesRead := deserializeVLQ(serialized)
if bytesRead == 0 { if bytesRead == 0 {
return 0 return 0
@ -296,7 +296,7 @@ func decodeCompressedScriptSize(serialized []byte, version int32) int {
// target byte slice. The target byte slice must be at least large enough to // target byte slice. The target byte slice must be at least large enough to
// handle the number of bytes returned by the compressedScriptSize function or // handle the number of bytes returned by the compressedScriptSize function or
// it will panic. // it will panic.
func putCompressedScript(target, pkScript []byte, version int32) int { func putCompressedScript(target, pkScript []byte) int {
// Pay-to-pubkey-hash script. // Pay-to-pubkey-hash script.
if valid, hash := isPubKeyHash(pkScript); valid { if valid, hash := isPubKeyHash(pkScript); valid {
target[0] = cstPayToPubKeyHash target[0] = cstPayToPubKeyHash
@ -344,7 +344,7 @@ func putCompressedScript(target, pkScript []byte, version int32) int {
// NOTE: The script parameter must already have been proven to be long enough // NOTE: The script parameter must already have been proven to be long enough
// to contain the number of bytes returned by decodeCompressedScriptSize or it // to contain the number of bytes returned by decodeCompressedScriptSize or it
// will panic. This is acceptable since it is only an internal function. // will panic. This is acceptable since it is only an internal function.
func decompressScript(compressedPkScript []byte, version int32) []byte { func decompressScript(compressedPkScript []byte) []byte {
// In practice this function will not be called with a zero-length or // In practice this function will not be called with a zero-length or
// nil script since the nil script encoding includes the length, however // nil script since the nil script encoding includes the length, however
// the code below assumes the length exists, so just return nil now if // the code below assumes the length exists, so just return nil now if
@ -542,43 +542,27 @@ func decompressTxOutAmount(amount uint64) uint64 {
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// compressedTxOutSize returns the number of bytes the passed transaction output // compressedTxOutSize returns the number of bytes the passed transaction output
// fields would take when encoded with the format described above. The // fields would take when encoded with the format described above.
// preCompressed flag indicates the provided amount and script are already func compressedTxOutSize(amount uint64, pkScript []byte) int {
// compressed. This is useful since loaded utxo entries are not decompressed
// until the output is accessed.
func compressedTxOutSize(amount uint64, pkScript []byte, version int32, preCompressed bool) int {
if preCompressed {
return serializeSizeVLQ(amount) + len(pkScript)
}
return serializeSizeVLQ(compressTxOutAmount(amount)) + return serializeSizeVLQ(compressTxOutAmount(amount)) +
compressedScriptSize(pkScript, version) compressedScriptSize(pkScript)
} }
// putCompressedTxOut potentially compresses the passed amount and script // putCompressedTxOut compresses the passed amount and script according to their
// according to their domain specific compression algorithms and encodes them // domain specific compression algorithms and encodes them directly into the
// directly into the passed target byte slice with the format described above. // passed target byte slice with the format described above. The target byte
// The preCompressed flag indicates the provided amount and script are already // slice must be at least large enough to handle the number of bytes returned by
// compressed in which case the values are not modified. This is useful since // the compressedTxOutSize function or it will panic.
// loaded utxo entries are not decompressed until the output is accessed. The func putCompressedTxOut(target []byte, amount uint64, pkScript []byte) int {
// target byte slice must be at least large enough to handle the number of bytes
// returned by the compressedTxOutSize function or it will panic.
func putCompressedTxOut(target []byte, amount uint64, pkScript []byte, version int32, preCompressed bool) int {
if preCompressed {
offset := putVLQ(target, amount)
copy(target[offset:], pkScript)
return offset + len(pkScript)
}
offset := putVLQ(target, compressTxOutAmount(amount)) offset := putVLQ(target, compressTxOutAmount(amount))
offset += putCompressedScript(target[offset:], pkScript, version) offset += putCompressedScript(target[offset:], pkScript)
return offset return offset
} }
// decodeCompressedTxOut decodes the passed compressed txout, possibly followed // decodeCompressedTxOut decodes the passed compressed txout, possibly followed
// by other data, into its compressed amount and compressed script and returns // by other data, into its uncompressed amount and script and returns them along
// them along with the number of bytes they occupied. // with the number of bytes they occupied prior to decompression.
func decodeCompressedTxOut(serialized []byte, version int32) (uint64, []byte, int, error) { func decodeCompressedTxOut(serialized []byte) (uint64, []byte, int, error) {
// Deserialize the compressed amount and ensure there are bytes // Deserialize the compressed amount and ensure there are bytes
// remaining for the compressed script. // remaining for the compressed script.
compressedAmount, bytesRead := deserializeVLQ(serialized) compressedAmount, bytesRead := deserializeVLQ(serialized)
@ -589,15 +573,14 @@ func decodeCompressedTxOut(serialized []byte, version int32) (uint64, []byte, in
// Decode the compressed script size and ensure there are enough bytes // Decode the compressed script size and ensure there are enough bytes
// left in the slice for it. // left in the slice for it.
scriptSize := decodeCompressedScriptSize(serialized[bytesRead:], version) scriptSize := decodeCompressedScriptSize(serialized[bytesRead:])
if len(serialized[bytesRead:]) < scriptSize { if len(serialized[bytesRead:]) < scriptSize {
return 0, nil, bytesRead, errDeserialize("unexpected end of " + return 0, nil, bytesRead, errDeserialize("unexpected end of " +
"data after script size") "data after script size")
} }
// Make a copy of the compressed script so the original serialized data // Decompress and return the amount and script.
// can be released as soon as possible. amount := decompressTxOutAmount(compressedAmount)
compressedScript := make([]byte, scriptSize) script := decompressScript(serialized[bytesRead : bytesRead+scriptSize])
copy(compressedScript, serialized[bytesRead:bytesRead+scriptSize]) return amount, script, bytesRead + scriptSize, nil
return compressedAmount, compressedScript, bytesRead + scriptSize, nil
} }
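With the script version parameter gone, callers now pass uncompressed values in and get uncompressed values back out of the decoder. A rough sketch of the round trip these helpers support, written as blockchain-package test-style code since the functions are unexported (hexToBytes is the existing test helper; the function name here is illustrative):

// testCompressedTxOutRoundTrip is an illustrative round trip through the
// compressed txout encoding using the reworked helper signatures.
func testCompressedTxOutRoundTrip(t *testing.T) {
	amount := uint64(546)
	pkScript := hexToBytes("76a9141018853670f9f3b0582c5b9ee8ce93764ac32b9388ac")

	// Serialize the txout using the compressed format.
	target := make([]byte, compressedTxOutSize(amount, pkScript))
	putCompressedTxOut(target, amount, pkScript)

	// Decoding returns the uncompressed amount and script directly.
	gotAmount, gotScript, _, err := decodeCompressedTxOut(target)
	if err != nil {
		t.Fatalf("decodeCompressedTxOut: %v", err)
	}
	if gotAmount != amount || !bytes.Equal(gotScript, pkScript) {
		t.Fatalf("round trip mismatch: got %d/%x, want %d/%x",
			gotAmount, gotScript, amount, pkScript)
	}
}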


@ -109,79 +109,66 @@ func TestScriptCompression(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
version int32
uncompressed []byte uncompressed []byte
compressed []byte compressed []byte
}{ }{
{ {
name: "nil", name: "nil",
version: 1,
uncompressed: nil, uncompressed: nil,
compressed: hexToBytes("06"), compressed: hexToBytes("06"),
}, },
{ {
name: "pay-to-pubkey-hash 1", name: "pay-to-pubkey-hash 1",
version: 1,
uncompressed: hexToBytes("76a9141018853670f9f3b0582c5b9ee8ce93764ac32b9388ac"), uncompressed: hexToBytes("76a9141018853670f9f3b0582c5b9ee8ce93764ac32b9388ac"),
compressed: hexToBytes("001018853670f9f3b0582c5b9ee8ce93764ac32b93"), compressed: hexToBytes("001018853670f9f3b0582c5b9ee8ce93764ac32b93"),
}, },
{ {
name: "pay-to-pubkey-hash 2", name: "pay-to-pubkey-hash 2",
version: 1,
uncompressed: hexToBytes("76a914e34cce70c86373273efcc54ce7d2a491bb4a0e8488ac"), uncompressed: hexToBytes("76a914e34cce70c86373273efcc54ce7d2a491bb4a0e8488ac"),
compressed: hexToBytes("00e34cce70c86373273efcc54ce7d2a491bb4a0e84"), compressed: hexToBytes("00e34cce70c86373273efcc54ce7d2a491bb4a0e84"),
}, },
{ {
name: "pay-to-script-hash 1", name: "pay-to-script-hash 1",
version: 1,
uncompressed: hexToBytes("a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87"), uncompressed: hexToBytes("a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87"),
compressed: hexToBytes("01da1745e9b549bd0bfa1a569971c77eba30cd5a4b"), compressed: hexToBytes("01da1745e9b549bd0bfa1a569971c77eba30cd5a4b"),
}, },
{ {
name: "pay-to-script-hash 2", name: "pay-to-script-hash 2",
version: 1,
uncompressed: hexToBytes("a914f815b036d9bbbce5e9f2a00abd1bf3dc91e9551087"), uncompressed: hexToBytes("a914f815b036d9bbbce5e9f2a00abd1bf3dc91e9551087"),
compressed: hexToBytes("01f815b036d9bbbce5e9f2a00abd1bf3dc91e95510"), compressed: hexToBytes("01f815b036d9bbbce5e9f2a00abd1bf3dc91e95510"),
}, },
{ {
name: "pay-to-pubkey compressed 0x02", name: "pay-to-pubkey compressed 0x02",
version: 1,
uncompressed: hexToBytes("2102192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4ac"), uncompressed: hexToBytes("2102192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4ac"),
compressed: hexToBytes("02192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4"), compressed: hexToBytes("02192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4"),
}, },
{ {
name: "pay-to-pubkey compressed 0x03", name: "pay-to-pubkey compressed 0x03",
version: 1,
uncompressed: hexToBytes("2103b0bd634234abbb1ba1e986e884185c61cf43e001f9137f23c2c409273eb16e65ac"), uncompressed: hexToBytes("2103b0bd634234abbb1ba1e986e884185c61cf43e001f9137f23c2c409273eb16e65ac"),
compressed: hexToBytes("03b0bd634234abbb1ba1e986e884185c61cf43e001f9137f23c2c409273eb16e65"), compressed: hexToBytes("03b0bd634234abbb1ba1e986e884185c61cf43e001f9137f23c2c409273eb16e65"),
}, },
{ {
name: "pay-to-pubkey uncompressed 0x04 even", name: "pay-to-pubkey uncompressed 0x04 even",
version: 1,
uncompressed: hexToBytes("4104192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b40d45264838c0bd96852662ce6a847b197376830160c6d2eb5e6a4c44d33f453eac"), uncompressed: hexToBytes("4104192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b40d45264838c0bd96852662ce6a847b197376830160c6d2eb5e6a4c44d33f453eac"),
compressed: hexToBytes("04192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4"), compressed: hexToBytes("04192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4"),
}, },
{ {
name: "pay-to-pubkey uncompressed 0x04 odd", name: "pay-to-pubkey uncompressed 0x04 odd",
version: 1,
uncompressed: hexToBytes("410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac"), uncompressed: hexToBytes("410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac"),
compressed: hexToBytes("0511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5c"), compressed: hexToBytes("0511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5c"),
}, },
{ {
name: "pay-to-pubkey invalid pubkey", name: "pay-to-pubkey invalid pubkey",
version: 1,
uncompressed: hexToBytes("3302aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac"), uncompressed: hexToBytes("3302aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac"),
compressed: hexToBytes("293302aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac"), compressed: hexToBytes("293302aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac"),
}, },
{ {
name: "null data", name: "null data",
version: 1,
uncompressed: hexToBytes("6a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"), uncompressed: hexToBytes("6a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"),
compressed: hexToBytes("286a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"), compressed: hexToBytes("286a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"),
}, },
{ {
name: "requires 2 size bytes - data push 200 bytes", name: "requires 2 size bytes - data push 200 bytes",
version: 1,
uncompressed: append(hexToBytes("4cc8"), bytes.Repeat([]byte{0x00}, 200)...), uncompressed: append(hexToBytes("4cc8"), bytes.Repeat([]byte{0x00}, 200)...),
// [0x80, 0x50] = 208 as a variable length quantity // [0x80, 0x50] = 208 as a variable length quantity
// [0x4c, 0xc8] = OP_PUSHDATA1 200 // [0x4c, 0xc8] = OP_PUSHDATA1 200
@ -192,7 +179,7 @@ func TestScriptCompression(t *testing.T) {
for _, test := range tests { for _, test := range tests {
// Ensure the function to calculate the serialized size without // Ensure the function to calculate the serialized size without
// actually serializing the value is calculated properly. // actually serializing the value is calculated properly.
gotSize := compressedScriptSize(test.uncompressed, test.version) gotSize := compressedScriptSize(test.uncompressed)
if gotSize != len(test.compressed) { if gotSize != len(test.compressed) {
t.Errorf("compressedScriptSize (%s): did not get "+ t.Errorf("compressedScriptSize (%s): did not get "+
"expected size - got %d, want %d", test.name, "expected size - got %d, want %d", test.name,
@ -203,7 +190,7 @@ func TestScriptCompression(t *testing.T) {
// Ensure the script compresses to the expected bytes. // Ensure the script compresses to the expected bytes.
gotCompressed := make([]byte, gotSize) gotCompressed := make([]byte, gotSize)
gotBytesWritten := putCompressedScript(gotCompressed, gotBytesWritten := putCompressedScript(gotCompressed,
test.uncompressed, test.version) test.uncompressed)
if !bytes.Equal(gotCompressed, test.compressed) { if !bytes.Equal(gotCompressed, test.compressed) {
t.Errorf("putCompressedScript (%s): did not get "+ t.Errorf("putCompressedScript (%s): did not get "+
"expected bytes - got %x, want %x", test.name, "expected bytes - got %x, want %x", test.name,
@ -220,8 +207,7 @@ func TestScriptCompression(t *testing.T) {
// Ensure the compressed script size is properly decoded from // Ensure the compressed script size is properly decoded from
// the compressed script. // the compressed script.
gotDecodedSize := decodeCompressedScriptSize(test.compressed, gotDecodedSize := decodeCompressedScriptSize(test.compressed)
test.version)
if gotDecodedSize != len(test.compressed) { if gotDecodedSize != len(test.compressed) {
t.Errorf("decodeCompressedScriptSize (%s): did not get "+ t.Errorf("decodeCompressedScriptSize (%s): did not get "+
"expected size - got %d, want %d", test.name, "expected size - got %d, want %d", test.name,
@ -230,7 +216,7 @@ func TestScriptCompression(t *testing.T) {
} }
// Ensure the script decompresses to the expected bytes. // Ensure the script decompresses to the expected bytes.
gotDecompressed := decompressScript(test.compressed, test.version) gotDecompressed := decompressScript(test.compressed)
if !bytes.Equal(gotDecompressed, test.uncompressed) { if !bytes.Equal(gotDecompressed, test.uncompressed) {
t.Errorf("decompressScript (%s): did not get expected "+ t.Errorf("decompressScript (%s): did not get expected "+
"bytes - got %x, want %x", test.name, "bytes - got %x, want %x", test.name,
@ -246,13 +232,13 @@ func TestScriptCompressionErrors(t *testing.T) {
t.Parallel() t.Parallel()
// A nil script must result in a decoded size of 0. // A nil script must result in a decoded size of 0.
if gotSize := decodeCompressedScriptSize(nil, 1); gotSize != 0 { if gotSize := decodeCompressedScriptSize(nil); gotSize != 0 {
t.Fatalf("decodeCompressedScriptSize with nil script did not "+ t.Fatalf("decodeCompressedScriptSize with nil script did not "+
"return 0 - got %d", gotSize) "return 0 - got %d", gotSize)
} }
// A nil script must result in a nil decompressed script. // A nil script must result in a nil decompressed script.
if gotScript := decompressScript(nil, 1); gotScript != nil { if gotScript := decompressScript(nil); gotScript != nil {
t.Fatalf("decompressScript with nil script did not return nil "+ t.Fatalf("decompressScript with nil script did not return nil "+
"decompressed script - got %x", gotScript) "decompressed script - got %x", gotScript)
} }
@ -261,7 +247,7 @@ func TestScriptCompressionErrors(t *testing.T) {
// in an invalid pubkey must result in a nil decompressed script. // in an invalid pubkey must result in a nil decompressed script.
compressedScript := hexToBytes("04012d74d0cb94344c9569c2e77901573d8d" + compressedScript := hexToBytes("04012d74d0cb94344c9569c2e77901573d8d" +
"7903c3ebec3a957724895dca52c6b4") "7903c3ebec3a957724895dca52c6b4")
if gotScript := decompressScript(compressedScript, 1); gotScript != nil { if gotScript := decompressScript(compressedScript); gotScript != nil {
t.Fatalf("decompressScript with compressed pay-to-"+ t.Fatalf("decompressScript with compressed pay-to-"+
"uncompressed-pubkey that is invalid did not return "+ "uncompressed-pubkey that is invalid did not return "+
"nil decompressed script - got %x", gotScript) "nil decompressed script - got %x", gotScript)
@ -352,48 +338,35 @@ func TestCompressedTxOut(t *testing.T) {
t.Parallel() t.Parallel()
tests := []struct { tests := []struct {
name string name string
amount uint64 amount uint64
compAmount uint64 pkScript []byte
pkScript []byte compressed []byte
compPkScript []byte
version int32
compressed []byte
}{ }{
{ {
name: "nulldata with 0 BTC", name: "nulldata with 0 BTC",
amount: 0, amount: 0,
compAmount: 0, pkScript: hexToBytes("6a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"),
pkScript: hexToBytes("6a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"), compressed: hexToBytes("00286a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"),
compPkScript: hexToBytes("286a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"),
version: 1,
compressed: hexToBytes("00286a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"),
}, },
{ {
name: "pay-to-pubkey-hash dust", name: "pay-to-pubkey-hash dust",
amount: 546, amount: 546,
compAmount: 4911, pkScript: hexToBytes("76a9141018853670f9f3b0582c5b9ee8ce93764ac32b9388ac"),
pkScript: hexToBytes("76a9141018853670f9f3b0582c5b9ee8ce93764ac32b9388ac"), compressed: hexToBytes("a52f001018853670f9f3b0582c5b9ee8ce93764ac32b93"),
compPkScript: hexToBytes("001018853670f9f3b0582c5b9ee8ce93764ac32b93"),
version: 1,
compressed: hexToBytes("a52f001018853670f9f3b0582c5b9ee8ce93764ac32b93"),
}, },
{ {
name: "pay-to-pubkey uncompressed 1 BTC", name: "pay-to-pubkey uncompressed 1 BTC",
amount: 100000000, amount: 100000000,
compAmount: 9, pkScript: hexToBytes("4104192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b40d45264838c0bd96852662ce6a847b197376830160c6d2eb5e6a4c44d33f453eac"),
pkScript: hexToBytes("4104192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b40d45264838c0bd96852662ce6a847b197376830160c6d2eb5e6a4c44d33f453eac"), compressed: hexToBytes("0904192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4"),
compPkScript: hexToBytes("04192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4"),
version: 1,
compressed: hexToBytes("0904192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4"),
}, },
} }
for _, test := range tests { for _, test := range tests {
// Ensure the function to calculate the serialized size without // Ensure the function to calculate the serialized size without
// actually serializing the txout is calculated properly. // actually serializing the txout is calculated properly.
gotSize := compressedTxOutSize(test.amount, test.pkScript, gotSize := compressedTxOutSize(test.amount, test.pkScript)
test.version, false)
if gotSize != len(test.compressed) { if gotSize != len(test.compressed) {
t.Errorf("compressedTxOutSize (%s): did not get "+ t.Errorf("compressedTxOutSize (%s): did not get "+
"expected size - got %d, want %d", test.name, "expected size - got %d, want %d", test.name,
@ -404,7 +377,7 @@ func TestCompressedTxOut(t *testing.T) {
// Ensure the txout compresses to the expected value. // Ensure the txout compresses to the expected value.
gotCompressed := make([]byte, gotSize) gotCompressed := make([]byte, gotSize)
gotBytesWritten := putCompressedTxOut(gotCompressed, gotBytesWritten := putCompressedTxOut(gotCompressed,
test.amount, test.pkScript, test.version, false) test.amount, test.pkScript)
if !bytes.Equal(gotCompressed, test.compressed) { if !bytes.Equal(gotCompressed, test.compressed) {
t.Errorf("compressTxOut (%s): did not get expected "+ t.Errorf("compressTxOut (%s): did not get expected "+
"bytes - got %x, want %x", test.name, "bytes - got %x, want %x", test.name,
@ -420,24 +393,24 @@ func TestCompressedTxOut(t *testing.T) {
} }
// Ensure the serialized bytes are decoded back to the expected // Ensure the serialized bytes are decoded back to the expected
// compressed values. // uncompressed values.
gotAmount, gotScript, gotBytesRead, err := decodeCompressedTxOut( gotAmount, gotScript, gotBytesRead, err := decodeCompressedTxOut(
test.compressed, test.version) test.compressed)
if err != nil { if err != nil {
t.Errorf("decodeCompressedTxOut (%s): unexpected "+ t.Errorf("decodeCompressedTxOut (%s): unexpected "+
"error: %v", test.name, err) "error: %v", test.name, err)
continue continue
} }
if gotAmount != test.compAmount { if gotAmount != test.amount {
t.Errorf("decodeCompressedTxOut (%s): did not get "+ t.Errorf("decodeCompressedTxOut (%s): did not get "+
"expected amount - got %d, want %d", "expected amount - got %d, want %d",
test.name, gotAmount, test.compAmount) test.name, gotAmount, test.amount)
continue continue
} }
if !bytes.Equal(gotScript, test.compPkScript) { if !bytes.Equal(gotScript, test.pkScript) {
t.Errorf("decodeCompressedTxOut (%s): did not get "+ t.Errorf("decodeCompressedTxOut (%s): did not get "+
"expected script - got %x, want %x", "expected script - got %x, want %x",
test.name, gotScript, test.compPkScript) test.name, gotScript, test.pkScript)
continue continue
} }
if gotBytesRead != len(test.compressed) { if gotBytesRead != len(test.compressed) {
@ -446,23 +419,6 @@ func TestCompressedTxOut(t *testing.T) {
test.name, gotBytesRead, len(test.compressed)) test.name, gotBytesRead, len(test.compressed))
continue continue
} }
// Ensure the compressed values decompress to the expected
// txout.
gotAmount = decompressTxOutAmount(gotAmount)
if gotAmount != test.amount {
t.Errorf("decompressTxOut (%s): did not get expected "+
"value - got %d, want %d", test.name, gotAmount,
test.amount)
continue
}
gotScript = decompressScript(gotScript, test.version)
if !bytes.Equal(gotScript, test.pkScript) {
t.Errorf("decompressTxOut (%s): did not get expected "+
"script - got %x, want %x", test.name,
gotScript, test.pkScript)
continue
}
} }
} }
@ -473,7 +429,7 @@ func TestTxOutCompressionErrors(t *testing.T) {
// A compressed txout with missing compressed script must error. // A compressed txout with missing compressed script must error.
compressedTxOut := hexToBytes("00") compressedTxOut := hexToBytes("00")
_, _, _, err := decodeCompressedTxOut(compressedTxOut, 1) _, _, _, err := decodeCompressedTxOut(compressedTxOut)
if !isDeserializeErr(err) { if !isDeserializeErr(err) {
t.Fatalf("decodeCompressedTxOut with missing compressed script "+ t.Fatalf("decodeCompressedTxOut with missing compressed script "+
"did not return expected error type - got %T, want "+ "did not return expected error type - got %T, want "+
@ -482,7 +438,7 @@ func TestTxOutCompressionErrors(t *testing.T) {
// A compressed txout with short compressed script must error. // A compressed txout with short compressed script must error.
compressedTxOut = hexToBytes("0010") compressedTxOut = hexToBytes("0010")
_, _, _, err = decodeCompressedTxOut(compressedTxOut, 1) _, _, _, err = decodeCompressedTxOut(compressedTxOut)
if !isDeserializeErr(err) { if !isDeserializeErr(err) {
t.Fatalf("decodeCompressedTxOut with short compressed script "+ t.Fatalf("decodeCompressedTxOut with short compressed script "+
"did not return expected error type - got %T, want "+ "did not return expected error type - got %T, want "+


@ -703,14 +703,12 @@ func (idx *AddrIndex) indexBlock(data writeIndexData, block *btcutil.Block, view
// The view should always have the input since // The view should always have the input since
// the index contract requires it, however, be // the index contract requires it, however, be
// safe and simply ignore any missing entries. // safe and simply ignore any missing entries.
origin := &txIn.PreviousOutPoint entry := view.LookupEntry(txIn.PreviousOutPoint)
entry := view.LookupEntry(&origin.Hash)
if entry == nil { if entry == nil {
continue continue
} }
pkScript := entry.PkScriptByIndex(origin.Index) idx.indexPkScript(data, entry.PkScript(), txIdx)
idx.indexPkScript(data, pkScript, txIdx)
} }
} }
@ -872,15 +870,14 @@ func (idx *AddrIndex) AddUnconfirmedTx(tx *btcutil.Tx, utxoView *blockchain.Utxo
// transaction has already been validated and thus all inputs are // transaction has already been validated and thus all inputs are
// already known to exist. // already known to exist.
for _, txIn := range tx.MsgTx().TxIn { for _, txIn := range tx.MsgTx().TxIn {
entry := utxoView.LookupEntry(&txIn.PreviousOutPoint.Hash) entry := utxoView.LookupEntry(txIn.PreviousOutPoint)
if entry == nil { if entry == nil {
// Ignore missing entries. This should never happen // Ignore missing entries. This should never happen
// in practice since the function comments specifically // in practice since the function comments specifically
// call out all inputs must be available. // call out all inputs must be available.
continue continue
} }
pkScript := entry.PkScriptByIndex(txIn.PreviousOutPoint.Index) idx.indexUnconfirmedAddresses(entry.PkScript(), tx)
idx.indexUnconfirmedAddresses(pkScript, tx)
} }
// Index addresses of all created outputs. // Index addresses of all created outputs.
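Both hunks above boil down to the same pattern against the reworked view: look up the entry for the exact previous outpoint and read its script and amount directly, instead of indexing into a per-transaction entry. A hedged sketch of that pattern (the helper name and the spent check are illustrative, not part of the index code):

// fundingOutput shows the outpoint-based lookup the indexers now perform
// against a blockchain.UtxoViewpoint. Illustrative only.
func fundingOutput(view *blockchain.UtxoViewpoint, txIn *wire.TxIn) ([]byte, int64, bool) {
	entry := view.LookupEntry(txIn.PreviousOutPoint)
	if entry == nil || entry.IsSpent() {
		// Missing or already spent entries are simply skipped, mirroring
		// the behavior of the indexing code above.
		return nil, 0, false
	}
	return entry.PkScript(), entry.Amount(), true
}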


@ -55,31 +55,16 @@ out:
for { for {
select { select {
case txVI := <-v.validateChan: case txVI := <-v.validateChan:
// Ensure the referenced input transaction is available. // Ensure the referenced input utxo is available.
txIn := txVI.txIn txIn := txVI.txIn
originTxHash := &txIn.PreviousOutPoint.Hash utxo := v.utxoView.LookupEntry(txIn.PreviousOutPoint)
originTxIndex := txIn.PreviousOutPoint.Index if utxo == nil {
txEntry := v.utxoView.LookupEntry(originTxHash)
if txEntry == nil {
str := fmt.Sprintf("unable to find input "+
"transaction %v referenced from "+
"transaction %v", originTxHash,
txVI.tx.Hash())
err := ruleError(ErrMissingTxOut, str)
v.sendResult(err)
break out
}
// Ensure the referenced input transaction public key
// script is available.
pkScript := txEntry.PkScriptByIndex(originTxIndex)
if pkScript == nil {
str := fmt.Sprintf("unable to find unspent "+ str := fmt.Sprintf("unable to find unspent "+
"output %v script referenced from "+ "output %v referenced from "+
"transaction %s:%d", "transaction %s:%d",
txIn.PreviousOutPoint, txVI.tx.Hash(), txIn.PreviousOutPoint, txVI.tx.Hash(),
txVI.txInIndex) txVI.txInIndex)
err := ruleError(ErrBadTxInput, str) err := ruleError(ErrMissingTxOut, str)
v.sendResult(err) v.sendResult(err)
break out break out
} }
@ -87,18 +72,19 @@ out:
// Create a new script engine for the script pair. // Create a new script engine for the script pair.
sigScript := txIn.SignatureScript sigScript := txIn.SignatureScript
witness := txIn.Witness witness := txIn.Witness
inputAmount := txEntry.AmountByIndex(originTxIndex) pkScript := utxo.PkScript()
inputAmount := utxo.Amount()
vm, err := txscript.NewEngine(pkScript, txVI.tx.MsgTx(), vm, err := txscript.NewEngine(pkScript, txVI.tx.MsgTx(),
txVI.txInIndex, v.flags, v.sigCache, txVI.sigHashes, txVI.txInIndex, v.flags, v.sigCache, txVI.sigHashes,
inputAmount) inputAmount)
if err != nil { if err != nil {
str := fmt.Sprintf("failed to parse input "+ str := fmt.Sprintf("failed to parse input "+
"%s:%d which references output %s:%d - "+ "%s:%d which references output %v - "+
"%v (input witness %x, input script "+ "%v (input witness %x, input script "+
"bytes %x, prev output script bytes %x)", "bytes %x, prev output script bytes %x)",
txVI.tx.Hash(), txVI.txInIndex, originTxHash, txVI.tx.Hash(), txVI.txInIndex,
originTxIndex, err, witness, sigScript, txIn.PreviousOutPoint, err, witness,
pkScript) sigScript, pkScript)
err := ruleError(ErrScriptMalformed, str) err := ruleError(ErrScriptMalformed, str)
v.sendResult(err) v.sendResult(err)
break out break out
@ -107,12 +93,12 @@ out:
// Execute the script pair. // Execute the script pair.
if err := vm.Execute(); err != nil { if err := vm.Execute(); err != nil {
str := fmt.Sprintf("failed to validate input "+ str := fmt.Sprintf("failed to validate input "+
"%s:%d which references output %s:%d - "+ "%s:%d which references output %v - "+
"%v (input witness %x, input script "+ "%v (input witness %x, input script "+
"bytes %x, prev output script bytes %x)", "bytes %x, prev output script bytes %x)",
txVI.tx.Hash(), txVI.txInIndex, txVI.tx.Hash(), txVI.txInIndex,
originTxHash, originTxIndex, err, txIn.PreviousOutPoint, err, witness,
witness, sigScript, pkScript) sigScript, pkScript)
err := ruleError(ErrScriptValidation, str) err := ruleError(ErrScriptValidation, str)
v.sendResult(err) v.sendResult(err)
break out break out

Binary file not shown.


@ -7,7 +7,9 @@ package blockchain
import ( import (
"bytes" "bytes"
"container/list" "container/list"
"errors"
"fmt" "fmt"
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash" "github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database" "github.com/btcsuite/btcd/database"
@ -23,6 +25,23 @@ const (
blockHdrOffset = 12 blockHdrOffset = 12
) )
// errInterruptRequested indicates that an operation was cancelled due
// to a user-requested interrupt.
var errInterruptRequested = errors.New("interrupt requested")
// interruptRequested returns true when the provided channel has been closed.
// This simplifies early shutdown slightly since the caller can just use an if
// statement instead of a select.
func interruptRequested(interrupted <-chan struct{}) bool {
select {
case <-interrupted:
return true
default:
}
return false
}
// blockChainContext represents a particular block's placement in the block
// chain. This is used by the block index migration to track block metadata that
// will be written to disk.
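The interrupt plumbing introduced above is what lets the long-running bucket upgrades added in this file stop cleanly part way through. A minimal sketch of the intended pattern (the batch loop itself is illustrative, not the actual migration code):

// runInBatches is an illustrative helper: perform work in small batches and
// give up with errInterruptRequested as soon as shutdown has been requested.
func runInBatches(interrupt <-chan struct{}, numBatches int, doBatch func(int) error) error {
	for i := 0; i < numBatches; i++ {
		if interruptRequested(interrupt) {
			return errInterruptRequested
		}
		if err := doBatch(i); err != nil {
			return err
		}
	}
	return nil
}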
@ -204,3 +223,382 @@ func determineMainChainBlocks(blocksMap map[chainhash.Hash]*blockChainContext, t
blocksMap[*nextHash].mainChain = true blocksMap[*nextHash].mainChain = true
} }
} }
// deserializeUtxoEntryV0 decodes a utxo entry from the passed serialized byte
// slice according to the legacy version 0 format into a map of utxos keyed by
// the output index within the transaction. The map is necessary because the
// previous format encoded all unspent outputs for a transaction using a single
// entry, whereas the new format encodes each unspent output individually.
//
// The legacy format is as follows:
//
// <version><height><header code><unspentness bitmap>[<compressed txouts>,...]
//
// Field Type Size
// version VLQ variable
// block height VLQ variable
// header code VLQ variable
// unspentness bitmap []byte variable
// compressed txouts
// compressed amount VLQ variable
// compressed script []byte variable
//
// The serialized header code format is:
// bit 0 - containing transaction is a coinbase
// bit 1 - output zero is unspent
// bit 2 - output one is unspent
// bits 3-x - number of bytes in unspentness bitmap. When both bits 1 and 2
// are unset, it encodes N-1 since there must be at least one unspent
// output.
//
// The rationale for the header code scheme is as follows:
// - Transactions which only pay to a single output and a change output are
// extremely common, thus an extra byte for the unspentness bitmap can be
// avoided for them by encoding those two outputs in the low order bits.
// - Given it is encoded as a VLQ which can encode values up to 127 with a
// single byte, that leaves 4 bits to represent the number of bytes in the
// unspentness bitmap while still only consuming a single byte for the
// header code. In other words, an unspentness bitmap with up to 120
// transaction outputs can be encoded with a single-byte header code.
// This covers the vast majority of transactions.
// - Encoding N-1 bytes when both bits 1 and 2 are unset allows an additional
// 8 outpoints to be encoded before causing the header code to require an
// additional byte.
//
// Example 1:
// From tx in main blockchain:
// Blk 1, 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098
//
// 010103320496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52
// <><><><------------------------------------------------------------------>
// | | \--------\ |
// | height | compressed txout 0
// version header code
//
// - version: 1
// - height: 1
// - header code: 0x03 (coinbase, output zero unspent, 0 bytes of unspentness)
// - unspentness: Nothing since it is zero bytes
// - compressed txout 0:
// - 0x32: VLQ-encoded compressed amount for 5000000000 (50 BTC)
// - 0x04: special script type pay-to-pubkey
// - 0x96...52: x-coordinate of the pubkey
//
// Example 2:
// From tx in main blockchain:
// Blk 113931, 4a16969aa4764dd7507fc1de7f0baa4850a246de90c45e59a3207f9a26b5036f
//
// 0185f90b0a011200e2ccd6ec7c6e2e581349c77e067385fa8236bf8a800900b8025be1b3efc63b0ad48e7f9f10e87544528d58
// <><----><><><------------------------------------------><-------------------------------------------->
// | | | \-------------------\ | |
// version | \--------\ unspentness | compressed txout 2
// height header code compressed txout 0
//
// - version: 1
// - height: 113931
// - header code: 0x0a (output zero unspent, 1 byte in unspentness bitmap)
// - unspentness: [0x01] (bit 0 is set, so output 0+2 = 2 is unspent)
// NOTE: It's +2 since the first two outputs are encoded in the header code
// - compressed txout 0:
// - 0x12: VLQ-encoded compressed amount for 20000000 (0.2 BTC)
// - 0x00: special script type pay-to-pubkey-hash
// - 0xe2...8a: pubkey hash
// - compressed txout 2:
// - 0x8009: VLQ-encoded compressed amount for 15000000 (0.15 BTC)
// - 0x00: special script type pay-to-pubkey-hash
// - 0xb8...58: pubkey hash
//
// Example 3:
// From tx in main blockchain:
// Blk 338156, 1b02d1c8cfef60a189017b9a420c682cf4a0028175f2f563209e4ff61c8c3620
//
// 0193d06c100000108ba5b9e763011dd46a006572d820e448e12d2bbb38640bc718e6
// <><----><><----><-------------------------------------------------->
// | | | \-----------------\ |
// version | \--------\ unspentness |
// height header code compressed txout 22
//
// - version: 1
// - height: 338156
// - header code: 0x10 (2+1 = 3 bytes in unspentness bitmap)
// NOTE: It's +1 since neither bit 1 nor 2 are set, so N-1 is encoded.
// - unspentness: [0x00 0x00 0x10] (bit 20 is set, so output 20+2 = 22 is unspent)
// NOTE: It's +2 since the first two outputs are encoded in the header code
// - compressed txout 22:
// - 0x8ba5b9e763: VLQ-encoded compressed amount for 366875659 (3.66875659 BTC)
// - 0x01: special script type pay-to-script-hash
// - 0x1d...e6: script hash
func deserializeUtxoEntryV0(serialized []byte) (map[uint32]*UtxoEntry, error) {
// Deserialize the version.
//
// NOTE: Ignore version since it is no longer used in the new format.
_, bytesRead := deserializeVLQ(serialized)
offset := bytesRead
if offset >= len(serialized) {
return nil, errDeserialize("unexpected end of data after version")
}
// Deserialize the block height.
blockHeight, bytesRead := deserializeVLQ(serialized[offset:])
offset += bytesRead
if offset >= len(serialized) {
return nil, errDeserialize("unexpected end of data after height")
}
// Deserialize the header code.
code, bytesRead := deserializeVLQ(serialized[offset:])
offset += bytesRead
if offset >= len(serialized) {
return nil, errDeserialize("unexpected end of data after header")
}
// Decode the header code.
//
// Bit 0 indicates whether the containing transaction is a coinbase.
// Bit 1 indicates output 0 is unspent.
// Bit 2 indicates output 1 is unspent.
// Bits 3-x encodes the number of non-zero unspentness bitmap bytes that
// follow. When both output 0 and 1 are spent, it encodes N-1.
isCoinBase := code&0x01 != 0
output0Unspent := code&0x02 != 0
output1Unspent := code&0x04 != 0
numBitmapBytes := code >> 3
if !output0Unspent && !output1Unspent {
numBitmapBytes++
}
// Ensure there are enough bytes left to deserialize the unspentness
// bitmap.
if uint64(len(serialized[offset:])) < numBitmapBytes {
return nil, errDeserialize("unexpected end of data for " +
"unspentness bitmap")
}
// Add sparse output for unspent outputs 0 and 1 as needed based on the
// details provided by the header code.
var outputIndexes []uint32
if output0Unspent {
outputIndexes = append(outputIndexes, 0)
}
if output1Unspent {
outputIndexes = append(outputIndexes, 1)
}
// Decode the unspentness bitmap adding a sparse output for each unspent
// output.
for i := uint32(0); i < uint32(numBitmapBytes); i++ {
unspentBits := serialized[offset]
for j := uint32(0); j < 8; j++ {
if unspentBits&0x01 != 0 {
// The first 2 outputs are encoded via the
// header code, so adjust the output number
// accordingly.
outputNum := 2 + i*8 + j
outputIndexes = append(outputIndexes, outputNum)
}
unspentBits >>= 1
}
offset++
}
// Map to hold all of the converted outputs.
entries := make(map[uint32]*UtxoEntry)
// All entries will need to potentially be marked as a coinbase.
var packedFlags txoFlags
if isCoinBase {
packedFlags |= tfCoinBase
}
// Decode and add all of the utxos.
for i, outputIndex := range outputIndexes {
// Decode the next utxo.
amount, pkScript, bytesRead, err := decodeCompressedTxOut(
serialized[offset:])
if err != nil {
return nil, errDeserialize(fmt.Sprintf("unable to "+
"decode utxo at index %d: %v", i, err))
}
offset += bytesRead
// Create a new utxo entry with the details deserialized above.
entries[outputIndex] = &UtxoEntry{
amount: int64(amount),
pkScript: pkScript,
blockHeight: int32(blockHeight),
packedFlags: packedFlags,
}
}
return entries, nil
}
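As a quick cross-check of the header code rules described above, here is a small standalone sketch (not part of the change) that decodes the header byte 0x0a from Example 2:

package main

import "fmt"

func main() {
	// Header code 0x0a from Example 2 above: bit 0 clear (not coinbase),
	// bit 1 set (output 0 unspent), bit 2 clear (output 1 spent), and the
	// remaining bits encode one unspentness bitmap byte.
	code := uint64(0x0a)
	isCoinBase := code&0x01 != 0
	output0Unspent := code&0x02 != 0
	output1Unspent := code&0x04 != 0
	numBitmapBytes := code >> 3
	if !output0Unspent && !output1Unspent {
		numBitmapBytes++
	}
	fmt.Println(isCoinBase, output0Unspent, output1Unspent, numBitmapBytes)
	// Output: false true false 1
}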
// upgradeUtxoSetToV2 migrates the utxo set entries from version 1 to 2 in
// batches. It is guaranteed to be updated if this returns without failure.
func upgradeUtxoSetToV2(db database.DB, interrupt <-chan struct{}) error {
// Hardcoded bucket names so updates to the global values do not affect
// old upgrades.
var (
v1BucketName = []byte("utxoset")
v2BucketName = []byte("utxosetv2")
)
log.Infof("Upgrading utxo set to v2. This will take a while...")
start := time.Now()
// Create the new utxo set bucket as needed.
err := db.Update(func(dbTx database.Tx) error {
_, err := dbTx.Metadata().CreateBucketIfNotExists(v2BucketName)
return err
})
if err != nil {
return err
}
// doBatch contains the primary logic for upgrading the utxo set from
// version 1 to 2 in batches. This is done because the utxo set can be
// huge and thus attempting to migrate in a single database transaction
// would result in massive memory usage and could potentially crash on
// many systems due to ulimits.
//
// It returns the number of utxos processed.
const maxUtxos = 200000
doBatch := func(dbTx database.Tx) (uint32, error) {
v1Bucket := dbTx.Metadata().Bucket(v1BucketName)
v2Bucket := dbTx.Metadata().Bucket(v2BucketName)
v1Cursor := v1Bucket.Cursor()
// Migrate utxos so long as the max number of utxos for this
// batch has not been exceeded.
var numUtxos uint32
for ok := v1Cursor.First(); ok && numUtxos < maxUtxos; ok =
v1Cursor.Next() {
// Old key was the transaction hash.
oldKey := v1Cursor.Key()
var txHash chainhash.Hash
copy(txHash[:], oldKey)
// Deserialize the old entry which included all utxos
// for the given transaction.
utxos, err := deserializeUtxoEntryV0(v1Cursor.Value())
if err != nil {
return 0, err
}
// Add an entry for each utxo into the new bucket using
// the new format.
for txOutIdx, utxo := range utxos {
reserialized, err := serializeUtxoEntry(utxo)
if err != nil {
return 0, err
}
key := outpointKey(wire.OutPoint{
Hash: txHash,
Index: txOutIdx,
})
err = v2Bucket.Put(*key, reserialized)
// NOTE: The key is intentionally not recycled
// here since the database interface contract
// prohibits modifications. It will be garbage
// collected normally when the database is done
// with it.
if err != nil {
return 0, err
}
}
// Remove old entry.
err = v1Bucket.Delete(oldKey)
if err != nil {
return 0, err
}
numUtxos += uint32(len(utxos))
if interruptRequested(interrupt) {
// No error here so the database transaction
// is not cancelled and therefore outstanding
// work is written to disk.
break
}
}
return numUtxos, nil
}
// Migrate all entries in batches for the reasons mentioned above.
var totalUtxos uint64
for {
var numUtxos uint32
err := db.Update(func(dbTx database.Tx) error {
var err error
numUtxos, err = doBatch(dbTx)
return err
})
if err != nil {
return err
}
if interruptRequested(interrupt) {
return errInterruptRequested
}
if numUtxos == 0 {
break
}
totalUtxos += uint64(numUtxos)
log.Infof("Migrated %d utxos (%d total)", numUtxos, totalUtxos)
}
// Remove the old bucket and update the utxo set version once it has
// been fully migrated.
err = db.Update(func(dbTx database.Tx) error {
err := dbTx.Metadata().DeleteBucket(v1BucketName)
if err != nil {
return err
}
return dbPutVersion(dbTx, utxoSetVersionKeyName, 2)
})
if err != nil {
return err
}
seconds := int64(time.Since(start) / time.Second)
log.Infof("Done upgrading utxo set. Total utxos: %d in %d seconds",
totalUtxos, seconds)
return nil
}
// maybeUpgradeDbBuckets checks the database version of the buckets used by this
// package and performs any needed upgrades to bring them to the latest version.
//
// All buckets used by this package are guaranteed to be the latest version if
// this function returns without error.
func (b *BlockChain) maybeUpgradeDbBuckets(interrupt <-chan struct{}) error {
// Load or create bucket versions as needed.
var utxoSetVersion uint32
err := b.db.Update(func(dbTx database.Tx) error {
// Load the utxo set version from the database or create it and
// initialize it to version 1 if it doesn't exist.
var err error
utxoSetVersion, err = dbFetchOrCreateVersion(dbTx,
utxoSetVersionKeyName, 1)
return err
})
if err != nil {
return err
}
// Update the utxo set to v2 if needed.
if utxoSetVersion < 2 {
if err := upgradeUtxoSetToV2(b.db, interrupt); err != nil {
return err
}
}
return nil
}

116
blockchain/upgrade_test.go Normal file
View file

@ -0,0 +1,116 @@
// Copyright (c) 2015-2016 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package blockchain
import (
"reflect"
"testing"
)
// TestDeserializeUtxoEntryV0 ensures deserializing unspent transaction output
// entries from the legacy version 0 format works as expected.
func TestDeserializeUtxoEntryV0(t *testing.T) {
tests := []struct {
name string
entries map[uint32]*UtxoEntry
serialized []byte
}{
// From tx in main blockchain:
// 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098
{
name: "Only output 0, coinbase",
entries: map[uint32]*UtxoEntry{
0: {
amount: 5000000000,
pkScript: hexToBytes("410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac"),
blockHeight: 1,
packedFlags: tfCoinBase,
},
},
serialized: hexToBytes("010103320496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52"),
},
// From tx in main blockchain:
// 8131ffb0a2c945ecaf9b9063e59558784f9c3a74741ce6ae2a18d0571dac15bb
{
name: "Only output 1, not coinbase",
entries: map[uint32]*UtxoEntry{
1: {
amount: 1000000,
pkScript: hexToBytes("76a914ee8bd501094a7d5ca318da2506de35e1cb025ddc88ac"),
blockHeight: 100001,
packedFlags: 0,
},
},
serialized: hexToBytes("01858c21040700ee8bd501094a7d5ca318da2506de35e1cb025ddc"),
},
// Adapted from tx in main blockchain:
// df3f3f442d9699857f7f49de4ff0b5d0f3448bec31cdc7b5bf6d25f2abd637d5
{
name: "Only output 2, coinbase",
entries: map[uint32]*UtxoEntry{
2: {
amount: 100937281,
pkScript: hexToBytes("76a914da33f77cee27c2a975ed5124d7e4f7f97513510188ac"),
blockHeight: 99004,
packedFlags: tfCoinBase,
},
},
serialized: hexToBytes("0185843c010182b095bf4100da33f77cee27c2a975ed5124d7e4f7f975135101"),
},
// Adapted from tx in main blockchain:
// 4a16969aa4764dd7507fc1de7f0baa4850a246de90c45e59a3207f9a26b5036f
{
name: "outputs 0 and 2 not coinbase",
entries: map[uint32]*UtxoEntry{
0: {
amount: 20000000,
pkScript: hexToBytes("76a914e2ccd6ec7c6e2e581349c77e067385fa8236bf8a88ac"),
blockHeight: 113931,
packedFlags: 0,
},
2: {
amount: 15000000,
pkScript: hexToBytes("76a914b8025be1b3efc63b0ad48e7f9f10e87544528d5888ac"),
blockHeight: 113931,
packedFlags: 0,
},
},
serialized: hexToBytes("0185f90b0a011200e2ccd6ec7c6e2e581349c77e067385fa8236bf8a800900b8025be1b3efc63b0ad48e7f9f10e87544528d58"),
},
// Adapted from tx in main blockchain:
// 1b02d1c8cfef60a189017b9a420c682cf4a0028175f2f563209e4ff61c8c3620
{
name: "Only output 22, not coinbase",
entries: map[uint32]*UtxoEntry{
22: {
amount: 366875659,
pkScript: hexToBytes("a9141dd46a006572d820e448e12d2bbb38640bc718e687"),
blockHeight: 338156,
packedFlags: 0,
},
},
serialized: hexToBytes("0193d06c100000108ba5b9e763011dd46a006572d820e448e12d2bbb38640bc718e6"),
},
}
for i, test := range tests {
// Deserialize to map of utxos keyed by the output index.
entries, err := deserializeUtxoEntryV0(test.serialized)
if err != nil {
t.Errorf("deserializeUtxoEntryV0 #%d (%s) unexpected "+
"error: %v", i, test.name, err)
continue
}
// Ensure the deserialized entry has the same properties as the
// ones in the test entry.
if !reflect.DeepEqual(entries, test.entries) {
t.Errorf("deserializeUtxoEntryV0 #%d (%s) unexpected "+
"entries: got %v, want %v", i, test.name,
entries, test.entries)
continue
}
}
}
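If useful, the new test can be exercised in isolation with the standard Go tooling from the repository root:

go test -run TestDeserializeUtxoEntryV0 ./blockchain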

View file

@ -10,179 +10,104 @@ import (
"github.com/btcsuite/btcd/chaincfg/chainhash" "github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database" "github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/txscript" "github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil" "github.com/btcsuite/btcutil"
) )
// utxoOutput houses details about an individual unspent transaction output such // txoFlags is a bitmask defining additional information and state for a
// as whether or not it is spent, its public key script, and how much it pays. // transaction output in a utxo view.
// type txoFlags uint8
// Standard public key scripts are stored in the database using a compressed
// format. Since the vast majority of scripts are of the standard form, a fairly
// significant savings is achieved by discarding the portions of the standard
// scripts that can be reconstructed.
//
// Also, since it is common for only a specific output in a given utxo entry to
// be referenced from a redeeming transaction, the script and amount for a given
// output is not uncompressed until the first time it is accessed. This
// provides a mechanism to avoid the overhead of needlessly uncompressing all
// outputs for a given utxo entry at the time of load.
type utxoOutput struct {
spent bool // Output is spent.
compressed bool // The amount and public key script are compressed.
amount int64 // The amount of the output.
pkScript []byte // The public key script for the output.
}
// maybeDecompress decompresses the amount and public key script fields of the const (
// utxo and marks it decompressed if needed. // tfCoinBase indicates that a txout was contained in a coinbase tx.
func (o *utxoOutput) maybeDecompress(version int32) { tfCoinBase txoFlags = 1 << iota
// Nothing to do if it's not compressed.
if !o.compressed {
return
}
o.amount = int64(decompressTxOutAmount(uint64(o.amount))) // tfSpent indicates that a txout is spent.
o.pkScript = decompressScript(o.pkScript, version) tfSpent
o.compressed = false
}
// UtxoEntry contains contextual information about an unspent transaction such // tfModified indicates that a txout has been modified since it was
// as whether or not it is a coinbase transaction, which block it was found in, // loaded.
// and the spent status of its outputs. tfModified
)
// UtxoEntry houses details about an individual transaction output in a utxo
// view such as whether or not it was contained in a coinbase tx, the height of
// the block that contains the tx, whether or not it is spent, its public key
// script, and how much it pays.
type UtxoEntry struct { type UtxoEntry struct {
modified bool // Entry changed since load. // NOTE: Additions, deletions, or modifications to the order of the
version int32 // The version of this tx. // definitions in this struct should not be changed without considering
isCoinBase bool // Whether entry is a coinbase tx. // how it affects alignment on 64-bit platforms. The current order is
blockHeight int32 // Height of block containing tx. // specifically crafted to result in minimal padding. There will be a
sparseOutputs map[uint32]*utxoOutput // Sparse map of unspent outputs. // lot of these in memory, so a few extra bytes of padding adds up.
amount int64
pkScript []byte // The public key script for the output.
blockHeight int32 // Height of block containing tx.
// packedFlags contains additional info about output such as whether it
// is a coinbase, whether it is spent, and whether it has been modified
// since it was loaded. This approach is used in order to reduce memory
// usage since there will be a lot of these in memory.
packedFlags txoFlags
} }
// Version returns the version of the transaction the utxo represents. // isModified returns whether or not the output has been modified since it was
func (entry *UtxoEntry) Version() int32 { // loaded.
return entry.version func (entry *UtxoEntry) isModified() bool {
return entry.packedFlags&tfModified == tfModified
} }
// IsCoinBase returns whether or not the transaction the utxo entry represents // IsCoinBase returns whether or not the output was contained in a coinbase
// is a coinbase. // transaction.
func (entry *UtxoEntry) IsCoinBase() bool { func (entry *UtxoEntry) IsCoinBase() bool {
return entry.isCoinBase return entry.packedFlags&tfCoinBase == tfCoinBase
} }
// BlockHeight returns the height of the block containing the transaction the // BlockHeight returns the height of the block containing the output.
// utxo entry represents.
func (entry *UtxoEntry) BlockHeight() int32 { func (entry *UtxoEntry) BlockHeight() int32 {
return entry.blockHeight return entry.blockHeight
} }
// IsOutputSpent returns whether or not the provided output index has been // IsSpent returns whether or not the output has been spent based upon the
// spent based upon the current state of the unspent transaction output view // current state of the unspent transaction output view it was obtained from.
// the entry was obtained from. func (entry *UtxoEntry) IsSpent() bool {
// return entry.packedFlags&tfSpent == tfSpent
// Returns true if the output index references an output that does not exist
// either due to it being invalid or because the output is not part of the view
// due to previously being spent/pruned.
func (entry *UtxoEntry) IsOutputSpent(outputIndex uint32) bool {
output, ok := entry.sparseOutputs[outputIndex]
if !ok {
return true
}
return output.spent
} }
// SpendOutput marks the output at the provided index as spent. Specifying an // Spend marks the output as spent. Spending an output that is already spent
// output index that does not exist will not have any effect. // has no effect.
func (entry *UtxoEntry) SpendOutput(outputIndex uint32) { func (entry *UtxoEntry) Spend() {
output, ok := entry.sparseOutputs[outputIndex]
if !ok {
return
}
// Nothing to do if the output is already spent. // Nothing to do if the output is already spent.
if output.spent { if entry.IsSpent() {
return return
} }
entry.modified = true // Mark the output as spent and modified.
output.spent = true entry.packedFlags |= tfSpent | tfModified
} }
// IsFullySpent returns whether or not the transaction the utxo entry represents // Amount returns the amount of the output.
// is fully spent. func (entry *UtxoEntry) Amount() int64 {
func (entry *UtxoEntry) IsFullySpent() bool { return entry.amount
// The entry is not fully spent if any of the outputs are unspent.
for _, output := range entry.sparseOutputs {
if !output.spent {
return false
}
}
return true
} }
// AmountByIndex returns the amount of the provided output index. // PkScript returns the public key script for the output.
// func (entry *UtxoEntry) PkScript() []byte {
// Returns 0 if the output index references an output that does not exist return entry.pkScript
// either due to it being invalid or because the output is not part of the view
// due to previously being spent/pruned.
func (entry *UtxoEntry) AmountByIndex(outputIndex uint32) int64 {
output, ok := entry.sparseOutputs[outputIndex]
if !ok {
return 0
}
// Ensure the output is decompressed before returning the amount.
output.maybeDecompress(entry.version)
return output.amount
} }
// PkScriptByIndex returns the public key script for the provided output index. // Clone returns a shallow copy of the utxo entry.
//
// Returns nil if the output index references an output that does not exist
// either due to it being invalid or because the output is not part of the view
// due to previously being spent/pruned.
func (entry *UtxoEntry) PkScriptByIndex(outputIndex uint32) []byte {
output, ok := entry.sparseOutputs[outputIndex]
if !ok {
return nil
}
// Ensure the output is decompressed before returning the script.
output.maybeDecompress(entry.version)
return output.pkScript
}
// Clone returns a deep copy of the utxo entry.
func (entry *UtxoEntry) Clone() *UtxoEntry { func (entry *UtxoEntry) Clone() *UtxoEntry {
if entry == nil { if entry == nil {
return nil return nil
} }
newEntry := &UtxoEntry{
version: entry.version,
isCoinBase: entry.isCoinBase,
blockHeight: entry.blockHeight,
sparseOutputs: make(map[uint32]*utxoOutput),
}
for outputIndex, output := range entry.sparseOutputs {
newEntry.sparseOutputs[outputIndex] = &utxoOutput{
spent: output.spent,
compressed: output.compressed,
amount: output.amount,
pkScript: output.pkScript,
}
}
return newEntry
}
// newUtxoEntry returns a new unspent transaction output entry with the provided
// coinbase flag and block height ready to have unspent outputs added.
func newUtxoEntry(version int32, isCoinBase bool, blockHeight int32) *UtxoEntry {
return &UtxoEntry{ return &UtxoEntry{
version: version, amount: entry.amount,
isCoinBase: isCoinBase, pkScript: entry.pkScript,
blockHeight: blockHeight, blockHeight: entry.blockHeight,
sparseOutputs: make(map[uint32]*utxoOutput), packedFlags: entry.packedFlags,
} }
} }
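To make the flag handling concrete, the following standalone sketch mirrors the bitmask operations used by Spend and IsSpent; the txoFlags definitions are copied from the diff, while the surrounding program is purely illustrative.

package main

import "fmt"

type txoFlags uint8

const (
	tfCoinBase txoFlags = 1 << iota // output came from a coinbase tx
	tfSpent                         // output has been spent
	tfModified                      // output changed since it was loaded
)

func main() {
	// A freshly added coinbase output is marked coinbase and modified.
	flags := tfCoinBase | tfModified
	fmt.Println(flags&tfSpent == tfSpent) // false: not yet spent

	// Spending it sets both the spent and modified bits, matching Spend().
	flags |= tfSpent | tfModified
	fmt.Println(flags&tfSpent == tfSpent) // true
}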
@ -194,7 +119,7 @@ func newUtxoEntry(version int32, isCoinBase bool, blockHeight int32) *UtxoEntry
// The unspent outputs are needed by other transactions for things such as // The unspent outputs are needed by other transactions for things such as
// script validation and double spend prevention. // script validation and double spend prevention.
type UtxoViewpoint struct { type UtxoViewpoint struct {
entries map[chainhash.Hash]*UtxoEntry entries map[wire.OutPoint]*UtxoEntry
bestHash chainhash.Hash bestHash chainhash.Hash
} }
@ -210,17 +135,60 @@ func (view *UtxoViewpoint) SetBestHash(hash *chainhash.Hash) {
view.bestHash = *hash view.bestHash = *hash
} }
// LookupEntry returns information about a given transaction according to the // LookupEntry returns information about a given transaction output according to
// current state of the view. It will return nil if the passed transaction // the current state of the view. It will return nil if the passed output does
// hash does not exist in the view or is otherwise not available such as when // not exist in the view or is otherwise not available such as when it has been
// it has been disconnected during a reorg. // disconnected during a reorg.
func (view *UtxoViewpoint) LookupEntry(txHash *chainhash.Hash) *UtxoEntry { func (view *UtxoViewpoint) LookupEntry(outpoint wire.OutPoint) *UtxoEntry {
entry, ok := view.entries[*txHash] return view.entries[outpoint]
if !ok { }
return nil
// addTxOut adds the specified output to the view if it is not provably
// unspendable. When the view already has an entry for the output, it will be
// marked unspent. All fields will be updated for existing entries since it's
// possible it has changed during a reorg.
func (view *UtxoViewpoint) addTxOut(outpoint wire.OutPoint, txOut *wire.TxOut, isCoinBase bool, blockHeight int32) {
// Don't add provably unspendable outputs.
if txscript.IsUnspendable(txOut.PkScript) {
return
} }
return entry // Update existing entries. All fields are updated because it's
// possible (although extremely unlikely) that the existing entry is
// being replaced by a different transaction with the same hash. This
// is allowed so long as the previous transaction is fully spent.
entry := view.LookupEntry(outpoint)
if entry == nil {
entry = new(UtxoEntry)
view.entries[outpoint] = entry
}
entry.amount = txOut.Value
entry.pkScript = txOut.PkScript
entry.blockHeight = blockHeight
entry.packedFlags = tfModified
if isCoinBase {
entry.packedFlags |= tfCoinBase
}
}
// AddTxOut adds the specified output of the passed transaction to the view if
// it exists and is not provably unspendable. When the view already has an
// entry for the output, it will be marked unspent. All fields will be updated
// for existing entries since it's possible it has changed during a reorg.
func (view *UtxoViewpoint) AddTxOut(tx *btcutil.Tx, txOutIdx uint32, blockHeight int32) {
// Can't add an output for an out of bounds index.
if txOutIdx >= uint32(len(tx.MsgTx().TxOut)) {
return
}
// Update existing entries. All fields are updated because it's
// possible (although extremely unlikely) that the existing entry is
// being replaced by a different transaction with the same hash. This
// is allowed so long as the previous transaction is fully spent.
prevOut := wire.OutPoint{Hash: *tx.Hash(), Index: txOutIdx}
txOut := tx.MsgTx().TxOut[txOutIdx]
view.addTxOut(prevOut, txOut, IsCoinBase(tx), blockHeight)
} }
// AddTxOuts adds all outputs in the passed transaction which are not provably // AddTxOuts adds all outputs in the passed transaction which are not provably
@ -228,45 +196,18 @@ func (view *UtxoViewpoint) LookupEntry(txHash *chainhash.Hash) *UtxoEntry {
// outputs, they are simply marked unspent. All fields will be updated for // outputs, they are simply marked unspent. All fields will be updated for
// existing entries since it's possible it has changed during a reorg. // existing entries since it's possible it has changed during a reorg.
func (view *UtxoViewpoint) AddTxOuts(tx *btcutil.Tx, blockHeight int32) { func (view *UtxoViewpoint) AddTxOuts(tx *btcutil.Tx, blockHeight int32) {
// When there are not already any utxos associated with the transaction,
// add a new entry for it to the view.
entry := view.LookupEntry(tx.Hash())
if entry == nil {
entry = newUtxoEntry(tx.MsgTx().Version, IsCoinBase(tx),
blockHeight)
view.entries[*tx.Hash()] = entry
} else {
entry.blockHeight = blockHeight
}
entry.modified = true
// Loop all of the transaction outputs and add those which are not // Loop all of the transaction outputs and add those which are not
// provably unspendable. // provably unspendable.
isCoinBase := IsCoinBase(tx)
prevOut := wire.OutPoint{Hash: *tx.Hash()}
for txOutIdx, txOut := range tx.MsgTx().TxOut { for txOutIdx, txOut := range tx.MsgTx().TxOut {
if txscript.IsUnspendable(txOut.PkScript) {
continue
}
// Update existing entries. All fields are updated because it's // Update existing entries. All fields are updated because it's
// possible (although extremely unlikely) that the existing // possible (although extremely unlikely) that the existing
// entry is being replaced by a different transaction with the // entry is being replaced by a different transaction with the
// same hash. This is allowed so long as the previous // same hash. This is allowed so long as the previous
// transaction is fully spent. // transaction is fully spent.
if output, ok := entry.sparseOutputs[uint32(txOutIdx)]; ok { prevOut.Index = uint32(txOutIdx)
output.spent = false view.addTxOut(prevOut, txOut, isCoinBase, blockHeight)
output.compressed = false
output.amount = txOut.Value
output.pkScript = txOut.PkScript
continue
}
// Add the unspent transaction output.
entry.sparseOutputs[uint32(txOutIdx)] = &utxoOutput{
spent: false,
compressed: false,
amount: txOut.Value,
pkScript: txOut.PkScript,
}
} }
} }
@ -287,39 +228,30 @@ func (view *UtxoViewpoint) connectTransaction(tx *btcutil.Tx, blockHeight int32,
// if a slice was provided for the spent txout details, append an entry // if a slice was provided for the spent txout details, append an entry
// to it. // to it.
for _, txIn := range tx.MsgTx().TxIn { for _, txIn := range tx.MsgTx().TxIn {
originIndex := txIn.PreviousOutPoint.Index
entry := view.entries[txIn.PreviousOutPoint.Hash]
// Ensure the referenced utxo exists in the view. This should // Ensure the referenced utxo exists in the view. This should
// never happen unless a bug is introduced in the code. // never happen unless a bug is introduced in the code.
entry := view.entries[txIn.PreviousOutPoint]
if entry == nil { if entry == nil {
return AssertError(fmt.Sprintf("view missing input %v", return AssertError(fmt.Sprintf("view missing input %v",
txIn.PreviousOutPoint)) txIn.PreviousOutPoint))
} }
entry.SpendOutput(originIndex)
// Don't create the stxo details if not requested. // Only create the stxo details if requested.
if stxos == nil { if stxos != nil {
continue // Populate the stxo details using the utxo entry.
var stxo = spentTxOut{
amount: entry.Amount(),
pkScript: entry.PkScript(),
height: entry.BlockHeight(),
isCoinBase: entry.IsCoinBase(),
}
*stxos = append(*stxos, stxo)
} }
// Populate the stxo details using the utxo entry. When the // Mark the entry as spent. This is not done until after the
// transaction is fully spent, set the additional stxo fields // relevant details have been accessed since spending it might
// accordingly since those details will no longer be available // clear the fields from memory in the future.
// in the utxo set. entry.Spend()
var stxo = spentTxOut{
compressed: false,
version: entry.Version(),
amount: entry.AmountByIndex(originIndex),
pkScript: entry.PkScriptByIndex(originIndex),
}
if entry.IsFullySpent() {
stxo.height = entry.BlockHeight()
stxo.isCoinBase = entry.IsCoinBase()
}
// Append the entry to the provided spent txouts slice.
*stxos = append(*stxos, stxo)
} }
// Add the transaction's outputs as available utxos. // Add the transaction's outputs as available utxos.
@ -346,11 +278,37 @@ func (view *UtxoViewpoint) connectTransactions(block *btcutil.Block, stxos *[]sp
return nil return nil
} }
// fetchEntryByHash attempts to find any available utxo for the given hash by
// searching the entire set of possible outputs for the given hash. It checks
// the view first and then falls back to the database if needed.
func (view *UtxoViewpoint) fetchEntryByHash(db database.DB, hash *chainhash.Hash) (*UtxoEntry, error) {
// First attempt to find a utxo with the provided hash in the view.
prevOut := wire.OutPoint{Hash: *hash}
for idx := uint32(0); idx < MaxOutputsPerBlock; idx++ {
prevOut.Index = idx
entry := view.LookupEntry(prevOut)
if entry != nil {
return entry, nil
}
}
// Check the database since it doesn't exist in the view. This will
// often be the case since only specifically referenced utxos are loaded
// into the view.
var entry *UtxoEntry
err := db.View(func(dbTx database.Tx) error {
var err error
entry, err = dbFetchUtxoEntryByHash(dbTx, hash)
return err
})
return entry, err
}
// disconnectTransactions updates the view by removing all of the transactions // disconnectTransactions updates the view by removing all of the transactions
// created by the passed block, restoring all utxos the transactions spent by // created by the passed block, restoring all utxos the transactions spent by
// using the provided spent txo information, and setting the best hash for the // using the provided spent txo information, and setting the best hash for the
// view to the block before the passed block. // view to the block before the passed block.
func (view *UtxoViewpoint) disconnectTransactions(block *btcutil.Block, stxos []spentTxOut) error { func (view *UtxoViewpoint) disconnectTransactions(db database.DB, block *btcutil.Block, stxos []spentTxOut) error {
// Sanity check the correct number of stxos are provided. // Sanity check the correct number of stxos are provided.
if len(stxos) != countSpentOutputs(block) { if len(stxos) != countSpentOutputs(block) {
return AssertError("disconnectTransactions called with bad " + return AssertError("disconnectTransactions called with bad " +
@ -365,25 +323,52 @@ func (view *UtxoViewpoint) disconnectTransactions(block *btcutil.Block, stxos []
for txIdx := len(transactions) - 1; txIdx > -1; txIdx-- { for txIdx := len(transactions) - 1; txIdx > -1; txIdx-- {
tx := transactions[txIdx] tx := transactions[txIdx]
// Clear this transaction from the view if it already exists or // All entries will need to potentially be marked as a coinbase.
// create a new empty entry for when it does not. This is done var packedFlags txoFlags
// because the code relies on its existence in the view in order isCoinBase := txIdx == 0
// to signal modifications have happened. if isCoinBase {
isCoinbase := txIdx == 0 packedFlags |= tfCoinBase
entry := view.entries[*tx.Hash()] }
if entry == nil {
entry = newUtxoEntry(tx.MsgTx().Version, isCoinbase, // Mark all of the spendable outputs originally created by the
block.Height()) // transaction as spent. It is instructive to note that while
view.entries[*tx.Hash()] = entry // the outputs aren't actually being spent here, rather they no
// longer exist, since a pruned utxo set is used, there is no
// practical difference between a utxo that does not exist and
// one that has been spent.
//
// When the utxo does not already exist in the view, add an
// entry for it and then mark it spent. This is done because
// the code relies on its existence in the view in order to
// signal modifications have happened.
txHash := tx.Hash()
prevOut := wire.OutPoint{Hash: *txHash}
for txOutIdx, txOut := range tx.MsgTx().TxOut {
if txscript.IsUnspendable(txOut.PkScript) {
continue
}
prevOut.Index = uint32(txOutIdx)
entry := view.entries[prevOut]
if entry == nil {
entry = &UtxoEntry{
amount: txOut.Value,
pkScript: txOut.PkScript,
blockHeight: block.Height(),
packedFlags: packedFlags,
}
view.entries[prevOut] = entry
}
entry.Spend()
} }
entry.modified = true
entry.sparseOutputs = make(map[uint32]*utxoOutput)
// Loop backwards through all of the transaction inputs (except // Loop backwards through all of the transaction inputs (except
// for the coinbase which has no inputs) and unspend the // for the coinbase which has no inputs) and unspend the
// referenced txos. This is necessary to match the order of the // referenced txos. This is necessary to match the order of the
// spent txout entries. // spent txout entries.
if isCoinbase { if isCoinBase {
continue continue
} }
for txInIdx := len(tx.MsgTx().TxIn) - 1; txInIdx > -1; txInIdx-- { for txInIdx := len(tx.MsgTx().TxIn) - 1; txInIdx > -1; txInIdx-- {
@ -393,40 +378,57 @@ func (view *UtxoViewpoint) disconnectTransactions(block *btcutil.Block, stxos []
stxoIdx-- stxoIdx--
// When there is not already an entry for the referenced // When there is not already an entry for the referenced
// transaction in the view, it means it was fully spent, // output in the view, it means it was previously spent,
// so create a new utxo entry in order to resurrect it. // so create a new utxo entry in order to resurrect it.
txIn := tx.MsgTx().TxIn[txInIdx] originOut := &tx.MsgTx().TxIn[txInIdx].PreviousOutPoint
originHash := &txIn.PreviousOutPoint.Hash entry := view.entries[*originOut]
originIndex := txIn.PreviousOutPoint.Index
entry := view.entries[*originHash]
if entry == nil { if entry == nil {
entry = newUtxoEntry(stxo.version, entry = new(UtxoEntry)
stxo.isCoinBase, stxo.height) view.entries[*originOut] = entry
view.entries[*originHash] = entry
} }
// Mark the entry as modified since it is either new // The legacy v1 spend journal format only stored the
// or will be changed below. // coinbase flag and height when the output was the last
entry.modified = true // unspent output of the transaction. As a result, when
// the information is missing, search for it by scanning
// Restore the specific utxo using the stxo data from // all possible outputs of the transaction since it must
// the spend journal if it doesn't already exist in the // be in one of them.
// view. //
output, ok := entry.sparseOutputs[originIndex] // It should be noted that this is quite inefficient,
if !ok { // but it realistically will almost never run since all
// Add the unspent transaction output. // new entries include the information for all outputs
entry.sparseOutputs[originIndex] = &utxoOutput{ // and thus the only way this will be hit is if a long
spent: false, // enough reorg happens such that a block with the old
compressed: stxo.compressed, // spend data is being disconnected. The probability of
amount: stxo.amount, // that in practice is extremely low to begin with and
pkScript: stxo.pkScript, // becomes vanishingly small the more new blocks are
// connected. In the case of a fresh database that has
// only ever run with the new v2 format, this code path
// will never run.
if stxo.height == 0 {
utxo, err := view.fetchEntryByHash(db, txHash)
if err != nil {
return err
} }
continue if utxo == nil {
return AssertError(fmt.Sprintf("unable "+
"to resurrect legacy stxo %v",
*originOut))
}
stxo.height = utxo.BlockHeight()
stxo.isCoinBase = utxo.IsCoinBase()
} }
// Mark the existing referenced transaction output as // Restore the utxo using the stxo data from the spend
// unspent. // journal and mark it as modified.
output.spent = false entry.amount = stxo.amount
entry.pkScript = stxo.pkScript
entry.blockHeight = stxo.height
entry.packedFlags = tfModified
if stxo.isCoinBase {
entry.packedFlags |= tfCoinBase
}
} }
} }
@ -436,88 +438,94 @@ func (view *UtxoViewpoint) disconnectTransactions(block *btcutil.Block, stxos []
return nil return nil
} }
// RemoveEntry removes the given transaction output from the current state of
// the view. It will have no effect if the passed output does not exist in the
// view.
func (view *UtxoViewpoint) RemoveEntry(outpoint wire.OutPoint) {
delete(view.entries, outpoint)
}
// Entries returns the underlying map that stores all of the utxo entries. // Entries returns the underlying map that stores all of the utxo entries.
func (view *UtxoViewpoint) Entries() map[chainhash.Hash]*UtxoEntry { func (view *UtxoViewpoint) Entries() map[wire.OutPoint]*UtxoEntry {
return view.entries return view.entries
} }
// commit prunes all entries marked modified that are now fully spent and marks // commit prunes all entries marked modified that are now fully spent and marks
// all entries as unmodified. // all entries as unmodified.
func (view *UtxoViewpoint) commit() { func (view *UtxoViewpoint) commit() {
for txHash, entry := range view.entries { for outpoint, entry := range view.entries {
if entry == nil || (entry.modified && entry.IsFullySpent()) { if entry == nil || (entry.isModified() && entry.IsSpent()) {
delete(view.entries, txHash) delete(view.entries, outpoint)
continue continue
} }
entry.modified = false entry.packedFlags ^= tfModified
} }
} }
// fetchUtxosMain fetches unspent transaction output data about the provided // fetchUtxosMain fetches unspent transaction output data about the provided
// set of transactions from the point of view of the end of the main chain at // set of outpoints from the point of view of the end of the main chain at the
// the time of the call. // time of the call.
// //
// Upon completion of this function, the view will contain an entry for each // Upon completion of this function, the view will contain an entry for each
// requested transaction. Fully spent transactions, or those which otherwise // requested outpoint. Spent outputs, or those which otherwise don't exist,
// don't exist, will result in a nil entry in the view. // will result in a nil entry in the view.
func (view *UtxoViewpoint) fetchUtxosMain(db database.DB, txSet map[chainhash.Hash]struct{}) error { func (view *UtxoViewpoint) fetchUtxosMain(db database.DB, outpoints map[wire.OutPoint]struct{}) error {
// Nothing to do if there are no requested hashes. // Nothing to do if there are no requested outputs.
if len(txSet) == 0 { if len(outpoints) == 0 {
return nil return nil
} }
// Load the unspent transaction output information for the requested set // Load the requested set of unspent transaction outputs from the point
// of transactions from the point of view of the end of the main chain. // of view of the end of the main chain.
// //
// NOTE: Missing entries are not considered an error here and instead // NOTE: Missing entries are not considered an error here and instead
// will result in nil entries in the view. This is intentionally done // will result in nil entries in the view. This is intentionally done
// since other code uses the presence of an entry in the store as a way // so other code can use the presence of an entry in the store as a way
// to optimize spend and unspend updates to apply only to the specific // to unnecessarily avoid attempting to reload it from the database.
// utxos that the caller needs access to.
return db.View(func(dbTx database.Tx) error { return db.View(func(dbTx database.Tx) error {
for hash := range txSet { for outpoint := range outpoints {
hashCopy := hash entry, err := dbFetchUtxoEntry(dbTx, outpoint)
entry, err := dbFetchUtxoEntry(dbTx, &hashCopy)
if err != nil { if err != nil {
return err return err
} }
view.entries[hash] = entry view.entries[outpoint] = entry
} }
return nil return nil
}) })
} }
// fetchUtxos loads utxo details about provided set of transaction hashes into // fetchUtxos loads the unspent transaction outputs for the provided set of
// the view from the database as needed unless they already exist in the view in // outputs into the view from the database as needed unless they already exist
// which case they are ignored. // in the view in which case they are ignored.
func (view *UtxoViewpoint) fetchUtxos(db database.DB, txSet map[chainhash.Hash]struct{}) error { func (view *UtxoViewpoint) fetchUtxos(db database.DB, outpoints map[wire.OutPoint]struct{}) error {
// Nothing to do if there are no requested hashes. // Nothing to do if there are no requested outputs.
if len(txSet) == 0 { if len(outpoints) == 0 {
return nil return nil
} }
// Filter entries that are already in the view. // Filter entries that are already in the view.
txNeededSet := make(map[chainhash.Hash]struct{}) neededSet := make(map[wire.OutPoint]struct{})
for hash := range txSet { for outpoint := range outpoints {
// Already loaded into the current view. // Already loaded into the current view.
if _, ok := view.entries[hash]; ok { if _, ok := view.entries[outpoint]; ok {
continue continue
} }
txNeededSet[hash] = struct{}{} neededSet[outpoint] = struct{}{}
} }
// Request the input utxos from the database. // Request the input utxos from the database.
return view.fetchUtxosMain(db, txNeededSet) return view.fetchUtxosMain(db, neededSet)
} }
// fetchInputUtxos loads utxo details about the input transactions referenced // fetchInputUtxos loads the unspent transaction outputs for the inputs
// by the transactions in the given block into the view from the database as // referenced by the transactions in the given block into the view from the
// needed. In particular, referenced entries that are earlier in the block are // database as needed. In particular, referenced entries that are earlier in
// added to the view and entries that are already in the view are not modified. // the block are added to the view and entries that are already in the view are
// not modified.
func (view *UtxoViewpoint) fetchInputUtxos(db database.DB, block *btcutil.Block) error { func (view *UtxoViewpoint) fetchInputUtxos(db database.DB, block *btcutil.Block) error {
// Build a map of in-flight transactions because some of the inputs in // Build a map of in-flight transactions because some of the inputs in
// this block could be referencing other transactions earlier in this // this block could be referencing other transactions earlier in this
@ -531,7 +539,7 @@ func (view *UtxoViewpoint) fetchInputUtxos(db database.DB, block *btcutil.Block)
// Loop through all of the transaction inputs (except for the coinbase // Loop through all of the transaction inputs (except for the coinbase
// which has no inputs) collecting them into sets of what is needed and // which has no inputs) collecting them into sets of what is needed and
// what is already known (in-flight). // what is already known (in-flight).
txNeededSet := make(map[chainhash.Hash]struct{}) neededSet := make(map[wire.OutPoint]struct{})
for i, tx := range transactions[1:] { for i, tx := range transactions[1:] {
for _, txIn := range tx.MsgTx().TxIn { for _, txIn := range tx.MsgTx().TxIn {
// It is acceptable for a transaction input to reference // It is acceptable for a transaction input to reference
@ -556,72 +564,74 @@ func (view *UtxoViewpoint) fetchInputUtxos(db database.DB, block *btcutil.Block)
// Don't request entries that are already in the view // Don't request entries that are already in the view
// from the database. // from the database.
if _, ok := view.entries[*originHash]; ok { if _, ok := view.entries[txIn.PreviousOutPoint]; ok {
continue continue
} }
txNeededSet[*originHash] = struct{}{} neededSet[txIn.PreviousOutPoint] = struct{}{}
} }
} }
// Request the input utxos from the database. // Request the input utxos from the database.
return view.fetchUtxosMain(db, txNeededSet) return view.fetchUtxosMain(db, neededSet)
} }
// NewUtxoViewpoint returns a new empty unspent transaction output view. // NewUtxoViewpoint returns a new empty unspent transaction output view.
func NewUtxoViewpoint() *UtxoViewpoint { func NewUtxoViewpoint() *UtxoViewpoint {
return &UtxoViewpoint{ return &UtxoViewpoint{
entries: make(map[chainhash.Hash]*UtxoEntry), entries: make(map[wire.OutPoint]*UtxoEntry),
} }
} }
// FetchUtxoView loads utxo details about the input transactions referenced by // FetchUtxoView loads unspent transaction outputs for the inputs referenced by
// the passed transaction from the point of view of the end of the main chain. // the passed transaction from the point of view of the end of the main chain.
// It also attempts to fetch the utxo details for the transaction itself so the // It also attempts to fetch the utxos for the outputs of the transaction itself
// returned view can be examined for duplicate unspent transaction outputs. // so the returned view can be examined for duplicate transactions.
// //
// This function is safe for concurrent access however the returned view is NOT. // This function is safe for concurrent access however the returned view is NOT.
func (b *BlockChain) FetchUtxoView(tx *btcutil.Tx) (*UtxoViewpoint, error) { func (b *BlockChain) FetchUtxoView(tx *btcutil.Tx) (*UtxoViewpoint, error) {
b.chainLock.RLock() // Create a set of needed outputs based on those referenced by the
defer b.chainLock.RUnlock() // inputs of the passed transaction and the outputs of the transaction
// itself.
// Create a set of needed transactions based on those referenced by the neededSet := make(map[wire.OutPoint]struct{})
// inputs of the passed transaction. Also, add the passed transaction prevOut := wire.OutPoint{Hash: *tx.Hash()}
// itself as a way for the caller to detect duplicates that are not for txOutIdx := range tx.MsgTx().TxOut {
// fully spent. prevOut.Index = uint32(txOutIdx)
txNeededSet := make(map[chainhash.Hash]struct{}) neededSet[prevOut] = struct{}{}
txNeededSet[*tx.Hash()] = struct{}{} }
if !IsCoinBase(tx) { if !IsCoinBase(tx) {
for _, txIn := range tx.MsgTx().TxIn { for _, txIn := range tx.MsgTx().TxIn {
txNeededSet[txIn.PreviousOutPoint.Hash] = struct{}{} neededSet[txIn.PreviousOutPoint] = struct{}{}
} }
} }
// Request the utxos from the point of view of the end of the main // Request the utxos from the point of view of the end of the main
// chain. // chain.
view := NewUtxoViewpoint() view := NewUtxoViewpoint()
err := view.fetchUtxosMain(b.db, txNeededSet) b.chainLock.RLock()
err := view.fetchUtxosMain(b.db, neededSet)
b.chainLock.RUnlock()
return view, err return view, err
} }
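For callers outside the package, the reworked API might be used roughly as follows. The checkInputsUnspent helper and its error text are hypothetical (and assume a non-coinbase transaction); only FetchUtxoView, LookupEntry, and IsSpent come from this change.

package example

import (
	"fmt"

	"github.com/btcsuite/btcd/blockchain"
	"github.com/btcsuite/btcutil"
)

// checkInputsUnspent is a hypothetical caller that loads a view for the
// passed (non-coinbase) transaction and verifies every referenced output is
// present and unspent in that view.
func checkInputsUnspent(chain *blockchain.BlockChain, tx *btcutil.Tx) error {
	view, err := chain.FetchUtxoView(tx)
	if err != nil {
		return err
	}
	for _, txIn := range tx.MsgTx().TxIn {
		entry := view.LookupEntry(txIn.PreviousOutPoint)
		if entry == nil || entry.IsSpent() {
			return fmt.Errorf("missing or spent input %v",
				txIn.PreviousOutPoint)
		}
	}
	return nil
}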
// FetchUtxoEntry loads and returns the unspent transaction output entry for the // FetchUtxoEntry loads and returns the requested unspent transaction output
// passed hash from the point of view of the end of the main chain. // from the point of view of the end of the main chain.
// //
// NOTE: Requesting a hash for which there is no data will NOT return an error. // NOTE: Requesting an output for which there is no data will NOT return an
// Instead both the entry and the error will be nil. This is done to allow // error. Instead both the entry and the error will be nil. This is done to
// pruning of fully spent transactions. In practice this means the caller must // allow pruning of spent transaction outputs. In practice this means the
// check if the returned entry is nil before invoking methods on it. // caller must check if the returned entry is nil before invoking methods on it.
// //
// This function is safe for concurrent access however the returned entry (if // This function is safe for concurrent access however the returned entry (if
// any) is NOT. // any) is NOT.
func (b *BlockChain) FetchUtxoEntry(txHash *chainhash.Hash) (*UtxoEntry, error) { func (b *BlockChain) FetchUtxoEntry(outpoint wire.OutPoint) (*UtxoEntry, error) {
b.chainLock.RLock() b.chainLock.RLock()
defer b.chainLock.RUnlock() defer b.chainLock.RUnlock()
var entry *UtxoEntry var entry *UtxoEntry
err := b.db.View(func(dbTx database.Tx) error { err := b.db.View(func(dbTx database.Tx) error {
var err error var err error
entry, err = dbFetchUtxoEntry(dbTx, txHash) entry, err = dbFetchUtxoEntry(dbTx, outpoint)
return err return err
}) })
if err != nil { if err != nil {

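Similarly, a minimal sketch of the per-output FetchUtxoEntry lookup under the new outpoint-based signature; describeOutput is a made-up helper and the output formatting is illustrative only.

package example

import (
	"fmt"

	"github.com/btcsuite/btcd/blockchain"
	"github.com/btcsuite/btcd/wire"
)

// describeOutput is a hypothetical helper showing the new signature, which
// takes a full outpoint rather than just a transaction hash.
func describeOutput(chain *blockchain.BlockChain, op wire.OutPoint) error {
	entry, err := chain.FetchUtxoEntry(op)
	if err != nil {
		return err
	}
	// A nil entry means the output is unknown or has been pruned as spent.
	if entry == nil || entry.IsSpent() {
		fmt.Printf("output %v is spent or unknown\n", op)
		return nil
	}
	fmt.Printf("output %v pays %d satoshi at height %d\n", op,
		entry.Amount(), entry.BlockHeight())
	return nil
}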
View file

@ -286,8 +286,7 @@ func CheckTransactionSanity(tx *btcutil.Tx) error {
// Previous transaction outputs referenced by the inputs to this // Previous transaction outputs referenced by the inputs to this
// transaction must not be null. // transaction must not be null.
for _, txIn := range msgTx.TxIn { for _, txIn := range msgTx.TxIn {
prevOut := &txIn.PreviousOutPoint if isNullOutpoint(&txIn.PreviousOutPoint) {
if isNullOutpoint(prevOut) {
return ruleError(ErrBadTxInput, "transaction "+ return ruleError(ErrBadTxInput, "transaction "+
"input refers to previous output that "+ "input refers to previous output that "+
"is null") "is null")
@ -385,10 +384,8 @@ func CountP2SHSigOps(tx *btcutil.Tx, isCoinBaseTx bool, utxoView *UtxoViewpoint)
totalSigOps := 0 totalSigOps := 0
for txInIndex, txIn := range msgTx.TxIn { for txInIndex, txIn := range msgTx.TxIn {
// Ensure the referenced input transaction is available. // Ensure the referenced input transaction is available.
originTxHash := &txIn.PreviousOutPoint.Hash utxo := utxoView.LookupEntry(txIn.PreviousOutPoint)
originTxIndex := txIn.PreviousOutPoint.Index if utxo == nil || utxo.IsSpent() {
txEntry := utxoView.LookupEntry(originTxHash)
if txEntry == nil || txEntry.IsOutputSpent(originTxIndex) {
str := fmt.Sprintf("output %v referenced from "+ str := fmt.Sprintf("output %v referenced from "+
"transaction %s:%d either does not exist or "+ "transaction %s:%d either does not exist or "+
"has already been spent", txIn.PreviousOutPoint, "has already been spent", txIn.PreviousOutPoint,
@ -398,7 +395,7 @@ func CountP2SHSigOps(tx *btcutil.Tx, isCoinBaseTx bool, utxoView *UtxoViewpoint)
// We're only interested in pay-to-script-hash types, so skip // We're only interested in pay-to-script-hash types, so skip
// this input if it's not one. // this input if it's not one.
pkScript := txEntry.PkScriptByIndex(originTxIndex) pkScript := utxo.PkScript()
if !txscript.IsPayToScriptHash(pkScript) { if !txscript.IsPayToScriptHash(pkScript) {
continue continue
} }
@ -827,16 +824,21 @@ func (b *BlockChain) checkBlockContext(block *btcutil.Block, prevNode *blockNode
// duplicated to effectively revert the overwritten transactions to a single // duplicated to effectively revert the overwritten transactions to a single
// confirmation thereby making them vulnerable to a double spend. // confirmation thereby making them vulnerable to a double spend.
// //
// For more details, see https://en.bitcoin.it/wiki/BIP_0030 and // For more details, see
// https://github.com/bitcoin/bips/blob/master/bip-0030.mediawiki and
// http://r6.ca/blog/20120206T005236Z.html. // http://r6.ca/blog/20120206T005236Z.html.
// //
// This function MUST be called with the chain state lock held (for reads). // This function MUST be called with the chain state lock held (for reads).
func (b *BlockChain) checkBIP0030(node *blockNode, block *btcutil.Block, view *UtxoViewpoint) error { func (b *BlockChain) checkBIP0030(node *blockNode, block *btcutil.Block, view *UtxoViewpoint) error {
// Fetch utxo details for all of the transactions in this block. // Fetch utxos for all of the transaction outputs in this block.
// Typically, there will not be any utxos for any of the transactions. // Typically, there will not be any utxos for any of the outputs.
fetchSet := make(map[chainhash.Hash]struct{}) fetchSet := make(map[wire.OutPoint]struct{})
for _, tx := range block.Transactions() { for _, tx := range block.Transactions() {
fetchSet[*tx.Hash()] = struct{}{} prevOut := wire.OutPoint{Hash: *tx.Hash()}
for txOutIdx := range tx.MsgTx().TxOut {
prevOut.Index = uint32(txOutIdx)
fetchSet[prevOut] = struct{}{}
}
} }
err := view.fetchUtxos(b.db, fetchSet) err := view.fetchUtxos(b.db, fetchSet)
if err != nil { if err != nil {
@ -845,12 +847,12 @@ func (b *BlockChain) checkBIP0030(node *blockNode, block *btcutil.Block, view *U
// Duplicate transactions are only allowed if the previous transaction // Duplicate transactions are only allowed if the previous transaction
// is fully spent. // is fully spent.
for _, tx := range block.Transactions() { for outpoint := range fetchSet {
txEntry := view.LookupEntry(tx.Hash()) utxo := view.LookupEntry(outpoint)
if txEntry != nil && !txEntry.IsFullySpent() { if utxo != nil && !utxo.IsSpent() {
str := fmt.Sprintf("tried to overwrite transaction %v "+ str := fmt.Sprintf("tried to overwrite transaction %v "+
"at block height %d that is not fully spent", "at block height %d that is not fully spent",
tx.Hash(), txEntry.blockHeight) outpoint.Hash, utxo.BlockHeight())
return ruleError(ErrOverwriteTx, str) return ruleError(ErrOverwriteTx, str)
} }
} }
@ -879,10 +881,8 @@ func CheckTransactionInputs(tx *btcutil.Tx, txHeight int32, utxoView *UtxoViewpo
var totalSatoshiIn int64 var totalSatoshiIn int64
for txInIndex, txIn := range tx.MsgTx().TxIn { for txInIndex, txIn := range tx.MsgTx().TxIn {
// Ensure the referenced input transaction is available. // Ensure the referenced input transaction is available.
originTxHash := &txIn.PreviousOutPoint.Hash utxo := utxoView.LookupEntry(txIn.PreviousOutPoint)
originTxIndex := txIn.PreviousOutPoint.Index if utxo == nil || utxo.IsSpent() {
utxoEntry := utxoView.LookupEntry(originTxHash)
if utxoEntry == nil || utxoEntry.IsOutputSpent(originTxIndex) {
str := fmt.Sprintf("output %v referenced from "+ str := fmt.Sprintf("output %v referenced from "+
"transaction %s:%d either does not exist or "+ "transaction %s:%d either does not exist or "+
"has already been spent", txIn.PreviousOutPoint, "has already been spent", txIn.PreviousOutPoint,
@ -892,15 +892,15 @@ func CheckTransactionInputs(tx *btcutil.Tx, txHeight int32, utxoView *UtxoViewpo
// Ensure the transaction is not spending coins which have not // Ensure the transaction is not spending coins which have not
// yet reached the required coinbase maturity. // yet reached the required coinbase maturity.
if utxoEntry.IsCoinBase() { if utxo.IsCoinBase() {
originHeight := utxoEntry.BlockHeight() originHeight := utxo.BlockHeight()
blocksSincePrev := txHeight - originHeight blocksSincePrev := txHeight - originHeight
coinbaseMaturity := int32(chainParams.CoinbaseMaturity) coinbaseMaturity := int32(chainParams.CoinbaseMaturity)
if blocksSincePrev < coinbaseMaturity { if blocksSincePrev < coinbaseMaturity {
str := fmt.Sprintf("tried to spend coinbase "+ str := fmt.Sprintf("tried to spend coinbase "+
"transaction %v from height %v at "+ "transaction output %v from height %v "+
"height %v before required maturity "+ "at height %v before required maturity "+
"of %v blocks", originTxHash, "of %v blocks", txIn.PreviousOutPoint,
originHeight, txHeight, originHeight, txHeight,
coinbaseMaturity) coinbaseMaturity)
return 0, ruleError(ErrImmatureSpend, str) return 0, ruleError(ErrImmatureSpend, str)
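Aside: the maturity rule enforced above, as a hedged one-function sketch; CoinbaseMaturity is 100 on mainnet, and the helper name here is illustrative rather than btcd API.

package sketch

// spendableAt reports whether a coinbase output created at originHeight may
// be spent by a transaction confirmed at spendHeight, given the network's
// coinbase maturity (100 blocks on mainnet).
func spendableAt(originHeight, spendHeight, coinbaseMaturity int32) bool {
	return spendHeight-originHeight >= coinbaseMaturity
}

// Example: a coinbase created at height 1000 is spendable at height 1100 or
// later on mainnet, but spending it at height 1099 fails with ErrImmatureSpend.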
@ -913,7 +913,7 @@ func CheckTransactionInputs(tx *btcutil.Tx, txHeight int32, utxoView *UtxoViewpo
// a transaction are in a unit value known as a satoshi. One // a transaction are in a unit value known as a satoshi. One
// bitcoin is a quantity of satoshi as defined by the // bitcoin is a quantity of satoshi as defined by the
// SatoshiPerBitcoin constant. // SatoshiPerBitcoin constant.
originTxSatoshi := utxoEntry.AmountByIndex(originTxIndex) originTxSatoshi := utxo.Amount()
if originTxSatoshi < 0 { if originTxSatoshi < 0 {
str := fmt.Sprintf("transaction output has negative "+ str := fmt.Sprintf("transaction output has negative "+
"value of %v", btcutil.Amount(originTxSatoshi)) "value of %v", btcutil.Amount(originTxSatoshi))

View file

@ -1,4 +1,4 @@
// Copyright (c) 2013-2016 The btcsuite developers // Copyright (c) 2013-2017 The btcsuite developers
// Use of this source code is governed by an ISC // Use of this source code is governed by an ISC
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
@ -8,6 +8,7 @@ import (
"fmt" "fmt"
"github.com/btcsuite/btcd/txscript" "github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil" "github.com/btcsuite/btcutil"
) )
@ -34,6 +35,14 @@ const (
// receives compared to "base" data. A scale factor of 4, denotes that // receives compared to "base" data. A scale factor of 4, denotes that
// witness data is 1/4 as cheap as regular non-witness data. // witness data is 1/4 as cheap as regular non-witness data.
WitnessScaleFactor = 4 WitnessScaleFactor = 4
// MinTxOutputWeight is the minimum possible weight for a transaction
// output.
MinTxOutputWeight = WitnessScaleFactor * wire.MinTxOutPayload
// MaxOutputsPerBlock is the maximum number of transaction outputs there
// can be in a block of max weight size.
MaxOutputsPerBlock = MaxBlockWeight / MinTxOutputWeight
) )
// GetBlockWeight computes the value of the weight metric for a given block. // GetBlockWeight computes the value of the weight metric for a given block.
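Aside: the arithmetic behind the two constants added above, as a worked check; MaxBlockWeight is the 4,000,000 weight-unit limit from BIP141.

package sketch

// MinTxOutPayload is 9 bytes (8-byte value + 1-byte script-length varint).
// Scaling by the witness factor of 4 gives a 36-unit minimum output weight,
// so a 4,000,000-weight block holds at most 4,000,000 / 36 = 111,111 outputs.
const (
	witnessScaleFactor = 4
	minTxOutPayload    = 9
	maxBlockWeight     = 4000000

	minTxOutputWeight  = witnessScaleFactor * minTxOutPayload // 36
	maxOutputsPerBlock = maxBlockWeight / minTxOutputWeight   // 111111
)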
@ -71,9 +80,7 @@ func GetTransactionWeight(tx *btcutil.Tx) int64 {
// legacy sig op count scaled according to the WitnessScaleFactor, the sig op // legacy sig op count scaled according to the WitnessScaleFactor, the sig op
// count for all p2sh inputs scaled by the WitnessScaleFactor, and finally the // count for all p2sh inputs scaled by the WitnessScaleFactor, and finally the
// unscaled sig op count for any inputs spending witness programs. // unscaled sig op count for any inputs spending witness programs.
func GetSigOpCost(tx *btcutil.Tx, isCoinBaseTx bool, utxoView *UtxoViewpoint, func GetSigOpCost(tx *btcutil.Tx, isCoinBaseTx bool, utxoView *UtxoViewpoint, bip16, segWit bool) (int, error) {
bip16, segWit bool) (int, error) {
numSigOps := CountSigOps(tx) * WitnessScaleFactor numSigOps := CountSigOps(tx) * WitnessScaleFactor
if bip16 { if bip16 {
numP2SHSigOps, err := CountP2SHSigOps(tx, isCoinBaseTx, utxoView) numP2SHSigOps, err := CountP2SHSigOps(tx, isCoinBaseTx, utxoView)
@ -86,11 +93,10 @@ func GetSigOpCost(tx *btcutil.Tx, isCoinBaseTx bool, utxoView *UtxoViewpoint,
if segWit && !isCoinBaseTx { if segWit && !isCoinBaseTx {
msgTx := tx.MsgTx() msgTx := tx.MsgTx()
for txInIndex, txIn := range msgTx.TxIn { for txInIndex, txIn := range msgTx.TxIn {
// Ensure the referenced input transaction is available. // Ensure the referenced output is available and hasn't
originTxHash := &txIn.PreviousOutPoint.Hash // already been spent.
originTxIndex := txIn.PreviousOutPoint.Index utxo := utxoView.LookupEntry(txIn.PreviousOutPoint)
txEntry := utxoView.LookupEntry(originTxHash) if utxo == nil || utxo.IsSpent() {
if txEntry == nil || txEntry.IsOutputSpent(originTxIndex) {
str := fmt.Sprintf("output %v referenced from "+ str := fmt.Sprintf("output %v referenced from "+
"transaction %s:%d either does not "+ "transaction %s:%d either does not "+
"exist or has already been spent", "exist or has already been spent",
@ -101,7 +107,7 @@ func GetSigOpCost(tx *btcutil.Tx, isCoinBaseTx bool, utxoView *UtxoViewpoint,
witness := txIn.Witness witness := txIn.Witness
sigScript := txIn.SignatureScript sigScript := txIn.SignatureScript
pkScript := txEntry.PkScriptByIndex(originTxIndex) pkScript := utxo.PkScript()
numSigOps += txscript.GetWitnessSigOpCount(sigScript, pkScript, witness) numSigOps += txscript.GetWitnessSigOpCount(sigScript, pkScript, witness)
} }
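Aside: how the total cost above is composed, sketched with plain integers; the three counts are assumed to come from CountSigOps, CountP2SHSigOps and txscript.GetWitnessSigOpCount respectively.

package sketch

const witnessScaleFactor = 4

// totalSigOpCost mirrors GetSigOpCost: legacy and P2SH sig ops are scaled up
// by the witness factor, while sig ops in witness programs are already
// expressed in cost units and are added unscaled.
func totalSigOpCost(legacyOps, p2shOps, witnessOps int) int {
	return (legacyOps+p2shOps)*witnessScaleFactor + witnessOps
}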

View file

@ -289,7 +289,6 @@ type GetTxOutResult struct {
Confirmations int64 `json:"confirmations"` Confirmations int64 `json:"confirmations"`
Value float64 `json:"value"` Value float64 `json:"value"`
ScriptPubKey ScriptPubKeyResult `json:"scriptPubKey"` ScriptPubKey ScriptPubKeyResult `json:"scriptPubKey"`
Version int32 `json:"version"`
Coinbase bool `json:"coinbase"` Coinbase bool `json:"coinbase"`
} }

View file

@ -595,15 +595,21 @@ func (mp *TxPool) fetchInputUtxos(tx *btcutil.Tx) (*blockchain.UtxoViewpoint, er
} }
// Attempt to populate any missing inputs from the transaction pool. // Attempt to populate any missing inputs from the transaction pool.
for originHash, entry := range utxoView.Entries() { for _, txIn := range tx.MsgTx().TxIn {
if entry != nil && !entry.IsFullySpent() { prevOut := &txIn.PreviousOutPoint
entry := utxoView.LookupEntry(*prevOut)
if entry != nil && !entry.IsSpent() {
continue continue
} }
if poolTxDesc, exists := mp.pool[originHash]; exists { if poolTxDesc, exists := mp.pool[prevOut.Hash]; exists {
utxoView.AddTxOuts(poolTxDesc.Tx, mining.UnminedHeight) // AddTxOut ignores out of range index values, so it is
// safe to call without bounds checking here.
utxoView.AddTxOut(poolTxDesc.Tx, prevOut.Index,
mining.UnminedHeight)
} }
} }
return utxoView, nil return utxoView, nil
} }
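Aside: a simplified sketch of the backfill pattern above; any input outpoint that is missing or spent in the chain view is filled from the mempool one output at a time, with the map types below standing in for UtxoViewpoint and the pool.

package sketch

type hash [32]byte

type outPoint struct {
	Hash  hash
	Index uint32
}

type utxoEntry struct {
	amount int64
	spent  bool
}

// populateFromPool backfills a chain-level view with outputs of unconfirmed
// pool transactions, but only for the specific outpoints being spent (the
// per-output analogue of the old AddTxOuts call). poolOutputs is an assumed
// map from txid to that transaction's output values.
func populateFromPool(view map[outPoint]*utxoEntry, spends []outPoint,
	poolOutputs map[hash][]int64) {

	for _, prevOut := range spends {
		if entry, ok := view[prevOut]; ok && entry != nil && !entry.spent {
			continue // already known and unspent in the chain view
		}
		outs, ok := poolOutputs[prevOut.Hash]
		if !ok || prevOut.Index >= uint32(len(outs)) {
			continue // not in the pool either; left missing (orphan input)
		}
		view[prevOut] = &utxoEntry{amount: outs[prevOut.Index]}
	}
}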
@ -733,25 +739,29 @@ func (mp *TxPool) maybeAcceptTransaction(tx *btcutil.Tx, isNew, rateLimit, rejec
// Don't allow the transaction if it exists in the main chain and is // Don't allow the transaction if it exists in the main chain and is
// not already fully spent. // not already fully spent.
txEntry := utxoView.LookupEntry(txHash) prevOut := wire.OutPoint{Hash: *txHash}
if txEntry != nil && !txEntry.IsFullySpent() { for txOutIdx := range tx.MsgTx().TxOut {
return nil, nil, txRuleError(wire.RejectDuplicate, prevOut.Index = uint32(txOutIdx)
"transaction already exists") entry := utxoView.LookupEntry(prevOut)
if entry != nil && !entry.IsSpent() {
return nil, nil, txRuleError(wire.RejectDuplicate,
"transaction already exists")
}
utxoView.RemoveEntry(prevOut)
} }
delete(utxoView.Entries(), *txHash)
// Transaction is an orphan if any of the referenced input transactions // Transaction is an orphan if any of the referenced transaction outputs
// don't exist. Adding orphans to the orphan pool is not handled by // don't exist or are already spent. Adding orphans to the orphan pool
// this function, and the caller should use maybeAddOrphan if this // is not handled by this function, and the caller should use
// behavior is desired. // maybeAddOrphan if this behavior is desired.
var missingParents []*chainhash.Hash var missingParents []*chainhash.Hash
for originHash, entry := range utxoView.Entries() { for outpoint, entry := range utxoView.Entries() {
if entry == nil || entry.IsFullySpent() { if entry == nil || entry.IsSpent() {
// Must make a copy of the hash here since the iterator // Must make a copy of the hash here since the iterator
// is replaced and taking its address directly would // is replaced and taking its address directly would
// result in all of the entries pointing to the same // result in all of the entries pointing to the same
// memory location and thus all be the final hash. // memory location and thus all be the final hash.
hashCopy := originHash hashCopy := outpoint.Hash
missingParents = append(missingParents, &hashCopy) missingParents = append(missingParents, &hashCopy)
} }
} }
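Aside: with the view keyed by outpoint, collecting the parents that make a transaction an orphan looks roughly like this sketch (simplified local types, illustrative names).

package sketch

type hash [32]byte

type outPoint struct {
	Hash  hash
	Index uint32
}

type utxoEntry struct{ spent bool }

// missingParents returns the txids of referenced outputs that are absent or
// spent in the view, i.e. the parents the mempool would need before the
// transaction stops being an orphan. The hash is copied so each returned
// pointer refers to distinct memory rather than a reused loop value.
func missingParents(view map[outPoint]*utxoEntry) []*hash {
	var missing []*hash
	for outpoint, entry := range view {
		if entry == nil || entry.spent {
			hashCopy := outpoint.Hash
			missing = append(missing, &hashCopy)
		}
	}
	return missing
}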

View file

@ -31,10 +31,10 @@ type fakeChain struct {
medianTimePast time.Time medianTimePast time.Time
} }
// FetchUtxoView loads utxo details about the input transactions referenced by // FetchUtxoView loads utxo details about the inputs referenced by the passed
// the passed transaction from the point of view of the fake chain. // transaction from the point of view of the fake chain. It also attempts to
// It also attempts to fetch the utxo details for the transaction itself so the // fetch the utxos for the outputs of the transaction itself so the returned
// returned view can be examined for duplicate unspent transaction outputs. // view can be examined for duplicate transactions.
// //
// This function is safe for concurrent access however the returned view is NOT. // This function is safe for concurrent access however the returned view is NOT.
func (s *fakeChain) FetchUtxoView(tx *btcutil.Tx) (*blockchain.UtxoViewpoint, error) { func (s *fakeChain) FetchUtxoView(tx *btcutil.Tx) (*blockchain.UtxoViewpoint, error) {
@ -46,14 +46,17 @@ func (s *fakeChain) FetchUtxoView(tx *btcutil.Tx) (*blockchain.UtxoViewpoint, er
// Add an entry for the tx itself to the new view. // Add an entry for the tx itself to the new view.
viewpoint := blockchain.NewUtxoViewpoint() viewpoint := blockchain.NewUtxoViewpoint()
entry := s.utxos.LookupEntry(tx.Hash()) prevOut := wire.OutPoint{Hash: *tx.Hash()}
viewpoint.Entries()[*tx.Hash()] = entry.Clone() for txOutIdx := range tx.MsgTx().TxOut {
prevOut.Index = uint32(txOutIdx)
entry := s.utxos.LookupEntry(prevOut)
viewpoint.Entries()[prevOut] = entry.Clone()
}
// Add entries for all of the inputs to the tx to the new view. // Add entries for all of the inputs to the tx to the new view.
for _, txIn := range tx.MsgTx().TxIn { for _, txIn := range tx.MsgTx().TxIn {
originHash := &txIn.PreviousOutPoint.Hash entry := s.utxos.LookupEntry(txIn.PreviousOutPoint)
entry := s.utxos.LookupEntry(originHash) viewpoint.Entries()[txIn.PreviousOutPoint] = entry.Clone()
viewpoint.Entries()[*originHash] = entry.Clone()
} }
return viewpoint, nil return viewpoint, nil

View file

@ -98,9 +98,8 @@ func checkInputsStandard(tx *btcutil.Tx, utxoView *blockchain.UtxoViewpoint) err
// It is safe to elide existence and index checks here since // It is safe to elide existence and index checks here since
// they have already been checked prior to calling this // they have already been checked prior to calling this
// function. // function.
prevOut := txIn.PreviousOutPoint entry := utxoView.LookupEntry(txIn.PreviousOutPoint)
entry := utxoView.LookupEntry(&prevOut.Hash) originPkScript := entry.PkScript()
originPkScript := entry.PkScriptByIndex(prevOut.Index)
switch txscript.GetScriptClass(originPkScript) { switch txscript.GetScriptClass(originPkScript) {
case txscript.ScriptHashTy: case txscript.ScriptHashTy:
numSigOps := txscript.GetPreciseSigOpCount( numSigOps := txscript.GetPreciseSigOpCount(

View file

@ -222,14 +222,14 @@ type BlockTemplate struct {
// mergeUtxoView adds all of the entries in viewB to viewA. The result is that // mergeUtxoView adds all of the entries in viewB to viewA. The result is that
// viewA will contain all of its original entries plus all of the entries // viewA will contain all of its original entries plus all of the entries
// in viewB. It will replace any entries in viewB which also exist in viewA // in viewB. It will replace any entries in viewB which also exist in viewA
// if the entry in viewA is fully spent. // if the entry in viewA is spent.
func mergeUtxoView(viewA *blockchain.UtxoViewpoint, viewB *blockchain.UtxoViewpoint) { func mergeUtxoView(viewA *blockchain.UtxoViewpoint, viewB *blockchain.UtxoViewpoint) {
viewAEntries := viewA.Entries() viewAEntries := viewA.Entries()
for hash, entryB := range viewB.Entries() { for outpoint, entryB := range viewB.Entries() {
if entryA, exists := viewAEntries[hash]; !exists || if entryA, exists := viewAEntries[outpoint]; !exists ||
entryA == nil || entryA.IsFullySpent() { entryA == nil || entryA.IsSpent() {
viewAEntries[hash] = entryB viewAEntries[outpoint] = entryB
} }
} }
} }
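Aside: the merge rule above restated over plain maps; entries from the second view only replace entries in the first that are missing, nil, or already spent.

package sketch

type hash [32]byte

type outPoint struct {
	Hash  hash
	Index uint32
}

type utxoEntry struct{ spent bool }

// mergeViews copies entries from src into dst, overwriting an existing dst
// entry only when it is missing, nil, or spent, mirroring mergeUtxoView with
// maps standing in for *blockchain.UtxoViewpoint.
func mergeViews(dst, src map[outPoint]*utxoEntry) {
	for outpoint, srcEntry := range src {
		if dstEntry, ok := dst[outpoint]; !ok || dstEntry == nil || dstEntry.spent {
			dst[outpoint] = srcEntry
		}
	}
}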
@ -291,11 +291,9 @@ func createCoinbaseTx(params *chaincfg.Params, coinbaseScript []byte, nextBlockH
// which are not provably unspendable as available unspent transaction outputs. // which are not provably unspendable as available unspent transaction outputs.
func spendTransaction(utxoView *blockchain.UtxoViewpoint, tx *btcutil.Tx, height int32) error { func spendTransaction(utxoView *blockchain.UtxoViewpoint, tx *btcutil.Tx, height int32) error {
for _, txIn := range tx.MsgTx().TxIn { for _, txIn := range tx.MsgTx().TxIn {
originHash := &txIn.PreviousOutPoint.Hash entry := utxoView.LookupEntry(txIn.PreviousOutPoint)
originIndex := txIn.PreviousOutPoint.Index
entry := utxoView.LookupEntry(originHash)
if entry != nil { if entry != nil {
entry.SpendOutput(originIndex) entry.Spend()
} }
} }
@ -540,9 +538,8 @@ mempoolLoop:
prioItem := &txPrioItem{tx: tx} prioItem := &txPrioItem{tx: tx}
for _, txIn := range tx.MsgTx().TxIn { for _, txIn := range tx.MsgTx().TxIn {
originHash := &txIn.PreviousOutPoint.Hash originHash := &txIn.PreviousOutPoint.Hash
originIndex := txIn.PreviousOutPoint.Index entry := utxos.LookupEntry(txIn.PreviousOutPoint)
utxoEntry := utxos.LookupEntry(originHash) if entry == nil || entry.IsSpent() {
if utxoEntry == nil || utxoEntry.IsOutputSpent(originIndex) {
if !g.txSource.HaveTransaction(originHash) { if !g.txSource.HaveTransaction(originHash) {
log.Tracef("Skipping tx %s because it "+ log.Tracef("Skipping tx %s because it "+
"references unspent output %s "+ "references unspent output %s "+

View file

@ -67,16 +67,14 @@ func calcInputValueAge(tx *wire.MsgTx, utxoView *blockchain.UtxoViewpoint, nextB
for _, txIn := range tx.TxIn { for _, txIn := range tx.TxIn {
// Don't attempt to accumulate the total input age if the // Don't attempt to accumulate the total input age if the
// referenced transaction output doesn't exist. // referenced transaction output doesn't exist.
originHash := &txIn.PreviousOutPoint.Hash entry := utxoView.LookupEntry(txIn.PreviousOutPoint)
originIndex := txIn.PreviousOutPoint.Index if entry != nil && !entry.IsSpent() {
txEntry := utxoView.LookupEntry(originHash)
if txEntry != nil && !txEntry.IsOutputSpent(originIndex) {
// Inputs with dependencies currently in the mempool // Inputs with dependencies currently in the mempool
// have their block height set to a special constant. // Their input age should be computed as zero since their
// Their input age should be computed as zero since their // parent hasn't made it into a block yet.
// parent hasn't made it into a block yet. // parent hasn't made it into a block yet.
var inputAge int32 var inputAge int32
originHeight := txEntry.BlockHeight() originHeight := entry.BlockHeight()
if originHeight == UnminedHeight { if originHeight == UnminedHeight {
inputAge = 0 inputAge = 0
} else { } else {
@ -84,7 +82,7 @@ func calcInputValueAge(tx *wire.MsgTx, utxoView *blockchain.UtxoViewpoint, nextB
} }
// Sum the input value times age. // Sum the input value times age.
inputValue := txEntry.AmountByIndex(originIndex) inputValue := entry.Amount()
totalInputAge += float64(inputValue * int64(inputAge)) totalInputAge += float64(inputValue * int64(inputAge))
} }
} }
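Aside: the value-times-age accumulation above as a standalone sketch; UnminedHeight is assumed to be the usual max-int32 sentinel for parents still in the mempool.

package sketch

const unminedHeight = 0x7fffffff // assumed sentinel for in-mempool parents

type inputUtxo struct {
	amount       int64 // satoshis
	originHeight int32 // height the output was created, or unminedHeight
}

// inputValueAge sums amount*age over the resolvable inputs, where age is the
// number of blocks between the output's origin and nextBlockHeight, and
// parents still in the mempool contribute an age of zero.
func inputValueAge(inputs []inputUtxo, nextBlockHeight int32) float64 {
	var total float64
	for _, in := range inputs {
		var age int32
		if in.originHeight != unminedHeight {
			age = nextBlockHeight - in.originHeight
		}
		total += float64(in.amount * int64(age))
	}
	return total
}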

View file

@ -898,12 +898,26 @@ func (sm *SyncManager) haveInventory(invVect *wire.InvVect) (bool, error) {
} }
// Check if the transaction exists from the point of view of the // Check if the transaction exists from the point of view of the
// end of the main chain. // end of the main chain. Note that this is only a best effort
entry, err := sm.chain.FetchUtxoEntry(&invVect.Hash) // since it is expensive to check existence of every output and
if err != nil { // the only purpose of this check is to avoid downloading
return false, err // already known transactions. Only the first two outputs are
// checked because the vast majority of transactions consist of
// two outputs where one is some form of "pay-to-somebody-else"
// and the other is a change output.
prevOut := wire.OutPoint{Hash: invVect.Hash}
for i := uint32(0); i < 2; i++ {
prevOut.Index = i
entry, err := sm.chain.FetchUtxoEntry(prevOut)
if err != nil {
return false, err
}
if entry != nil && !entry.IsSpent() {
return true, nil
}
} }
return entry != nil && !entry.IsFullySpent(), nil
return false, nil
} }
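Aside: the best-effort existence check above, sketched with a fetch callback in place of chain.FetchUtxoEntry; only outputs 0 and 1 are probed because most transactions have exactly a payment output and a change output.

package sketch

type hash [32]byte

type outPoint struct {
	Hash  hash
	Index uint32
}

type utxoEntry struct{ spent bool }

// probablyHaveTx reports whether either of the first two outputs of txid is
// known and unspent from the chain's point of view. A false result does not
// prove the transaction is unknown; it only means it is worth requesting.
func probablyHaveTx(txid hash, fetch func(outPoint) (*utxoEntry, error)) (bool, error) {
	prevOut := outPoint{Hash: txid}
	for i := uint32(0); i < 2; i++ {
		prevOut.Index = i
		entry, err := fetch(prevOut)
		if err != nil {
			return false, err
		}
		if entry != nil && !entry.spent {
			return true, nil
		}
	}
	return false, nil
}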
// The requested inventory is an unsupported type, so just claim

View file

@ -2667,7 +2667,6 @@ func handleGetTxOut(s *rpcServer, cmd interface{}, closeChan <-chan struct{}) (i
// from there, otherwise attempt to fetch from the block database. // from there, otherwise attempt to fetch from the block database.
var bestBlockHash string var bestBlockHash string
var confirmations int32 var confirmations int32
var txVersion int32
var value int64 var value int64
var pkScript []byte var pkScript []byte
var isCoinbase bool var isCoinbase bool
@ -2702,12 +2701,12 @@ func handleGetTxOut(s *rpcServer, cmd interface{}, closeChan <-chan struct{}) (i
best := s.cfg.Chain.BestSnapshot() best := s.cfg.Chain.BestSnapshot()
bestBlockHash = best.Hash.String() bestBlockHash = best.Hash.String()
confirmations = 0 confirmations = 0
txVersion = mtx.Version
value = txOut.Value value = txOut.Value
pkScript = txOut.PkScript pkScript = txOut.PkScript
isCoinbase = blockchain.IsCoinBaseTx(mtx) isCoinbase = blockchain.IsCoinBaseTx(mtx)
} else { } else {
entry, err := s.cfg.Chain.FetchUtxoEntry(txHash) out := wire.OutPoint{Hash: *txHash, Index: c.Vout}
entry, err := s.cfg.Chain.FetchUtxoEntry(out)
if err != nil { if err != nil {
return nil, rpcNoTxInfoError(txHash) return nil, rpcNoTxInfoError(txHash)
} }
@ -2717,16 +2716,15 @@ func handleGetTxOut(s *rpcServer, cmd interface{}, closeChan <-chan struct{}) (i
// transaction already in the main chain. Mined transactions // transaction already in the main chain. Mined transactions
// that are spent by a mempool transaction are not affected by // that are spent by a mempool transaction are not affected by
// this. // this.
if entry == nil || entry.IsOutputSpent(c.Vout) { if entry == nil || entry.IsSpent() {
return nil, nil return nil, nil
} }
best := s.cfg.Chain.BestSnapshot() best := s.cfg.Chain.BestSnapshot()
bestBlockHash = best.Hash.String() bestBlockHash = best.Hash.String()
confirmations = 1 + best.Height - entry.BlockHeight() confirmations = 1 + best.Height - entry.BlockHeight()
txVersion = entry.Version() value = entry.Amount()
value = entry.AmountByIndex(c.Vout) pkScript = entry.PkScript()
pkScript = entry.PkScriptByIndex(c.Vout)
isCoinbase = entry.IsCoinBase() isCoinbase = entry.IsCoinBase()
} }
@ -2749,7 +2747,6 @@ func handleGetTxOut(s *rpcServer, cmd interface{}, closeChan <-chan struct{}) (i
BestBlock: bestBlockHash, BestBlock: bestBlockHash,
Confirmations: int64(confirmations), Confirmations: int64(confirmations),
Value: btcutil.Amount(value).ToBTC(), Value: btcutil.Amount(value).ToBTC(),
Version: txVersion,
ScriptPubKey: btcjson.ScriptPubKeyResult{ ScriptPubKey: btcjson.ScriptPubKeyResult{
Asm: disbuf, Asm: disbuf,
Hex: hex.EncodeToString(pkScript), Hex: hex.EncodeToString(pkScript),
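Aside: the confirmation count reported by gettxout, as computed above; mempool outputs report zero, while mined outputs report one plus the depth below the best block.

package sketch

// confirmationsFor mirrors handleGetTxOut: an output still in the mempool has
// zero confirmations, while a mined output has 1 + bestHeight - originHeight.
func confirmationsFor(inMempool bool, bestHeight, originHeight int32) int32 {
	if inMempool {
		return 0
	}
	return 1 + bestHeight - originHeight
}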

View file

@ -62,13 +62,13 @@ const (
// a transaction which fits into a message could possibly have. // a transaction which fits into a message could possibly have.
maxTxInPerMessage = (MaxMessagePayload / minTxInPayload) + 1 maxTxInPerMessage = (MaxMessagePayload / minTxInPayload) + 1
// minTxOutPayload is the minimum payload size for a transaction output. // MinTxOutPayload is the minimum payload size for a transaction output.
// Value 8 bytes + Varint for PkScript length 1 byte. // Value 8 bytes + Varint for PkScript length 1 byte.
minTxOutPayload = 9 MinTxOutPayload = 9
// maxTxOutPerMessage is the maximum number of transactions outputs that // maxTxOutPerMessage is the maximum number of transactions outputs that
// a transaction which fits into a message could possibly have. // a transaction which fits into a message could possibly have.
maxTxOutPerMessage = (MaxMessagePayload / minTxOutPayload) + 1 maxTxOutPerMessage = (MaxMessagePayload / MinTxOutPayload) + 1
// minTxPayload is the minimum payload size for a transaction. Note // minTxPayload is the minimum payload size for a transaction. Note
// that any realistically usable transaction must have at least one // that any realistically usable transaction must have at least one