// Copyright (c) 2017 The btcsuite developers
// Copyright (c) 2017 The Lightning Network Developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.

package builder

import (
	"crypto/rand"
	"fmt"
	"math"

	"github.com/btcsuite/btcd/chaincfg/chainhash"
	"github.com/btcsuite/btcd/txscript"
	"github.com/btcsuite/btcd/wire"
	"github.com/btcsuite/btcutil/gcs"
)

const (
	// DefaultP is the default collision probability (2^-19).
	DefaultP = 19

	// DefaultM is the default value used for the hash range.
	DefaultM uint64 = 784931
)
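// These defaults match the P and M parameters of the BIP 158 basic block
// filter (P = 19, M = 784931).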

// GCSBuilder is a utility type that makes building GCS filters convenient.
type GCSBuilder struct {
	p uint8

	m uint64

	key [gcs.KeySize]byte

	// data is a set of entries represented as strings. This is done to
	// deduplicate items as they are added.
	data map[string]struct{}
	err  error
}
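// Note that the zero value of GCSBuilder is not usable directly: Build
// requires p and m to be non-zero, and AddEntry writes into the data map,
// which is only allocated by Preallocate (invoked by the With* constructors
// below). Callers are expected to obtain a builder from one of those
// constructors rather than constructing the struct literal themselves.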

// RandomKey is a utility function that returns a cryptographically random
// [gcs.KeySize]byte usable as a key for a GCS filter.
func RandomKey() ([gcs.KeySize]byte, error) {
	var key [gcs.KeySize]byte

	// Read a byte slice from rand.Reader.
	randKey := make([]byte, gcs.KeySize)
	_, err := rand.Read(randKey)

	// This shouldn't happen unless the user is on a system that doesn't
	// have a system CSPRNG, but if it does, return the error to the
	// caller rather than continuing with a predictable key.
	if err != nil {
		return key, err
	}

	// Copy the byte slice to a [gcs.KeySize]byte array and return it.
	copy(key[:], randKey)
	return key, nil
}

// DeriveKey is a utility function that derives a key from a chainhash.Hash by
// truncating the bytes of the hash to the appropriate key size.
func DeriveKey(keyHash *chainhash.Hash) [gcs.KeySize]byte {
	var key [gcs.KeySize]byte
	copy(key[:], keyHash.CloneBytes()[:])
	return key
}
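// For example, BuildBasicFilter below keys each per-block filter by that
// block's hash; the equivalent direct derivation would be (sketch):
//
//	blockHash := block.BlockHash()
//	key := DeriveKey(&blockHash)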

// Key retrieves the key with which the builder will build a filter. This is
// useful if the builder is created with a random initial key.
func (b *GCSBuilder) Key() ([gcs.KeySize]byte, error) {
	// Do nothing if the builder's errored out.
	if b.err != nil {
		return [gcs.KeySize]byte{}, b.err
	}

	return b.key, nil
}

// SetKey sets the key with which the builder will build a filter to the passed
// [gcs.KeySize]byte.
func (b *GCSBuilder) SetKey(key [gcs.KeySize]byte) *GCSBuilder {
	// Do nothing if the builder's already errored out.
	if b.err != nil {
		return b
	}

	copy(b.key[:], key[:])
	return b
}

// SetKeyFromHash sets the key with which the builder will build a filter to a
// key derived from the passed chainhash.Hash using DeriveKey().
func (b *GCSBuilder) SetKeyFromHash(keyHash *chainhash.Hash) *GCSBuilder {
	// Do nothing if the builder's already errored out.
	if b.err != nil {
		return b
	}

	return b.SetKey(DeriveKey(keyHash))
}

// SetP sets the filter's probability parameter, which is used when the filter
// is built.
func (b *GCSBuilder) SetP(p uint8) *GCSBuilder {
	// Do nothing if the builder's already errored out.
	if b.err != nil {
		return b
	}

	// Basic sanity check.
	if p > 32 {
		b.err = gcs.ErrPTooBig
		return b
	}

	b.p = p
	return b
}

// SetM sets the filter's modulus value, which is used when the filter is
// built.
func (b *GCSBuilder) SetM(m uint64) *GCSBuilder {
	// Do nothing if the builder's already errored out.
	if b.err != nil {
		return b
	}

	// Basic sanity check.
	if m > uint64(math.MaxUint32) {
		b.err = gcs.ErrPTooBig
		return b
	}

	b.m = m
	return b
}

// Preallocate sets the estimated filter size to reduce the probability of
// memory reallocations as entries are added. If the builder has already had
// data added to it, Preallocate has no effect.
func (b *GCSBuilder) Preallocate(n uint32) *GCSBuilder {
	// Do nothing if the builder's already errored out.
	if b.err != nil {
		return b
	}

	if b.data == nil {
		b.data = make(map[string]struct{}, n)
	}

	return b
}

// AddEntry adds a []byte to the list of entries to be included in the GCS
// filter when it's built.
func (b *GCSBuilder) AddEntry(data []byte) *GCSBuilder {
	// Do nothing if the builder's already errored out.
	if b.err != nil {
		return b
	}

	b.data[string(data)] = struct{}{}
	return b
}

// AddEntries adds all the []byte entries in a [][]byte to the list of entries
// to be included in the GCS filter when it's built.
func (b *GCSBuilder) AddEntries(data [][]byte) *GCSBuilder {
	// Do nothing if the builder's already errored out.
	if b.err != nil {
		return b
	}

	for _, entry := range data {
		b.AddEntry(entry)
	}
	return b
}

// AddHash adds a chainhash.Hash to the list of entries to be included in the
// GCS filter when it's built.
func (b *GCSBuilder) AddHash(hash *chainhash.Hash) *GCSBuilder {
	// Do nothing if the builder's already errored out.
	if b.err != nil {
		return b
	}

	return b.AddEntry(hash.CloneBytes())
}

// AddWitness adds each item of the passed witness stack to the list of
// entries to be included in the GCS filter when it's built.
func (b *GCSBuilder) AddWitness(witness wire.TxWitness) *GCSBuilder {
	// Do nothing if the builder's already errored out.
	if b.err != nil {
		return b
	}

	return b.AddEntries(witness)
}

// Build builds a GCS filter with the builder's key, parameters, and
// accumulated data.
func (b *GCSBuilder) Build() (*gcs.Filter, error) {
	// Do nothing if the builder's already errored out.
	if b.err != nil {
		return nil, b.err
	}

	// We'll ensure that all the parameters we need to actually build the
	// filter properly are set.
	if b.p == 0 {
		return nil, fmt.Errorf("p value is not set, cannot build")
	}
	if b.m == 0 {
		return nil, fmt.Errorf("m value is not set, cannot build")
	}

	dataSlice := make([][]byte, 0, len(b.data))
	for item := range b.data {
		dataSlice = append(dataSlice, []byte(item))
	}

	return gcs.BuildGCSFilter(b.p, b.m, b.key, dataSlice)
}
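// A typical end-to-end use of the builder chains the setters and entry
// additions before the final Build call. A minimal sketch, where scripts is a
// hypothetical [][]byte of items to include:
//
//	filter, err := WithRandomKey().AddEntries(scripts).Build()
//	if err != nil {
//		// Any error recorded earlier in the chain (bad key, bad P or
//		// M, etc.) surfaces here.
//	}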

// WithKeyPNM creates a GCSBuilder with the specified key and the passed
// probability, estimated filter size, and modulus.
func WithKeyPNM(key [gcs.KeySize]byte, p uint8, n uint32, m uint64) *GCSBuilder {
	b := GCSBuilder{}
	return b.SetKey(key).SetP(p).SetM(m).Preallocate(n)
}

// WithKeyPM creates a GCSBuilder with the specified key and the passed
// probability and modulus. Estimated filter size is set to zero, which means
// more reallocations are done when building the filter.
func WithKeyPM(key [gcs.KeySize]byte, p uint8, m uint64) *GCSBuilder {
	return WithKeyPNM(key, p, 0, m)
}

// WithKey creates a GCSBuilder with the specified key. Probability is set to
// DefaultP (2^-19 collision probability) and the modulus is set to DefaultM.
// Estimated filter size is set to zero, which means more reallocations are
// done when building the filter.
func WithKey(key [gcs.KeySize]byte) *GCSBuilder {
	return WithKeyPNM(key, DefaultP, 0, DefaultM)
}

// WithKeyHashPNM creates a GCSBuilder with a key derived from the specified
// chainhash.Hash and the passed probability, estimated filter size, and
// modulus.
func WithKeyHashPNM(keyHash *chainhash.Hash, p uint8, n uint32,
	m uint64) *GCSBuilder {

	return WithKeyPNM(DeriveKey(keyHash), p, n, m)
}

// WithKeyHashPM creates a GCSBuilder with a key derived from the specified
// chainhash.Hash and the passed probability and modulus. Estimated filter
// size is set to zero, which means more reallocations are done when building
// the filter.
func WithKeyHashPM(keyHash *chainhash.Hash, p uint8, m uint64) *GCSBuilder {
	return WithKeyHashPNM(keyHash, p, 0, m)
}

// WithKeyHash creates a GCSBuilder with a key derived from the specified
// chainhash.Hash. Probability is set to DefaultP (2^-19 collision
// probability) and the modulus is set to DefaultM. Estimated filter size is
// set to zero, which means more reallocations are done when building the
// filter.
func WithKeyHash(keyHash *chainhash.Hash) *GCSBuilder {
	return WithKeyHashPNM(keyHash, DefaultP, 0, DefaultM)
}

// WithRandomKeyPNM creates a GCSBuilder with a cryptographically random key
// and the passed probability, estimated filter size, and modulus.
func WithRandomKeyPNM(p uint8, n uint32, m uint64) *GCSBuilder {
	key, err := RandomKey()
	if err != nil {
		b := GCSBuilder{err: err}
		return &b
	}
	return WithKeyPNM(key, p, n, m)
}

// WithRandomKeyPM creates a GCSBuilder with a cryptographically random key
// and the passed probability and modulus. Estimated filter size is set to
// zero, which means more reallocations are done when building the filter.
func WithRandomKeyPM(p uint8, m uint64) *GCSBuilder {
	return WithRandomKeyPNM(p, 0, m)
}

// WithRandomKey creates a GCSBuilder with a cryptographically random key.
// Probability is set to DefaultP (2^-19 collision probability) and the
// modulus is set to DefaultM. Estimated filter size is set to zero, which
// means more reallocations are done when building the filter.
func WithRandomKey() *GCSBuilder {
	return WithRandomKeyPNM(DefaultP, 0, DefaultM)
}

// BuildBasicFilter builds a basic GCS filter from a block. A basic GCS filter
// will contain all the previous output scripts spent by inputs within a block,
// as well as the output scripts created within a block.
func BuildBasicFilter(block *wire.MsgBlock, prevOutScripts [][]byte) (*gcs.Filter, error) {
	blockHash := block.BlockHash()
	b := WithKeyHash(&blockHash)

	// If the filter had an issue with the specified key, then we force it
	// to bubble up here by calling the Key() function.
	_, err := b.Key()
	if err != nil {
		return nil, err
	}

	// In order to build a basic filter, we'll range over the entire block,
	// adding each whole script itself.
	for _, tx := range block.Transactions {
		// For each output in a transaction, we'll add the full output
		// script, skipping any empty scripts.
		for _, txOut := range tx.TxOut {
			if len(txOut.PkScript) == 0 {
				continue
			}

			// In order to allow the filters to later be committed
			// to within an OP_RETURN output, we ignore all
			// OP_RETURNs to avoid a circular dependency.
			if txOut.PkScript[0] == txscript.OP_RETURN &&
				txscript.IsPushOnlyScript(txOut.PkScript[1:]) {
				continue
			}

			b.AddEntry(txOut.PkScript)
		}
	}

	// In the second pass, we'll also add all the prevOutScripts
	// individually as elements.
	for _, prevScript := range prevOutScripts {
		if len(prevScript) == 0 {
			continue
		}

		b.AddEntry(prevScript)
	}

	return b.Build()
}
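// A light client might consume the result roughly as follows. This is a
// sketch: watchedScripts is a hypothetical [][]byte of scripts of interest,
// and the MatchAny call assumes the matching API exposed by the gcs package:
//
//	blockHash := block.BlockHash()
//	filter, err := BuildBasicFilter(block, prevOutScripts)
//	if err != nil {
//		// handle error
//	}
//	matched, err := filter.MatchAny(DeriveKey(&blockHash), watchedScripts)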

// GetFilterHash returns the double-SHA256 of the filter.
func GetFilterHash(filter *gcs.Filter) (chainhash.Hash, error) {
	filterData, err := filter.NBytes()
	if err != nil {
		return chainhash.Hash{}, err
	}

	return chainhash.DoubleHashH(filterData), nil
}

// MakeHeaderForFilter makes a filter chain header for a filter, given the
// filter and the previous filter chain header.
func MakeHeaderForFilter(filter *gcs.Filter, prevHeader chainhash.Hash) (chainhash.Hash, error) {
	filterTip := make([]byte, 2*chainhash.HashSize)
	filterHash, err := GetFilterHash(filter)
	if err != nil {
		return chainhash.Hash{}, err
	}

	// In the buffer we created above we'll compute hash || prevHash as an
	// intermediate value.
	copy(filterTip, filterHash[:])
	copy(filterTip[chainhash.HashSize:], prevHeader[:])

	// The final filter header is the double-SHA256 of the concatenation
	// computed above.
	return chainhash.DoubleHashH(filterTip), nil
}
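// Filter headers form a chain across blocks. A sketch of extending it, where
// prevHeader is the header for the previous block's filter (for the first
// filter in the chain, an all-zero chainhash.Hash is conventionally used as
// the previous header):
//
//	filter, err := BuildBasicFilter(block, prevOutScripts)
//	if err != nil {
//		// handle error
//	}
//	header, err := MakeHeaderForFilter(filter, prevHeader)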