WIP: next hard fork #5

Draft
BrannonKing wants to merge 178 commits from WIP-HF-2022 into master
373 changed files with 19030 additions and 11052 deletions


@ -9,23 +9,20 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
go: [1.16.8, 1.17.1]
go: [1.16.8, 1.17.5]
steps:
- name: Set up Go
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go }}
- name: Check out source
uses: actions/checkout@v2
- name: Install Linters
run: "curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(go env GOPATH)/bin v1.26.0"
- name: Build
env:
GO111MODULE: "on"
run: go build ./...
- name: Test
env:
GO111MODULE: "on"
run: |
sh ./goclean.sh

.github/workflows/full-sync-part-1.yml vendored Normal file

@ -0,0 +1,35 @@
name: Full Sync From 0
on:
workflow_dispatch:
inputs:
note:
description: 'Note'
required: false
default: ''
jobs:
build:
name: Go CI
runs-on: self-hosted
strategy:
matrix:
go: [1.17.5]
steps:
- run: |
echo "Note ${{ github.event.inputs.note }}!"
- name: Setup Go
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go }}
- name: Checkout source
uses: actions/checkout@v2
- name: Build lbcd
run: go build .
- name: Create datadir
run: echo "TEMP_DATA_DIR=$(mktemp -d)" >> $GITHUB_ENV
- name: Run lbcd
run: ./lbcd --datadir=${{env.TEMP_DATA_DIR}}/data --logdir=${{env.TEMP_DATA_DIR}}/logs --connect=127.0.0.1 --norpc
- name: Remove datadir
if: always()
run: rm -rf ${{env.TEMP_DATA_DIR}}
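
Both full-sync workflows are `workflow_dispatch`-only, so they have to be kicked off by hand. A minimal sketch of triggering this one with the GitHub CLI (assumes `gh` is installed and authenticated against the repository; the `note` value is just an example for the input defined above):

```sh
# Queue the "Full Sync From 0" workflow with an optional note.
gh workflow run full-sync-part-1.yml -f note="baseline sync for HF-2022"

# Follow the run that was just queued on the self-hosted runner.
gh run watch
```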

.github/workflows/full-sync-part-2.yml vendored Normal file

@ -0,0 +1,37 @@
name: Full Sync From 814k
on:
workflow_dispatch:
inputs:
note:
description: 'Note'
required: false
default: ''
jobs:
build:
name: Go CI
runs-on: self-hosted
strategy:
matrix:
go: [1.17.5]
steps:
- run: |
echo "Note ${{ github.event.inputs.note }}!"
- name: Setup Go
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go }}
- name: Checkout source
uses: actions/checkout@v2
- name: Build lbcd
run: go build .
- name: Create datadir
run: echo "TEMP_DATA_DIR=$(mktemp -d)" >> $GITHUB_ENV
- name: Copy initial data
run: cp -r /home/lbry/lbcd_814k/* ${{env.TEMP_DATA_DIR}}
- name: Run lbcd
run: ./lbcd --datadir=${{env.TEMP_DATA_DIR}}/data --logdir=${{env.TEMP_DATA_DIR}}/logs --connect=127.0.0.1 --norpc
- name: Remove datadir
if: always()
run: rm -rf ${{env.TEMP_DATA_DIR}}

.github/workflows/golangci-lint.yml vendored Normal file

@ -0,0 +1,57 @@
name: golangci-lint
env:
# go needs absolute directories, using the $HOME variable doesn't work here.
GOCACHE: /home/runner/work/go/pkg/build
GOPATH: /home/runner/work/go
GO_VERSION: '^1.17.0'
on:
push:
tags:
- v*
branches:
- "*"
pull_request:
branches:
- "*"
jobs:
golangci:
name: lint
runs-on: ubuntu-latest
steps:
- name: setup go ${{ env.GO_VERSION }}
uses: actions/setup-go@v2
with:
go-version: '${{ env.GO_VERSION }}'
- name: checkout source
uses: actions/checkout@v2
- name: compile code
run: go install -v ./...
- name: golangci-lint
uses: golangci/golangci-lint-action@v2
with:
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
version: latest
# Optional: working directory, useful for monorepos
# working-directory: somedir
# Optional: golangci-lint command line arguments.
# args: --issues-exit-code=0
# Optional: show only new issues if it's a pull request. The default value is `false`.
# only-new-issues: true
# Optional: if set to true then the action will use pre-installed Go.
skip-go-installation: true
# Optional: if set to true then the action doesn't cache or restore ~/go/pkg.
# skip-pkg-cache: true
# Optional: if set to true then the action doesn't cache or restore ~/.cache/go-build.
# skip-build-cache: true

.github/workflows/release.yml vendored Normal file

@ -0,0 +1,48 @@
name: goreleaser
on:
workflow_dispatch:
inputs:
note:
description: 'Note'
required: false
default: ''
pull_request:
push:
tags:
- '*'
permissions:
contents: write
jobs:
goreleaser:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 0
-
name: Set up Go
uses: actions/setup-go@v2
with:
go-version: 1.17.5
-
name: Run GoReleaser
uses: goreleaser/goreleaser-action@v2
with:
distribution: goreleaser
version: latest
args: release --rm-dist
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
name: Upload artifacts
uses: actions/upload-artifact@v2
with:
name: lbcd-${{ github.sha }}
path: |
dist/checksums.txt
dist/*.tar.gz

.gitignore vendored

@ -3,6 +3,7 @@
# Databases
btcd.db
lbcd.db
*-shm
*-wal
@ -33,6 +34,21 @@ _testmain.go
*.exe
.DS_Store
# Code coverage files
profile.tmp
profile.cov
# IDE
.idea
.vscode
# Binaries
btcd
btcctl
lbcd
lbcctl
# CI artifacts
dist

.golangci.yml Normal file

@ -0,0 +1,152 @@
linters-settings:
depguard:
list-type: blacklist
packages:
# logging is allowed only by logutils.Log, logrus
# is allowed to use only in logutils package
- github.com/sirupsen/logrus
packages-with-error-message:
- github.com/sirupsen/logrus: "logging is allowed only by logutils.Log"
dupl:
threshold: 100
funlen:
lines: 100
statements: 50
gci:
local-prefixes: github.com/golangci/golangci-lint
goconst:
min-len: 2
min-occurrences: 2
gocritic:
enabled-tags:
- diagnostic
- experimental
- opinionated
- performance
- style
disabled-checks:
- dupImport # https://github.com/go-critic/go-critic/issues/845
- ifElseChain
- octalLiteral
- whyNoLint
- wrapperFunc
gocyclo:
min-complexity: 15
goimports:
local-prefixes: github.com/golangci/golangci-lint
gomnd:
settings:
mnd:
# don't include the "operation" and "assign"
checks:
- argument
- case
- condition
- return
govet:
check-shadowing: true
settings:
printf:
funcs:
- (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof
- (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf
- (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf
- (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf
lll:
line-length: 140
maligned:
suggest-new: true
misspell:
locale: US
nolintlint:
allow-leading-space: true # don't require machine-readable nolint directives (i.e. with no leading space)
allow-unused: false # report any unused nolint directives
require-explanation: false # don't require an explanation for nolint directives
require-specific: false # don't require nolint directives to be specific about which linter is being skipped
linters:
disable-all: true
enable:
- asciicheck
- bodyclose
# - deadcode
- depguard
# - dogsled
# - dupl
# - errcheck
# - exhaustive
- exportloopref
# - funlen
# - gochecknoglobals
# - gochecknoinits
# - gocognit
# - goconst
# - gocritic
# - gocyclo
# - godot
# - godox
# - goerr113
- gofmt
- goimports
# - gomnd
- goprintffuncname
# - gosec
# - gosimple
# - govet
# - ineffassign
# - interfacer
# - lll
# - maligned
# - misspell
- nakedret
# - nestif
# - noctx
# - nolintlint
# - prealloc
- rowserrcheck
# - revive
# - scopelint
# - staticcheck
# - structcheck
# - stylecheck
# - testpackage
# - typecheck
- unconvert
# - unparam
# - unused
# - varcheck
# - whitespace
# - wsl
issues:
# Excluding configuration per-path, per-linter, per-text and per-source
exclude-rules:
- path: _test\.go
linters:
- gomnd
- path: pkg/golinters/errcheck.go
text: "SA1019: errCfg.Exclude is deprecated: use ExcludeFunctions instead"
- path: pkg/commands/run.go
text: "SA1019: lsc.Errcheck.Exclude is deprecated: use ExcludeFunctions instead"
# TODO must be removed after the release of the next version (v1.41.0)
- path: pkg/commands/run.go
linters:
- gomnd
# TODO must be removed after the release of the next version (v1.41.0)
- path: pkg/golinters/nolintlint/nolintlint.go
linters:
- gomnd
# TODO must be removed after the release of the next version (v1.41.0)
- path: pkg/printers/tab.go
linters:
- gomnd
run:
skip-dirs:
- test/testdata_etc
- internal/cache
- internal/renameio
- internal/robustio
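
For quicker feedback than CI, the same checks can be run locally against this configuration. A sketch, assuming `golangci-lint` is installed and invoked from the repository root (it picks up `.golangci.yml` automatically):

```sh
# Run the enabled linters across the whole module.
golangci-lint run ./...

# Narrow the run to one package while iterating on a change.
golangci-lint run ./blockchain/...
```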

.goreleaser.yml Normal file

@ -0,0 +1,57 @@
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
before:
hooks:
# You may remove this if you don't use go modules.
- go mod tidy
# you may remove this if you don't need go generate
- go generate ./...
builds:
-
main: .
id: "lbcd"
binary: "lbcd"
env:
- CGO_ENABLED=0
flags:
- -trimpath
ldflags:
- -s -w
- -X main.appBuild={{.Commit}}
targets:
- linux_amd64
- linux_arm64
- darwin_amd64
- darwin_arm64
- windows_amd64
-
main: ./cmd/lbcctl
id: "lbcctl"
binary: "lbcctl"
flags:
- -trimpath
ldflags:
- -s -w
- -X main.appBuild={{.Commit}}
env:
- CGO_ENABLED=0
targets:
- linux_amd64
- linux_arm64
- darwin_amd64
- darwin_arm64
- windows_amd64
checksum:
name_template: 'checksums.txt'
snapshot:
name_template: "{{ incpatch .Version }}-next"
changelog:
sort: asc
filters:
exclude:
- '^docs:'
- '^test:'
release:
draft: true
prerelease: auto
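
The release configuration can be sanity-checked locally without pushing a tag. A sketch, assuming GoReleaser is installed:

```sh
# Validate the .goreleaser.yml syntax.
goreleaser check

# Build every configured target into ./dist without publishing; --rm-dist
# matches the argument used by the release workflow above.
goreleaser release --snapshot --rm-dist
```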

CHANGES

File diff suppressed because it is too large.


@ -1,18 +1,18 @@
# This Dockerfile builds btcd from source and creates a small (55 MB) docker container based on alpine linux.
# This Dockerfile builds lbcd from source and creates a small (55 MB) docker container based on alpine linux.
#
# Clone this repository and run the following command to build and tag a fresh btcd amd64 container:
# Clone this repository and run the following command to build and tag a fresh lbcd amd64 container:
#
# docker build . -t yourregistry/btcd
# docker build . -t yourregistry/lbcd
#
# You can use the following command to build an arm64v8 container:
#
# docker build . -t yourregistry/btcd --build-arg ARCH=arm64v8
# docker build . -t yourregistry/lbcd --build-arg ARCH=arm64v8
#
# For more information how to use this docker image visit:
# https://github.com/btcsuite/btcd/tree/master/docs
# https://github.com/lbryio/lbcd/tree/master/docs
#
# 8333 Mainnet Bitcoin peer-to-peer port
# 8334 Mainnet RPC port
# 9246 Mainnet Bitcoin peer-to-peer port
# 9245 Mainnet RPC port
ARG ARCH=amd64
# using the SHA256 instead of tags
@ -39,8 +39,8 @@ FROM $ARCH/alpine:3.12
COPY --from=build-container /go/bin /bin
VOLUME ["/root/.btcd"]
VOLUME ["/root/.lbcd"]
EXPOSE 8333 8334
EXPOSE 9245 9246
ENTRYPOINT ["btcd"]
ENTRYPOINT ["lbcd"]
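
With the volume and ports switched over to LBRY values, building and running the image looks roughly like this (a sketch; `yourregistry/lbcd` is the example tag from the comments above and `lbcd-data` is a hypothetical volume name):

```sh
# Build the image, then run it with the mainnet P2P (9246) and RPC (9245)
# ports published and chain data persisted in a named volume.
docker build . -t yourregistry/lbcd
docker run -d --name lbcd \
  -p 9246:9246 -p 9245:9245 \
  -v lbcd-data:/root/.lbcd \
  yourregistry/lbcd
```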


@ -1,5 +1,6 @@
ISC License
Copyright (c) 2021 The LBRY developers
Copyright (c) 2013-2017 The btcsuite developers
Copyright (c) 2015-2016 The Decred developers

README.md

@ -1,121 +1,133 @@
btcd
====
# lbcd
[![Build Status](https://github.com/btcsuite/btcd/workflows/Build%20and%20Test/badge.svg)](https://github.com/btcsuite/btcd/actions)
[![Coverage Status](https://coveralls.io/repos/github/btcsuite/btcd/badge.svg?branch=master)](https://coveralls.io/github/btcsuite/btcd?branch=master)
[![Build Status](https://github.com/lbryio/lbcd/workflows/Build%20and%20Test/badge.svg)](https://github.com/lbryio/lbcd/actions)
[![Coverage Status](https://coveralls.io/repos/github/lbryio/lbcd/badge.svg?branch=master)](https://coveralls.io/github/lbryio/lbcd?branch=master)
[![ISC License](https://img.shields.io/badge/license-ISC-blue.svg)](http://copyfree.org)
[![GoDoc](https://img.shields.io/badge/godoc-reference-blue.svg)](https://pkg.go.dev/github.com/btcsuite/btcd)
<!--[![GoDoc](https://img.shields.io/badge/godoc-reference-blue.svg)](https://pkg.go.dev/github.com/lbryio/lbcd)-->
btcd is an alternative full node bitcoin implementation written in Go (golang).
`lbcd` is a full node implementation of LBRY's blockchain written in Go (golang).
This project is currently under active development and is in a Beta state. It
is extremely stable and has been in production use since October 2013.
This project is currently under active development and is in a Beta state while
we ensure it matches LBRYcrd's functionality. The intention is that it properly
downloads, validates, and serves the block chain using the exact rules
(including consensus bugs) for block acceptance as LBRYcrd.
We have taken great care to avoid lbcd causing a fork to the blockchain.
It properly downloads, validates, and serves the block chain using the exact
rules (including consensus bugs) for block acceptance as Bitcoin Core. We have
taken great care to avoid btcd causing a fork to the block chain. It includes a
full block validation testing framework which contains all of the 'official'
block acceptance tests (and some additional ones) that is run on every pull
request to help ensure it properly follows consensus. Also, it passes all of
the JSON test data in the Bitcoin Core code.
Note: `lbcd` does *NOT* include wallet functionality. That functionality is provided by the
[lbcwallet](https://github.com/lbryio/lbcwallet) and the [LBRY SDK](https://github.com/lbryio/lbry-sdk).
It also properly relays newly mined blocks, maintains a transaction pool, and
relays individual transactions that have not yet made it into a block. It
ensures all individual transactions admitted to the pool follow the rules
required by the block chain and also includes more strict checks which filter
transactions based on miner requirements ("standard" transactions).
## Security
One key difference between btcd and Bitcoin Core is that btcd does *NOT* include
wallet functionality and this was a very intentional design decision. See the
blog entry [here](https://web.archive.org/web/20171125143919/https://blog.conformal.com/btcd-not-your-moms-bitcoin-daemon)
for more details. This means you can't actually make or receive payments
directly with btcd. That functionality is provided by the
[btcwallet](https://github.com/btcsuite/btcwallet) and
[Paymetheus](https://github.com/btcsuite/Paymetheus) (Windows-only) projects
which are both under active development.
We take security seriously. Please contact [security](mailto:security@lbry.com) regarding any security issues.
Our PGP key is [here](https://lbry.com/faq/pgp-key) if you need it.
We maintain a mailing list for notifications of upgrades, security issues,
and soft/hard forks. To join, visit [fork list](https://lbry.com/forklist)
## Requirements
[Go](http://golang.org) 1.16 or newer.
All common operating systems are supported. lbcd requires at least 8GB of RAM
and at least 100GB of disk storage. Both RAM and disk requirements increase slowly over time.
Using a fast NVMe disk is recommended.
`lbcd` is not immune to data loss. It expects a clean shutdown via SIGINT or
SIGTERM. SIGKILL, immediate VM kills, and sudden power loss can cause data
corruption, thus requiring chain resynchronization for recovery.
For compilation, [Go](http://golang.org) 1.16 or newer is required.
## Installation
https://github.com/btcsuite/btcd/releases
Acquire binary files from [releases](https://github.com/lbryio/lbcd/releases)
#### Linux/BSD/MacOSX/POSIX - Build from Source
### To build from Source on Linux/BSD/MacOSX/POSIX
- Install Go according to the installation instructions here:
http://golang.org/doc/install
Install Go according to its [installation instructions](http://golang.org/doc/install).
- Ensure Go was installed properly and is a supported version:
``` sh
git clone https://github.com/lbryio/lbcd
cd lbcd
```bash
$ go version
$ go env GOROOT GOPATH
# Build lbcd
go build .
# Build lbcctl
go build ./cmd/lbcctl
```
NOTE: The `GOROOT` and `GOPATH` above must not be the same path. It is
recommended that `GOPATH` is set to a directory in your home directory such as
`~/goprojects` to avoid write permission issues. It is also recommended to add
`$GOPATH/bin` to your `PATH` at this point.
Both [GoLand](https://www.jetbrains.com/go/)
and [VS Code](https://code.visualstudio.com/docs/languages/go) IDEs are supported.
- Run the following commands to obtain btcd, all dependencies, and install it:
## Usage
```bash
$ cd $GOPATH/src/github.com/btcsuite/btcd
$ GO111MODULE=on go install -v . ./cmd/...
By default, data and logs are stored in `<LBCDDIR>`:
- Linux: `~/.lbcd/`
- MacOS: `/Users/<username>/Library/Application Support/Lbcd/`
To enable RPC access, a username and password are required. Example:
``` sh
./lbcd --txindex --rpcuser=rpcuser --rpcpass=rpcpass
```
- btcd (and utilities) will now be installed in ```$GOPATH/bin```. If you did
not already add the bin directory to your system path during Go installation,
we recommend you do so now.
Interact with lbcd via RPC using `lbcctl`
## Updating
#### Linux/BSD/MacOSX/POSIX - Build from Source
- Run the following commands to update btcd, all dependencies, and install it:
```bash
$ cd $GOPATH/src/github.com/btcsuite/btcd
$ git pull
$ GO111MODULE=on go install -v . ./cmd/...
``` sh
./lbcctl --rpcuser=rpcuser --rpcpass=rpcpass getblockcount
./lbcctl --rpcuser=rpcuser --rpcpass=rpcpass getblocktemplate
```
## Getting Started
By default, the RPCs are served over TLS. `lbcd` generates `rpc.cert` and `rpc.key`
under `<LBCDDIR>` if they do not already exist, and `lbcctl` looks for and uses them there.
btcd has several configuration options available to tweak how it runs, but all
of the basic operations described in the intro section work with zero
configuration.
The RPCs can also be served without TLS *(on localhost only)* using `--notls`
#### Linux/BSD/POSIX/Source
```bash
$ ./btcd
``` sh
./lbcd --txindex --rpcuser=rpcuser --rpcpass=rpcpass --notls
./lbcctl --rpcuser=rpcuser --rpcpass=rpcpass --notls getblockcount
```
## IRC
## Working with Different Networks
- irc.libera.chat
- channel #btcd
- [webchat](https://web.libera.chat/gamja/?channels=btcd)
By default, `lbcd` and `lbcctl` use the following ports on each network:
## Issue Tracker
| Network | RPC Port | Network Port |
| ------- | -------- | ------------ |
| mainnet | 9245 | 9246 |
| testnet | 19245 | 19246 |
| regtest | 29245 | 29246 |
The [integrated github issue tracker](https://github.com/btcsuite/btcd/issues)
is used for this project.
Running `lbcd` and `lbcctl` with `--testnet` or `--regtest` uses the corresponding chain params as well as that network's default RPC and network ports.
## Documentation
``` sh
./lbcd --txindex --rpcuser=rpcuser --rpcpass=rpcpass --regtest
./lbcctl --rpcuser=rpcuser --rpcpass=rpcpass --regtest getblockcount
```
The documentation is a work-in-progress. It is located in the [docs](https://github.com/btcsuite/btcd/tree/master/docs) folder.
The default network and RPC ports of `lbcd` can be overridden using `--listen` and `--rpclisten`.
`lbcctl` can also connect to an RPC server specified by `--rpcserver`.
## Release Verification
``` sh
./lbcd --txindex --rpcuser=rpcuser --rpcpass=rpcpass --regtest --listen=127.0.0.1:29248 --rpclisten=127.0.0.1:29247
./lbcctl --rpcuser=rpcuser --rpcpass=rpcpass --regtest --rpcserver=127.0.0.1:29247 getblockcount
```
Note: Wallet related RPCs are provided by [lbcwallet](https://github.com/lbryio/lbcwallet).
## Contributing
Contributions to this project are welcome, encouraged, and compensated.
The [integrated github issue tracker](https://github.com/lbryio/lbcd/issues)
is used for this project. All pull requests will be considered.
<!-- ## Release Verification
Please see our [documentation on the current build/verification
process](https://github.com/btcsuite/btcd/tree/master/release) for all our
process](https://github.com/lbryio/lbcd/tree/master/release) for all our
releases for information on how to verify the integrity of published releases
using our reproducible build system.
-->
## License
btcd is licensed under the [copyfree](http://copyfree.org) ISC License.
lbcd is licensed under the [copyfree](http://copyfree.org) ISC License.


@ -23,8 +23,8 @@ import (
"sync/atomic"
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/wire"
)
// AddrManager provides a concurrency safe address manager for caching potential
@ -45,7 +45,7 @@ type AddrManager struct {
nTried int
nNew int
lamtx sync.Mutex
localAddresses map[string]*localAddress
localAddresses map[string]*LocalAddress
version int
}
@ -69,9 +69,9 @@ type serializedAddrManager struct {
TriedBuckets [triedBucketCount][]string
}
type localAddress struct {
na *wire.NetAddress
score AddressPriority
type LocalAddress struct {
NA *wire.NetAddress
Score AddressPriority
}
// AddressPriority type is used to describe the hierarchy of local address
@ -178,7 +178,7 @@ func (a *AddrManager) updateAddress(netAddr, srcAddr *wire.NetAddress) {
// note that to prevent causing excess garbage on getaddr
// messages the netaddresses in addrmanager are *immutable*,
// if we need to change them then we replace the pointer with a
// new copy so that we don't have to copy every na for getaddr.
// new copy so that we don't have to copy every NA for getaddr.
if netAddr.Timestamp.After(ka.na.Timestamp) ||
(ka.na.Services&netAddr.Services) !=
netAddr.Services {
@ -755,7 +755,7 @@ func (a *AddrManager) HostToNetAddress(host string, port uint16, services wire.S
// the relevant .onion address.
func ipString(na *wire.NetAddress) string {
if IsOnionCatTor(na) {
// We know now that na.IP is long enough.
// We know now that NA.IP is long enough.
base32 := base32.StdEncoding.EncodeToString(na.IP[6:])
return strings.ToLower(base32) + ".onion"
}
@ -882,7 +882,7 @@ func (a *AddrManager) Connected(addr *wire.NetAddress) {
// so.
now := time.Now()
if now.After(ka.na.Timestamp.Add(time.Minute * 20)) {
// ka.na is immutable, so replace it.
// ka.NA is immutable, so replace it.
naCopy := *ka.na
naCopy.Timestamp = time.Now()
ka.mtx.Lock()
@ -994,7 +994,7 @@ func (a *AddrManager) SetServices(addr *wire.NetAddress, services wire.ServiceFl
// Update the services if needed.
if ka.na.Services != services {
// ka.na is immutable, so replace it.
// ka.NA is immutable, so replace it.
naCopy := *ka.na
naCopy.Services = services
ka.mtx.Lock()
@ -1003,7 +1003,7 @@ func (a *AddrManager) SetServices(addr *wire.NetAddress, services wire.ServiceFl
}
}
// AddLocalAddress adds na to the list of known local addresses to advertise
// AddLocalAddress adds NA to the list of known local addresses to advertise
// with the given priority.
func (a *AddrManager) AddLocalAddress(na *wire.NetAddress, priority AddressPriority) error {
if !IsRoutable(na) {
@ -1015,13 +1015,13 @@ func (a *AddrManager) AddLocalAddress(na *wire.NetAddress, priority AddressPrior
key := NetAddressKey(na)
la, ok := a.localAddresses[key]
if !ok || la.score < priority {
if !ok || la.Score < priority {
if ok {
la.score = priority + 1
la.Score = priority + 1
} else {
a.localAddresses[key] = &localAddress{
na: na,
score: priority,
a.localAddresses[key] = &LocalAddress{
NA: na,
Score: priority,
}
}
}
@ -1117,12 +1117,12 @@ func (a *AddrManager) GetBestLocalAddress(remoteAddr *wire.NetAddress) *wire.Net
var bestscore AddressPriority
var bestAddress *wire.NetAddress
for _, la := range a.localAddresses {
reach := getReachabilityFrom(la.na, remoteAddr)
reach := getReachabilityFrom(la.NA, remoteAddr)
if reach > bestreach ||
(reach == bestreach && la.score > bestscore) {
(reach == bestreach && la.Score > bestscore) {
bestreach = reach
bestscore = la.score
bestAddress = la.na
bestscore = la.Score
bestAddress = la.NA
}
}
if bestAddress != nil {
@ -1146,6 +1146,15 @@ func (a *AddrManager) GetBestLocalAddress(remoteAddr *wire.NetAddress) *wire.Net
return bestAddress
}
// LocalAddresses returns the list of local addresses for our node.
func (a *AddrManager) LocalAddresses() []*LocalAddress {
var addrs []*LocalAddress
for _, addr := range a.localAddresses {
addrs = append(addrs, addr)
}
return addrs
}
// New returns a new bitcoin address manager.
// Use Start to begin processing asynchronous address updates.
func New(dataDir string, lookupFunc func(string) ([]net.IP, error)) *AddrManager {
@ -1154,7 +1163,7 @@ func New(dataDir string, lookupFunc func(string) ([]net.IP, error)) *AddrManager
lookupFunc: lookupFunc,
rand: rand.New(rand.NewSource(time.Now().UnixNano())),
quit: make(chan struct{}),
localAddresses: make(map[string]*localAddress),
localAddresses: make(map[string]*LocalAddress),
version: serialisationVersion,
}
am.reset()
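
The diff above exports the former `localAddress` type as `LocalAddress` (with `NA` and `Score` fields) and adds an `AddrManager.LocalAddresses()` accessor. A minimal sketch of how a caller might use the new API (hypothetical usage, not code from this PR; the data directory and IP are arbitrary examples):

```go
package main

import (
	"fmt"
	"net"

	"github.com/lbryio/lbcd/addrmgr"
	"github.com/lbryio/lbcd/wire"
)

func main() {
	// An address manager backed by a throwaway data directory.
	am := addrmgr.New("/tmp/lbcd-addrmgr", net.LookupIP)

	// Advertise a local address; 9246 is the mainnet P2P port.
	na := wire.NewNetAddressIPPort(net.ParseIP("1.2.3.4"), 9246, wire.SFNodeNetwork)
	if err := am.AddLocalAddress(na, addrmgr.BoundPrio); err != nil {
		fmt.Println("address rejected:", err)
		return
	}

	// The new accessor exposes every advertised address and its priority.
	for _, la := range am.LocalAddresses() {
		fmt.Printf("%s:%d score=%d\n", la.NA.IP, la.NA.Port, la.Score)
	}
}
```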


@ -7,7 +7,7 @@ import (
"os"
"testing"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/wire"
)
// randAddr generates a *wire.NetAddress backed by a random IPv4/IPv6 address.


@ -12,8 +12,8 @@ import (
"testing"
"time"
"github.com/btcsuite/btcd/addrmgr"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/addrmgr"
"github.com/lbryio/lbcd/wire"
)
// naTest is used to describe a test to be performed against the NetAddressKey
@ -34,61 +34,61 @@ var someIP = "173.194.115.66"
func addNaTests() {
// IPv4
// Localhost
addNaTest("127.0.0.1", 8333, "127.0.0.1:8333")
addNaTest("127.0.0.1", 8334, "127.0.0.1:8334")
addNaTest("127.0.0.1", 9244, "127.0.0.1:9244")
addNaTest("127.0.0.1", 9245, "127.0.0.1:9245")
// Class A
addNaTest("1.0.0.1", 8333, "1.0.0.1:8333")
addNaTest("2.2.2.2", 8334, "2.2.2.2:8334")
addNaTest("27.253.252.251", 8335, "27.253.252.251:8335")
addNaTest("123.3.2.1", 8336, "123.3.2.1:8336")
addNaTest("1.0.0.1", 9244, "1.0.0.1:9244")
addNaTest("2.2.2.2", 9245, "2.2.2.2:9245")
addNaTest("27.253.252.251", 9246, "27.253.252.251:9246")
addNaTest("123.3.2.1", 9247, "123.3.2.1:9247")
// Private Class A
addNaTest("10.0.0.1", 8333, "10.0.0.1:8333")
addNaTest("10.1.1.1", 8334, "10.1.1.1:8334")
addNaTest("10.2.2.2", 8335, "10.2.2.2:8335")
addNaTest("10.10.10.10", 8336, "10.10.10.10:8336")
addNaTest("10.0.0.1", 9244, "10.0.0.1:9244")
addNaTest("10.1.1.1", 9245, "10.1.1.1:9245")
addNaTest("10.2.2.2", 9246, "10.2.2.2:9246")
addNaTest("10.10.10.10", 9247, "10.10.10.10:9247")
// Class B
addNaTest("128.0.0.1", 8333, "128.0.0.1:8333")
addNaTest("129.1.1.1", 8334, "129.1.1.1:8334")
addNaTest("180.2.2.2", 8335, "180.2.2.2:8335")
addNaTest("191.10.10.10", 8336, "191.10.10.10:8336")
addNaTest("128.0.0.1", 9244, "128.0.0.1:9244")
addNaTest("129.1.1.1", 9245, "129.1.1.1:9245")
addNaTest("180.2.2.2", 9246, "180.2.2.2:9246")
addNaTest("191.10.10.10", 9247, "191.10.10.10:9247")
// Private Class B
addNaTest("172.16.0.1", 8333, "172.16.0.1:8333")
addNaTest("172.16.1.1", 8334, "172.16.1.1:8334")
addNaTest("172.16.2.2", 8335, "172.16.2.2:8335")
addNaTest("172.16.172.172", 8336, "172.16.172.172:8336")
addNaTest("172.16.0.1", 9244, "172.16.0.1:9244")
addNaTest("172.16.1.1", 9245, "172.16.1.1:9245")
addNaTest("172.16.2.2", 9246, "172.16.2.2:9246")
addNaTest("172.16.172.172", 9247, "172.16.172.172:9247")
// Class C
addNaTest("193.0.0.1", 8333, "193.0.0.1:8333")
addNaTest("200.1.1.1", 8334, "200.1.1.1:8334")
addNaTest("205.2.2.2", 8335, "205.2.2.2:8335")
addNaTest("223.10.10.10", 8336, "223.10.10.10:8336")
addNaTest("193.0.0.1", 9244, "193.0.0.1:9244")
addNaTest("200.1.1.1", 9245, "200.1.1.1:9245")
addNaTest("205.2.2.2", 9246, "205.2.2.2:9246")
addNaTest("223.10.10.10", 9247, "223.10.10.10:9247")
// Private Class C
addNaTest("192.168.0.1", 8333, "192.168.0.1:8333")
addNaTest("192.168.1.1", 8334, "192.168.1.1:8334")
addNaTest("192.168.2.2", 8335, "192.168.2.2:8335")
addNaTest("192.168.192.192", 8336, "192.168.192.192:8336")
addNaTest("192.168.0.1", 9244, "192.168.0.1:9244")
addNaTest("192.168.1.1", 9245, "192.168.1.1:9245")
addNaTest("192.168.2.2", 9246, "192.168.2.2:9246")
addNaTest("192.168.192.192", 9247, "192.168.192.192:9247")
// IPv6
// Localhost
addNaTest("::1", 8333, "[::1]:8333")
addNaTest("fe80::1", 8334, "[fe80::1]:8334")
addNaTest("::1", 9244, "[::1]:9244")
addNaTest("fe80::1", 9245, "[fe80::1]:9245")
// Link-local
addNaTest("fe80::1:1", 8333, "[fe80::1:1]:8333")
addNaTest("fe91::2:2", 8334, "[fe91::2:2]:8334")
addNaTest("fea2::3:3", 8335, "[fea2::3:3]:8335")
addNaTest("feb3::4:4", 8336, "[feb3::4:4]:8336")
addNaTest("fe80::1:1", 9244, "[fe80::1:1]:9244")
addNaTest("fe91::2:2", 9245, "[fe91::2:2]:9245")
addNaTest("fea2::3:3", 9246, "[fea2::3:3]:9246")
addNaTest("feb3::4:4", 9247, "[feb3::4:4]:9247")
// Site-local
addNaTest("fec0::1:1", 8333, "[fec0::1:1]:8333")
addNaTest("fed1::2:2", 8334, "[fed1::2:2]:8334")
addNaTest("fee2::3:3", 8335, "[fee2::3:3]:8335")
addNaTest("fef3::4:4", 8336, "[fef3::4:4]:8336")
addNaTest("fec0::1:1", 9244, "[fec0::1:1]:9244")
addNaTest("fed1::2:2", 9245, "[fed1::2:2]:9245")
addNaTest("fee2::3:3", 9246, "[fee2::3:3]:9246")
addNaTest("fef3::4:4", 9247, "[fef3::4:4]:9247")
}
func addNaTest(ip string, port uint16, want string) {
@ -119,7 +119,7 @@ func TestAddAddressByIP(t *testing.T) {
err error
}{
{
someIP + ":8333",
someIP + ":9244",
nil,
},
{
@ -127,7 +127,7 @@ func TestAddAddressByIP(t *testing.T) {
addrErr,
},
{
someIP[:12] + ":8333",
someIP[:12] + ":9244",
fmtErr,
},
{
@ -212,7 +212,7 @@ func TestAttempt(t *testing.T) {
n := addrmgr.New("testattempt", lookupFunc)
// Add a new address and get it
err := n.AddAddressByIP(someIP + ":8333")
err := n.AddAddressByIP(someIP + ":9244")
if err != nil {
t.Fatalf("Adding address failed: %v", err)
}
@ -234,7 +234,7 @@ func TestConnected(t *testing.T) {
n := addrmgr.New("testconnected", lookupFunc)
// Add a new address and get it
err := n.AddAddressByIP(someIP + ":8333")
err := n.AddAddressByIP(someIP + ":9244")
if err != nil {
t.Fatalf("Adding address failed: %v", err)
}
@ -261,14 +261,14 @@ func TestNeedMoreAddresses(t *testing.T) {
var err error
for i := 0; i < addrsToAdd; i++ {
s := fmt.Sprintf("%d.%d.173.147:8333", i/128+60, i%128+60)
s := fmt.Sprintf("%d.%d.173.147:9244", i/128+60, i%128+60)
addrs[i], err = n.DeserializeNetAddress(s, wire.SFNodeNetwork)
if err != nil {
t.Errorf("Failed to turn %s into an address: %v", s, err)
}
}
srcAddr := wire.NewNetAddressIPPort(net.IPv4(173, 144, 173, 111), 8333, 0)
srcAddr := wire.NewNetAddressIPPort(net.IPv4(173, 144, 173, 111), 9244, 0)
n.AddAddresses(addrs, srcAddr)
numAddrs := n.NumAddresses()
@ -289,14 +289,14 @@ func TestGood(t *testing.T) {
var err error
for i := 0; i < addrsToAdd; i++ {
s := fmt.Sprintf("%d.173.147.%d:8333", i/64+60, i%64+60)
s := fmt.Sprintf("%d.173.147.%d:9244", i/64+60, i%64+60)
addrs[i], err = n.DeserializeNetAddress(s, wire.SFNodeNetwork)
if err != nil {
t.Errorf("Failed to turn %s into an address: %v", s, err)
}
}
srcAddr := wire.NewNetAddressIPPort(net.IPv4(173, 144, 173, 111), 8333, 0)
srcAddr := wire.NewNetAddressIPPort(net.IPv4(173, 144, 173, 111), 9244, 0)
n.AddAddresses(addrs, srcAddr)
for _, addr := range addrs {
@ -323,7 +323,7 @@ func TestGetAddress(t *testing.T) {
}
// Add a new address and get it
err := n.AddAddressByIP(someIP + ":8333")
err := n.AddAddressByIP(someIP + ":9244")
if err != nil {
t.Fatalf("Adding address failed: %v", err)
}


@ -7,7 +7,7 @@ package addrmgr
import (
"time"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/wire"
)
func TstKnownAddressIsBad(ka *KnownAddress) bool {


@ -8,7 +8,7 @@ import (
"sync"
"time"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/wire"
)
// KnownAddress tracks information about a known network address that is used


@ -9,8 +9,8 @@ import (
"testing"
"time"
"github.com/btcsuite/btcd/addrmgr"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/addrmgr"
"github.com/lbryio/lbcd/wire"
)
func TestChance(t *testing.T) {


@ -8,7 +8,7 @@ import (
"fmt"
"net"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/wire"
)
var (


@ -8,8 +8,8 @@ import (
"net"
"testing"
"github.com/btcsuite/btcd/addrmgr"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/addrmgr"
"github.com/lbryio/lbcd/wire"
)
// TestIPTypes ensures the various functions which determine the type of an IP
@ -39,7 +39,7 @@ func TestIPTypes(t *testing.T) {
rfc4193, rfc4380, rfc4843, rfc4862, rfc5737, rfc6052, rfc6145, rfc6598,
local, valid, routable bool) ipTest {
nip := net.ParseIP(ip)
na := *wire.NewNetAddressIPPort(nip, 8333, wire.SFNodeNetwork)
na := *wire.NewNetAddressIPPort(nip, 9246, wire.SFNodeNetwork)
test := ipTest{na, rfc1918, rfc2544, rfc3849, rfc3927, rfc3964, rfc4193, rfc4380,
rfc4843, rfc4862, rfc5737, rfc6052, rfc6145, rfc6598, local, valid, routable}
return test
@ -192,7 +192,7 @@ func TestGroupKey(t *testing.T) {
for i, test := range tests {
nip := net.ParseIP(test.ip)
na := *wire.NewNetAddressIPPort(nip, 8333, wire.SFNodeNetwork)
na := *wire.NewNetAddressIPPort(nip, 9246, wire.SFNodeNetwork)
if key := addrmgr.GroupKey(&na); key != test.expected {
t.Errorf("TestGroupKey #%d (%s): unexpected group key "+
"- got '%s', want '%s'", i, test.name,


@ -1,30 +1,9 @@
blockchain
==========
[![Build Status](https://github.com/btcsuite/btcd/workflows/Build%20and%20Test/badge.svg)](https://github.com/btcsuite/btcd/actions)
[![ISC License](http://img.shields.io/badge/license-ISC-blue.svg)](http://copyfree.org)
[![GoDoc](https://img.shields.io/badge/godoc-reference-blue.svg)](https://pkg.go.dev/github.com/btcsuite/btcd/blockchain)
Package blockchain implements bitcoin block handling and chain selection rules.
The test coverage is currently only around 60%, but will be increasing over
time. See `test_coverage.txt` for the gocov coverage report. Alternatively, if
you are running a POSIX OS, you can run the `cov_report.sh` script for a
real-time report. Package blockchain is licensed under the liberal ISC license.
There is an associated blog post about the release of this package
[here](https://blog.conformal.com/btcchain-the-bitcoin-chain-package-from-bctd/).
This package has intentionally been designed so it can be used as a standalone
package for any projects needing to handle processing of blocks into the bitcoin
block chain.
## Installation and Updating
```bash
$ go get -u github.com/btcsuite/btcd/blockchain
```
## Bitcoin Chain Processing Overview
### Bitcoin Chain Processing Overview
Before a block is allowed into the block chain, it must go through an intensive
series of validation rules. The following list serves as a general outline of
@ -57,47 +36,4 @@ is by no means exhaustive:
transaction values
- Run the transaction scripts to verify the spender is allowed to spend the
coins
- Insert the block into the block database
## Examples
* [ProcessBlock Example](https://pkg.go.dev/github.com/btcsuite/btcd/blockchain#example-BlockChain-ProcessBlock)
Demonstrates how to create a new chain instance and use ProcessBlock to
attempt to add a block to the chain. This example intentionally
attempts to insert a duplicate genesis block to illustrate how an invalid
block is handled.
* [CompactToBig Example](https://pkg.go.dev/github.com/btcsuite/btcd/blockchain#example-CompactToBig)
Demonstrates how to convert the compact "bits" in a block header which
represent the target difficulty to a big integer and display it using the
typical hex notation.
* [BigToCompact Example](https://pkg.go.dev/github.com/btcsuite/btcd/blockchain#example-BigToCompact)
Demonstrates how to convert a target difficulty into the
compact "bits" in a block header which represent that target difficulty.
## GPG Verification Key
All official release tags are signed by Conformal so users can ensure the code
has not been tampered with and is coming from the btcsuite developers. To
verify the signature perform the following:
- Download the public key from the Conformal website at
https://opensource.conformal.com/GIT-GPG-KEY-conformal.txt
- Import the public key into your GPG keyring:
```bash
gpg --import GIT-GPG-KEY-conformal.txt
```
- Verify the release tag with the following command where `TAG_NAME` is a
placeholder for the specific tag:
```bash
git tag -v TAG_NAME
```
## License
Package blockchain is licensed under the [copyfree](http://copyfree.org) ISC
License.
- Insert the block into the block database


@ -7,8 +7,8 @@ package blockchain
import (
"fmt"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/database"
btcutil "github.com/lbryio/lbcutil"
)
// maybeAcceptBlock potentially accepts a block into the block chain and, if
@ -84,9 +84,11 @@ func (b *BlockChain) maybeAcceptBlock(block *btcutil.Block, flags BehaviorFlags)
// Notify the caller that the new block was accepted into the block
// chain. The caller would typically want to react by relaying the
// inventory to other peers.
b.notificationSendLock.Lock()
defer b.notificationSendLock.Unlock()
b.chainLock.Unlock()
defer b.chainLock.Lock()
b.sendNotification(NTBlockAccepted, block)
b.chainLock.Lock()
return isMainChain, nil
}


@ -6,14 +6,12 @@ package blockchain
import (
"testing"
"github.com/btcsuite/btcutil"
)
// BenchmarkIsCoinBase performs a simple benchmark against the IsCoinBase
// function.
func BenchmarkIsCoinBase(b *testing.B) {
tx, _ := btcutil.NewBlock(&Block100000).Tx(1)
tx, _ := GetBlock100000().Tx(1)
b.ResetTimer()
for i := 0; i < b.N; i++ {
IsCoinBase(tx)
@ -23,9 +21,9 @@ func BenchmarkIsCoinBase(b *testing.B) {
// BenchmarkIsCoinBaseTx performs a simple benchmark against the IsCoinBaseTx
// function.
func BenchmarkIsCoinBaseTx(b *testing.B) {
tx := Block100000.Transactions[1]
tx, _ := GetBlock100000().Tx(1)
b.ResetTimer()
for i := 0; i < b.N; i++ {
IsCoinBaseTx(tx)
IsCoinBaseTx(tx.MsgTx())
}
}


@ -10,10 +10,10 @@ import (
"sync"
"time"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
"github.com/lbryio/lbcd/wire"
)
// blockStatus is a bit field representing the validation state of the block.
@ -93,6 +93,7 @@ type blockNode struct {
nonce uint32
timestamp int64
merkleRoot chainhash.Hash
claimTrie chainhash.Hash
// status is a bitfield representing the validation state of the block. The
// status field, unlike the other fields, may be written to and so should
@ -114,6 +115,7 @@ func initBlockNode(node *blockNode, blockHeader *wire.BlockHeader, parent *block
nonce: blockHeader.Nonce,
timestamp: blockHeader.Timestamp.Unix(),
merkleRoot: blockHeader.MerkleRoot,
claimTrie: blockHeader.ClaimTrie,
}
if parent != nil {
node.parent = parent
@ -144,6 +146,7 @@ func (node *blockNode) Header() wire.BlockHeader {
Version: node.version,
PrevBlock: *prevHash,
MerkleRoot: node.merkleRoot,
ClaimTrie: node.claimTrie,
Timestamp: time.Unix(node.timestamp, 0),
Bits: node.bits,
Nonce: node.nonce,


@ -11,12 +11,14 @@ import (
"sync"
"time"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
"github.com/lbryio/lbcd/txscript"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
"github.com/lbryio/lbcd/claimtrie"
)
const (
@ -114,6 +116,12 @@ type BlockChain struct {
// fields in this struct below this point.
chainLock sync.RWMutex
// notificationSendLock helps us only process one block at a time.
// It's definitely a hack. DCRD has much better structure in this regard.
// Without this you will get an error if you invalidate a block and then generate more right after.
// Taken from https://github.com/gcash/bchd/pull/308
notificationSendLock sync.Mutex
// These fields are related to the memory block index. They both have
// their own locks, however they are often also protected by the chain
// lock to help prevent logic races when blocks are being processed.
@ -180,6 +188,8 @@ type BlockChain struct {
// certain blockchain events.
notificationsLock sync.RWMutex
notifications []NotificationCallback
claimTrie *claimtrie.ClaimTrie
}
// HaveBlock returns whether or not the chain instance has the block represented
@ -195,6 +205,15 @@ func (b *BlockChain) HaveBlock(hash *chainhash.Hash) (bool, error) {
return exists || b.IsKnownOrphan(hash), nil
}
// GetWarnings returns a bool for whether unknownRules
// has been warned.
func (b *BlockChain) GetWarnings() bool {
b.chainLock.RLock()
defer b.chainLock.RUnlock()
return b.unknownRulesWarned
}
// IsKnownOrphan returns whether the passed hash is currently a known orphan.
// Keep in mind that only a limited number of orphans are held onto for a
// limited amount of time, so this function must not be used as an absolute
@ -571,7 +590,8 @@ func (b *BlockChain) connectBlock(node *blockNode, block *btcutil.Block,
}
// No warnings about unknown rules until the chain is current.
if b.isCurrent() {
current := b.isCurrent()
if current {
// Warn if any unknown new rules are either about to activate or
// have already been activated.
if err := b.warnUnknownRuleActivations(node); err != nil {
@ -579,6 +599,14 @@ func (b *BlockChain) connectBlock(node *blockNode, block *btcutil.Block,
}
}
// Handle LBRY Claim Scripts
if b.claimTrie != nil {
shouldFlush := current && b.chainParams.Net != wire.TestNet
if err := b.ParseClaimScripts(block, node, view, shouldFlush); err != nil {
return ruleError(ErrBadClaimTrie, err.Error())
}
}
// Write any block status changes to DB before updating best state.
err := b.index.flushToDB()
if err != nil {
@ -661,9 +689,11 @@ func (b *BlockChain) connectBlock(node *blockNode, block *btcutil.Block,
// Notify the caller that the block was connected to the main chain.
// The caller would typically want to react with actions such as
// updating wallets.
b.notificationSendLock.Lock()
defer b.notificationSendLock.Unlock()
b.chainLock.Unlock()
defer b.chainLock.Lock()
b.sendNotification(NTBlockConnected, block)
b.chainLock.Lock()
return nil
}
@ -761,6 +791,12 @@ func (b *BlockChain) disconnectBlock(node *blockNode, block *btcutil.Block, view
return err
}
if b.claimTrie != nil {
if err = b.claimTrie.ResetHeight(node.parent.height); err != nil {
return err
}
}
// Prune fully spent entries and mark all entries in the view unmodified
// now that the modifications have been committed to the database.
view.commit()
@ -780,9 +816,11 @@ func (b *BlockChain) disconnectBlock(node *blockNode, block *btcutil.Block, view
// Notify the caller that the block was disconnected from the main
// chain. The caller would typically want to react with actions such as
// updating wallets.
b.notificationSendLock.Lock()
defer b.notificationSendLock.Unlock()
b.chainLock.Unlock()
defer b.chainLock.Lock()
b.sendNotification(NTBlockDisconnected, block)
b.chainLock.Lock()
return nil
}
@ -1208,7 +1246,7 @@ func (b *BlockChain) connectBestChain(node *blockNode, block *btcutil.Block, fla
// factors are used to guess, but the key factors that allow the chain to
// believe it is current are:
// - Latest block height is after the latest checkpoint (if enabled)
// - Latest block has a timestamp newer than 24 hours ago
// - Latest block has a timestamp newer than ~6 hours ago (as LBRY's block time is one quarter of Bitcoin's)
//
// This function MUST be called with the chain state lock held (for reads).
func (b *BlockChain) isCurrent() bool {
@ -1219,13 +1257,13 @@ func (b *BlockChain) isCurrent() bool {
return false
}
// Not current if the latest best block has a timestamp before 24 hours
// Not current if the latest best block has a timestamp before 7 hours
// ago.
//
// The chain appears to be current if none of the checks reported
// otherwise.
minus24Hours := b.timeSource.AdjustedTime().Add(-24 * time.Hour).Unix()
return b.bestChain.Tip().timestamp >= minus24Hours
hours := b.timeSource.AdjustedTime().Add(-7 * time.Hour).Unix()
return b.bestChain.Tip().timestamp >= hours
}
// IsCurrent returns whether or not the chain believes it is current. Several
@ -1614,6 +1652,121 @@ func (b *BlockChain) LocateHeaders(locator BlockLocator, hashStop *chainhash.Has
return headers
}
// InvalidateBlock takes a block hash and invalidates it.
//
// This function is safe for concurrent access.
func (b *BlockChain) InvalidateBlock(hash *chainhash.Hash) error {
b.chainLock.Lock()
defer b.chainLock.Unlock()
return b.invalidateBlock(hash)
}
// invalidateBlock takes a block hash and invalidates it.
func (b *BlockChain) invalidateBlock(hash *chainhash.Hash) error {
node := b.index.LookupNode(hash)
if node == nil {
err := fmt.Errorf("block %s is not known", hash)
return err
}
// No need to invalidate if it's already invalid.
if node.status.KnownInvalid() {
err := fmt.Errorf("block %s is already invalid", hash)
return err
}
if node.parent == nil {
err := fmt.Errorf("block %s has no parent", hash)
return err
}
b.index.SetStatusFlags(node, statusValidateFailed)
b.index.UnsetStatusFlags(node, statusValid)
detachNodes, attachNodes := b.getReorganizeNodes(node.parent)
err := b.reorganizeChain(detachNodes, attachNodes)
if err != nil {
return err
}
for i, e := 0, detachNodes.Front(); e != nil; i, e = i+1, e.Next() {
n := e.Value.(*blockNode)
b.index.SetStatusFlags(n, statusInvalidAncestor)
b.index.UnsetStatusFlags(n, statusValid)
}
if writeErr := b.index.flushToDB(); writeErr != nil {
log.Warnf("Error flushing block index changes to disk: %v", writeErr)
}
return nil
}
// ReconsiderBlock takes a block hash and allows it to be revalidated.
//
// This function is safe for concurrent access.
func (b *BlockChain) ReconsiderBlock(hash *chainhash.Hash) error {
return b.reconsiderBlock(hash)
}
// reconsiderBlock takes a block hash and allows it to be revalidated.
func (b *BlockChain) reconsiderBlock(hash *chainhash.Hash) error {
node := b.index.LookupNode(hash)
if node == nil {
err := fmt.Errorf("block %s is not known", hash)
return err
}
// No need to reconsider, it is already valid.
if node.status.KnownValid() {
err := fmt.Errorf("block %s is already valid", hash)
return err
}
// Keep a reference to the first node in the chain of invalid
// blocks so we can reprocess after status flags are updated.
firstNode := node
// Find previous node to the point where the blocks are valid again.
for n := node; n.status.KnownInvalid(); n = n.parent {
b.index.UnsetStatusFlags(n, statusInvalidAncestor)
b.index.UnsetStatusFlags(n, statusValidateFailed)
firstNode = n
}
// do we need an rlock on chainstate for this section?
var blk *btcutil.Block
err := b.db.View(func(dbTx database.Tx) error {
var err error
blk, err = dbFetchBlockByNode(dbTx, firstNode)
return err
})
if err != nil {
return err
}
// Process it all again. This will take care of the
// orphans as well.
_, _, err = b.ProcessBlock(blk, BFNoDupBlockCheck)
if err != nil {
return err
}
if writeErr := b.index.flushToDB(); writeErr != nil {
log.Warnf("Error flushing block index changes to disk: %v", writeErr)
}
return nil
}
// ClaimTrie returns the claimTrie associated with the chain.
func (b *BlockChain) ClaimTrie() *claimtrie.ClaimTrie {
return b.claimTrie
}
// IndexManager provides a generic interface that is called when blocks are
// connected and disconnected to and from the tip of the main chain for the
// purpose of supporting optional indexes.
@ -1700,6 +1853,8 @@ type Config struct {
// This field can be nil if the caller is not interested in using a
// signature cache.
HashCache *txscript.HashCache
ClaimTrie *claimtrie.ClaimTrie
}
// New returns a BlockChain instance using the provided configuration details.
@ -1736,7 +1891,6 @@ func New(config *Config) (*BlockChain, error) {
params := config.ChainParams
targetTimespan := int64(params.TargetTimespan / time.Second)
targetTimePerBlock := int64(params.TargetTimePerBlock / time.Second)
adjustmentFactor := params.RetargetAdjustmentFactor
b := BlockChain{
checkpoints: config.Checkpoints,
checkpointsByHeight: checkpointsByHeight,
@ -1745,8 +1899,8 @@ func New(config *Config) (*BlockChain, error) {
timeSource: config.TimeSource,
sigCache: config.SigCache,
indexManager: config.IndexManager,
minRetargetTimespan: targetTimespan / adjustmentFactor,
maxRetargetTimespan: targetTimespan * adjustmentFactor,
minRetargetTimespan: targetTimespan - (targetTimespan / 8),
maxRetargetTimespan: targetTimespan + (targetTimespan / 2),
blocksPerRetarget: int32(targetTimespan / targetTimePerBlock),
index: newBlockIndex(config.DB, params),
hashCache: config.HashCache,
@ -1755,6 +1909,7 @@ func New(config *Config) (*BlockChain, error) {
prevOrphans: make(map[chainhash.Hash][]*orphanBlock),
warningCaches: newThresholdCaches(vbNumBits),
deploymentCaches: newThresholdCaches(chaincfg.DefinedDeployments),
claimTrie: config.ClaimTrie,
}
// Initialize the chain state from the passed database. When the db
@ -1764,6 +1919,20 @@ func New(config *Config) (*BlockChain, error) {
return nil, err
}
// Helper function to insert the output in genesis block in to the
// transaction database.
fn := func(dbTx database.Tx) error {
genesisBlock := btcutil.NewBlock(b.chainParams.GenesisBlock)
view := NewUtxoViewpoint()
if err := view.connectTransactions(genesisBlock, nil); err != nil {
return err
}
return dbPutUtxoView(dbTx, view)
}
if err := b.db.Update(fn); err != nil {
return nil, err
}
// Perform any upgrades to the various chain-specific buckets as needed.
if err := b.maybeUpgradeDbBuckets(config.Interrupt); err != nil {
return nil, err
@ -1783,6 +1952,14 @@ func New(config *Config) (*BlockChain, error) {
return nil, err
}
if b.claimTrie != nil {
err := rebuildMissingClaimTrieData(&b, config.Interrupt)
if err != nil {
b.claimTrie.Close()
return nil, err
}
}
bestNode := b.bestChain.Tip()
log.Infof("Chain state (height %d, hash %v, totaltx %d, work %v)",
bestNode.height, bestNode.hash, b.stateSnapshot.TotalTxns,
@ -1790,3 +1967,63 @@ func New(config *Config) (*BlockChain, error) {
return &b, nil
}
func rebuildMissingClaimTrieData(b *BlockChain, done <-chan struct{}) error {
target := b.bestChain.Height()
if b.claimTrie.Height() == target {
return nil
}
if b.claimTrie.Height() > target {
return b.claimTrie.ResetHeight(target)
}
start := time.Now()
lastReport := time.Now()
// TODO: move this view inside the loop (or recreate it every 5 sec.)
// as accumulating all inputs has potential to use a huge amount of RAM
// but we need to get the spent inputs working for that to be possible
view := NewUtxoViewpoint()
for h := int32(0); h < target; h++ {
select {
case <-done:
return fmt.Errorf("rebuild unfinished at height %d", b.claimTrie.Height())
default:
}
n := b.bestChain.NodeByHeight(h + 1)
var block *btcutil.Block
err := b.db.View(func(dbTx database.Tx) error {
var err error
block, err = dbFetchBlockByNode(dbTx, n)
return err
})
if err != nil {
return err
}
err = view.fetchInputUtxos(b.db, block)
if err != nil {
return err
}
err = view.connectTransactions(block, nil)
if err != nil {
return err
}
if h >= b.claimTrie.Height() {
err = b.ParseClaimScripts(block, n, view, false)
if err != nil {
return err
}
}
if time.Since(lastReport) > time.Second*5 {
lastReport = time.Now()
log.Infof("Rebuilding claim trie data to %d. At: %d", target, h)
}
}
log.Infof("Completed rebuilding claim trie data to %d. Took %s ",
b.claimTrie.Height(), time.Since(start))
return nil
}
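
`InvalidateBlock` and `ReconsiderBlock`, added above, give callers bitcoind-style manual reorg control: invalidating a block reorganizes the chain onto its parent, and reconsidering clears the invalid flags and reprocesses from the first invalid ancestor. A usage sketch (hypothetical caller code; the package and helper names are illustrative only):

```go
package chaintool

import (
	"github.com/lbryio/lbcd/blockchain"
	"github.com/lbryio/lbcd/chaincfg/chainhash"
)

// manualReorg marks a block invalid, forcing a reorganization onto its
// parent's chain, and then reconsiders it so it can be revalidated and
// reconnected if it turns out to be fine.
func manualReorg(chain *blockchain.BlockChain, hashStr string) error {
	hash, err := chainhash.NewHashFromStr(hashStr)
	if err != nil {
		return err
	}

	// Flags the block and its descendants invalid and reorganizes away from it.
	if err := chain.InvalidateBlock(hash); err != nil {
		return err
	}

	// Clears the invalid flags and reprocesses from the first invalid ancestor.
	return chain.ReconsiderBlock(hash)
}
```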


@ -9,108 +9,12 @@ import (
"testing"
"time"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
// TestHaveBlock tests the HaveBlock API to ensure proper functionality.
func TestHaveBlock(t *testing.T) {
// Load up blocks such that there is a side chain.
// (genesis block) -> 1 -> 2 -> 3 -> 4
// \-> 3a
testFiles := []string{
"blk_0_to_4.dat.bz2",
"blk_3A.dat.bz2",
}
var blocks []*btcutil.Block
for _, file := range testFiles {
blockTmp, err := loadBlocks(file)
if err != nil {
t.Errorf("Error loading file: %v\n", err)
return
}
blocks = append(blocks, blockTmp...)
}
// Create a new database and chain instance to run tests against.
chain, teardownFunc, err := chainSetup("haveblock",
&chaincfg.MainNetParams)
if err != nil {
t.Errorf("Failed to setup chain instance: %v", err)
return
}
defer teardownFunc()
// Since we're not dealing with the real block chain, set the coinbase
// maturity to 1.
chain.TstSetCoinbaseMaturity(1)
for i := 1; i < len(blocks); i++ {
_, isOrphan, err := chain.ProcessBlock(blocks[i], BFNone)
if err != nil {
t.Errorf("ProcessBlock fail on block %v: %v\n", i, err)
return
}
if isOrphan {
t.Errorf("ProcessBlock incorrectly returned block %v "+
"is an orphan\n", i)
return
}
}
// Insert an orphan block.
_, isOrphan, err := chain.ProcessBlock(btcutil.NewBlock(&Block100000),
BFNone)
if err != nil {
t.Errorf("Unable to process block: %v", err)
return
}
if !isOrphan {
t.Errorf("ProcessBlock indicated block is an not orphan when " +
"it should be\n")
return
}
tests := []struct {
hash string
want bool
}{
// Genesis block should be present (in the main chain).
{hash: chaincfg.MainNetParams.GenesisHash.String(), want: true},
// Block 3a should be present (on a side chain).
{hash: "00000000474284d20067a4d33f6a02284e6ef70764a3a26d6a5b9df52ef663dd", want: true},
// Block 100000 should be present (as an orphan).
{hash: "000000000003ba27aa200b1cecaad478d2b00432346c3f1f3986da1afd33e506", want: true},
// Random hashes should not be available.
{hash: "123", want: false},
}
for i, test := range tests {
hash, err := chainhash.NewHashFromStr(test.hash)
if err != nil {
t.Errorf("NewHashFromStr: %v", err)
continue
}
result, err := chain.HaveBlock(hash)
if err != nil {
t.Errorf("HaveBlock #%d unexpected error: %v", i, err)
return
}
if result != test.want {
t.Errorf("HaveBlock #%d got %v want %v", i, result,
test.want)
continue
}
}
}
// TestCalcSequenceLock tests the LockTimeToSequence function, and the
// CalcSequenceLock method of a Chain instance. The tests exercise several
// combinations of inputs to the CalcSequenceLock function in order to ensure


@ -12,10 +12,10 @@ import (
"sync"
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
const (


@ -11,8 +11,8 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/database"
"github.com/lbryio/lbcd/wire"
)
// TestErrNotInMainChain ensures the functions related to errNotInMainChain work

blockchain/chainquery.go Normal file

@ -0,0 +1,123 @@
package blockchain
import (
"sort"
"strings"
btcutil "github.com/lbryio/lbcutil"
)
type ChainTip struct { // duplicate of btcjson.GetChainTipsResult to avoid circular reference
Height int64
Hash string
BranchLen int64
Status string
}
// nodeHeightSorter implements sort.Interface to allow a slice of nodes to
// be sorted by height in ascending order.
type nodeHeightSorter []ChainTip
// Len returns the number of nodes in the slice. It is part of the
// sort.Interface implementation.
func (s nodeHeightSorter) Len() int {
return len(s)
}
// Swap swaps the nodes at the passed indices. It is part of the
// sort.Interface implementation.
func (s nodeHeightSorter) Swap(i, j int) {
s[i], s[j] = s[j], s[i]
}
// Less returns whether the node with index i should sort before the node with
// index j. It is part of the sort.Interface implementation.
func (s nodeHeightSorter) Less(i, j int) bool {
// To ensure stable order when the heights are the same, fall back to
// sorting based on hash.
if s[i].Height == s[j].Height {
return strings.Compare(s[i].Hash, s[j].Hash) < 0
}
return s[i].Height < s[j].Height
}
// ChainTips returns information, in JSON-RPC format, about all the currently
// known chain tips in the block index.
func (b *BlockChain) ChainTips() []ChainTip {
// we need our current tip
// we also need all of our orphans that aren't in the prevOrphans
var results []ChainTip
tip := b.bestChain.Tip()
results = append(results, ChainTip{
Height: int64(tip.height),
Hash: tip.hash.String(),
BranchLen: 0,
Status: "active",
})
b.orphanLock.RLock()
defer b.orphanLock.RUnlock()
notInBestChain := func(block *btcutil.Block) bool {
node := b.bestChain.NodeByHeight(block.Height())
if node == nil {
return false
}
return node.hash.IsEqual(block.Hash())
}
for hash, orphan := range b.orphans {
if len(b.prevOrphans[hash]) > 0 {
continue
}
fork := orphan.block
for fork != nil && notInBestChain(fork) {
fork = b.orphans[*fork.Hash()].block
}
result := ChainTip{
Height: int64(orphan.block.Height()),
Hash: hash.String(),
BranchLen: int64(orphan.block.Height() - fork.Height()),
}
// Determine the status of the chain tip.
//
// active:
// The current best chain tip.
//
// invalid:
// The block or one of its ancestors is invalid.
//
// headers-only:
// The block or one of its ancestors does not have the full block data
// available which also means the block can't be validated or
// connected.
//
// valid-fork:
// The block is fully validated which implies it was probably part of
// main chain at one point and was reorganized.
//
// valid-headers:
// The full block data is available and the header is valid, but the
// block was never validated which implies it was probably never part
// of the main chain.
tipStatus := b.index.LookupNode(&hash).status
if tipStatus.KnownInvalid() {
result.Status = "invalid"
} else if !tipStatus.HaveData() {
result.Status = "headers-only"
} else if tipStatus.KnownValid() {
result.Status = "valid-fork"
} else {
result.Status = "valid-headers"
}
results = append(results, result)
}
// Generate the results sorted by descending height.
sort.Sort(sort.Reverse(nodeHeightSorter(results)))
return results
}
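
For context, a minimal sketch (not part of the diff) of how a caller such as a `getchaintips` RPC handler might consume the new `ChainTips` method; the `printChainTips` helper and the `example` package name are hypothetical.

```go
package example

import (
	"fmt"

	"github.com/lbryio/lbcd/blockchain"
)

// printChainTips is a hypothetical helper that walks the tips reported by
// ChainTips and prints them; Status carries one of the values documented
// above (active, invalid, headers-only, valid-fork, valid-headers).
func printChainTips(chain *blockchain.BlockChain) {
	for _, tip := range chain.ChainTips() {
		fmt.Printf("height=%d hash=%s branchlen=%d status=%s\n",
			tip.Height, tip.Hash, tip.BranchLen, tip.Status)
	}
}
```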

View file

@ -10,7 +10,7 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/wire"
)
// testNoncePrng provides a deterministic prng for the nonce in generated fake

View file

@ -8,10 +8,10 @@ import (
"fmt"
"time"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/txscript"
btcutil "github.com/lbryio/lbcutil"
)
// CheckpointConfirmations is the number of blocks before the end of the current
@ -172,7 +172,8 @@ func (b *BlockChain) findPreviousCheckpoint() (*blockNode, error) {
func isNonstandardTransaction(tx *btcutil.Tx) bool {
// Check all of the output public key scripts for non-standard scripts.
for _, txOut := range tx.MsgTx().TxOut {
scriptClass := txscript.GetScriptClass(txOut.PkScript)
stripped := txscript.StripClaimScriptPrefix(txOut.PkScript)
scriptClass := txscript.GetScriptClass(stripped)
if scriptClass == txscript.NonStandardTy {
return true
}

252
blockchain/claimtrie.go Normal file
View file

@ -0,0 +1,252 @@
package blockchain
import (
"bytes"
"fmt"
"strings"
"github.com/pkg/errors"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/txscript"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
"github.com/lbryio/lbcd/claimtrie"
"github.com/lbryio/lbcd/claimtrie/change"
"github.com/lbryio/lbcd/claimtrie/merkletrie"
"github.com/lbryio/lbcd/claimtrie/node"
"github.com/lbryio/lbcd/claimtrie/normalization"
"github.com/lbryio/lbcd/claimtrie/param"
)
func (b *BlockChain) SetClaimtrieHeader(block *btcutil.Block, view *UtxoViewpoint) error {
b.chainLock.Lock()
defer b.chainLock.Unlock()
err := b.ParseClaimScripts(block, nil, view, false)
if err != nil {
return errors.Wrapf(err, "in parse claim scripts")
}
block.MsgBlock().Header.ClaimTrie = *b.claimTrie.MerkleHash()
err = b.claimTrie.ResetHeight(b.claimTrie.Height() - 1)
return errors.Wrapf(err, "in reset height")
}
func (b *BlockChain) ParseClaimScripts(block *btcutil.Block, bn *blockNode, view *UtxoViewpoint, shouldFlush bool) error {
ht := block.Height()
for _, tx := range block.Transactions() {
h := handler{ht, tx, view, map[string][]byte{}}
if err := h.handleTxIns(b.claimTrie); err != nil {
return err
}
if err := h.handleTxOuts(b.claimTrie); err != nil {
return err
}
}
err := b.claimTrie.AppendBlock()
if err != nil {
return errors.Wrapf(err, "in append block")
}
if shouldFlush {
b.claimTrie.FlushToDisk()
}
hash := b.claimTrie.MerkleHash()
if bn != nil && bn.claimTrie != *hash {
// undo our AppendBlock call as we've decided that our interpretation of the block data is incorrect,
// or that the person who made the block assembled the pieces incorrectly.
_ = b.claimTrie.ResetHeight(b.claimTrie.Height() - 1)
return errors.Errorf("height: %d, computed hash: %s != header's ClaimTrie: %s", ht, *hash, bn.claimTrie)
}
return nil
}
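
A hedged sketch (not part of the diff) of how a block assembler might use `SetClaimtrieHeader`; the `stampClaimTrieRoot` wrapper is hypothetical, and the view is assumed to already contain the inputs spent by the candidate block.

```go
package example

import (
	"github.com/lbryio/lbcd/blockchain"
	btcutil "github.com/lbryio/lbcutil"
)

// stampClaimTrieRoot is a hypothetical wrapper: it runs the candidate block's
// claim scripts against the trie, writes the resulting root into the header's
// ClaimTrie field, and rolls the trie height back so chain state is unchanged.
func stampClaimTrieRoot(chain *blockchain.BlockChain, candidate *btcutil.Block, view *blockchain.UtxoViewpoint) error {
	return chain.SetClaimtrieHeader(candidate, view)
}
```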
type handler struct {
ht int32
tx *btcutil.Tx
view *UtxoViewpoint
spent map[string][]byte
}
func (h *handler) handleTxIns(ct *claimtrie.ClaimTrie) error {
if IsCoinBase(h.tx) {
return nil
}
for _, txIn := range h.tx.MsgTx().TxIn {
op := txIn.PreviousOutPoint
e := h.view.LookupEntry(op)
if e == nil {
return errors.Errorf("missing input in view for %s", op.String())
}
cs, err := txscript.ExtractClaimScript(e.pkScript)
if txscript.IsErrorCode(err, txscript.ErrNotClaimScript) {
continue
}
if err != nil {
return err
}
var id change.ClaimID
name := cs.Name // name of the previous one (that we're now spending)
switch cs.Opcode {
case txscript.OP_CLAIMNAME: // OP code from previous transaction
id = change.NewClaimID(op) // claimID of the previous item now being spent
h.spent[id.Key()] = normalization.NormalizeIfNecessary(name, ct.Height())
err = ct.SpendClaim(name, op, id)
case txscript.OP_UPDATECLAIM:
copy(id[:], cs.ClaimID)
h.spent[id.Key()] = normalization.NormalizeIfNecessary(name, ct.Height())
err = ct.SpendClaim(name, op, id)
case txscript.OP_SUPPORTCLAIM:
copy(id[:], cs.ClaimID)
err = ct.SpendSupport(name, op, id)
}
if err != nil {
return errors.Wrapf(err, "handleTxIns")
}
}
return nil
}
func (h *handler) handleTxOuts(ct *claimtrie.ClaimTrie) error {
for i, txOut := range h.tx.MsgTx().TxOut {
op := *wire.NewOutPoint(h.tx.Hash(), uint32(i))
cs, err := txscript.ExtractClaimScript(txOut.PkScript)
if txscript.IsErrorCode(err, txscript.ErrNotClaimScript) {
continue
}
if err != nil {
return err
}
var id change.ClaimID
name := cs.Name
amt := txOut.Value
switch cs.Opcode {
case txscript.OP_CLAIMNAME:
id = change.NewClaimID(op)
err = ct.AddClaim(name, op, id, amt)
case txscript.OP_SUPPORTCLAIM:
copy(id[:], cs.ClaimID)
err = ct.AddSupport(name, op, amt, id)
case txscript.OP_UPDATECLAIM:
// The old code wouldn't run the update if the name or claim ID didn't match existing data.
// That was a safety feature, but it should have rejected the transaction instead.
// TODO: reject transactions with invalid update commands
copy(id[:], cs.ClaimID)
normName := normalization.NormalizeIfNecessary(name, ct.Height())
if !bytes.Equal(h.spent[id.Key()], normName) {
node.LogOnce(fmt.Sprintf("Invalid update operation: name or ID mismatch at %d for: %s, %s",
ct.Height(), normName, id.String()))
continue
}
delete(h.spent, id.Key())
err = ct.UpdateClaim(name, op, amt, id)
}
if err != nil {
return errors.Wrapf(err, "handleTxOuts")
}
}
return nil
}
func (b *BlockChain) GetNamesChangedInBlock(height int32) ([]string, error) {
b.chainLock.RLock()
defer b.chainLock.RUnlock()
return b.claimTrie.NamesChangedInBlock(height)
}
func (b *BlockChain) GetClaimsForName(height int32, name string) (string, *node.Node, error) {
normalizedName := normalization.NormalizeIfNecessary([]byte(name), height)
b.chainLock.RLock()
defer b.chainLock.RUnlock()
n, err := b.claimTrie.NodeAt(height, normalizedName)
if err != nil {
return string(normalizedName), nil, err
}
if n == nil {
return string(normalizedName), nil, fmt.Errorf("name does not exist at height %d: %s", height, name)
}
n.SortClaimsByBid()
return string(normalizedName), n, nil
}
func (b *BlockChain) GetProofForName(name, id string, bid, seq int) (chainhash.Hash, int32, *node.Claim, int32, int32, string, []merkletrie.HashSidePair, error) {
// results: block hash, height, claim, bid, takeover, name, pairs, err
b.chainLock.RLock()
defer b.chainLock.RUnlock()
tip := b.bestChain.Tip()
normalizedName := normalization.NormalizeIfNecessary([]byte(name), tip.height)
if tip.height < param.ActiveParams.GrandForkHeight {
err := errors.Errorf("Unable to generate proofs for claims before height %d",
param.ActiveParams.GrandForkHeight)
return tip.hash, tip.height, nil, 0, 0, string(normalizedName), nil, err
}
n, err := b.claimTrie.NodeAt(tip.height, normalizedName)
if n == nil && err == nil {
err = errors.Errorf("Unable to locate a claim with name %s at height %d", normalizedName, tip.height)
}
if err != nil {
return tip.hash, tip.height, nil, 0, 0, string(normalizedName), nil, err
}
// Now find the desired claim by bid order, sequence number, or claim ID prefix.
n.SortClaimsByBid()
var claim *node.Claim
for i, c := range n.Claims {
if c.Status != node.Activated {
continue
}
if bid >= 0 && i == bid {
claim = c
bid = i
break
}
if seq >= 0 && int(c.Sequence) == seq {
claim = c
bid = i
break
}
if len(id) > 0 && strings.HasPrefix(c.ClaimID.String(), id) {
claim = c
bid = i
break
}
}
if claim == nil {
if bid >= 0 {
err = errors.Errorf("Unable to locate a claim named %s with bid %d at height %d", normalizedName, bid, tip.height)
}
if seq >= 0 {
err = errors.Errorf("Unable to locate a claim named %s with sequence %d at height %d", normalizedName, seq, tip.height)
}
if len(id) > 0 {
err = errors.Errorf("Unable to locate a claim named %s with ID %s at height %d", normalizedName, id, tip.height)
}
return tip.hash, tip.height, nil, 0, 0, string(normalizedName), nil, err
}
pairs := b.claimTrie.MerklePath(normalizedName, n, bid)
return tip.hash, tip.height, claim, int32(bid), n.TakenOverAt, string(normalizedName), pairs, nil
}
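
A hypothetical usage sketch (not part of the diff) that unpacks `GetProofForName`'s return values in the order listed in its comment; `showProof` and the lookup arguments (top-bid claim, no ID or sequence filter) are assumptions.

```go
package example

import (
	"fmt"

	"github.com/lbryio/lbcd/blockchain"
)

// showProof resolves the top-bid activated claim for a name and reports the
// proof data returned by GetProofForName.
func showProof(chain *blockchain.BlockChain, name string) error {
	hash, height, claim, bid, takeover, normName, pairs, err := chain.GetProofForName(name, "", 0, -1)
	if err != nil {
		return err
	}
	fmt.Printf("tip %s@%d: claim %s (bid %d, takeover %d) for %q with %d proof pairs\n",
		hash, height, claim.ClaimID, bid, takeover, normName, len(pairs))
	return nil
}
```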

View file

@ -5,6 +5,7 @@
package blockchain
import (
"bytes"
"compress/bzip2"
"encoding/binary"
"fmt"
@ -14,13 +15,13 @@ import (
"strings"
"time"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
_ "github.com/btcsuite/btcd/database/ffldb"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
_ "github.com/lbryio/lbcd/database/ffldb"
"github.com/lbryio/lbcd/txscript"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
const (
@ -63,13 +64,13 @@ func isSupportedDbType(dbType string) bool {
func loadBlocks(filename string) (blocks []*btcutil.Block, err error) {
filename = filepath.Join("testdata/", filename)
var network = wire.MainNet
var network = 0xd9b4bef9 // bitcoin's network ID
var dr io.Reader
var fi io.ReadCloser
fi, err = os.Open(filename)
if err != nil {
return
return blocks, err
}
if strings.HasSuffix(filename, ".bz2") {
@ -95,7 +96,7 @@ func loadBlocks(filename string) (blocks []*btcutil.Block, err error) {
break
}
if rintbuf != uint32(network) {
break
continue
}
err = binary.Read(dr, binary.LittleEndian, &rintbuf)
blocklen := rintbuf
@ -105,14 +106,20 @@ func loadBlocks(filename string) (blocks []*btcutil.Block, err error) {
// read block
dr.Read(rbytes)
// Inject a placeholder claim trie root into each header: the 32-byte ClaimTrie
// field sits after version (4) + prev block (32) + merkle root (32) = 68 bytes.
tail := make([]byte, len(rbytes)-68)
copy(tail, rbytes[68:])
rbytes = append(rbytes[:68], bytes.Repeat([]byte{23}, chainhash.HashSize)...)
rbytes = append(rbytes, tail...)
block, err = btcutil.NewBlockFromBytes(rbytes)
if err != nil {
return
return blocks, err
}
blocks = append(blocks, block)
}
return
return blocks, err
}
// chainSetup is used to create a new db and chain instance with the genesis

View file

@ -5,8 +5,8 @@
package blockchain
import (
"github.com/btcsuite/btcd/btcec"
"github.com/btcsuite/btcd/txscript"
"github.com/lbryio/lbcd/btcec"
"github.com/lbryio/lbcd/txscript"
)
// -----------------------------------------------------------------------------

View file

@ -8,7 +8,7 @@ import (
"math/big"
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/chaincfg/chainhash"
)
var (
@ -159,7 +159,6 @@ func CalcWork(bits uint32) *big.Int {
func (b *BlockChain) calcEasiestDifficulty(bits uint32, duration time.Duration) uint32 {
// Convert types used in the calculations below.
durationVal := int64(duration / time.Second)
adjustmentFactor := big.NewInt(b.chainParams.RetargetAdjustmentFactor)
// The test network rules allow minimum difficulty blocks after more
// than twice the desired amount of time needed to generate a block has
@ -178,7 +177,8 @@ func (b *BlockChain) calcEasiestDifficulty(bits uint32, duration time.Duration)
// multiplied by the max adjustment factor.
newTarget := CompactToBig(bits)
for durationVal > 0 && newTarget.Cmp(b.chainParams.PowLimit) < 0 {
newTarget.Mul(newTarget, adjustmentFactor)
adj := new(big.Int).Div(newTarget, big.NewInt(2))
newTarget.Add(newTarget, adj)
durationVal -= b.maxRetargetTimespan
}
@ -224,47 +224,45 @@ func (b *BlockChain) calcNextRequiredDifficulty(lastNode *blockNode, newBlockTim
return b.chainParams.PowLimitBits, nil
}
// Return the previous block's difficulty requirements if this block
// is not at a difficulty retarget interval.
if (lastNode.height+1)%b.blocksPerRetarget != 0 {
// For networks that support it, allow special reduction of the
// required difficulty once too much time has elapsed without
// mining a block.
if b.chainParams.ReduceMinDifficulty {
// Return minimum difficulty when more than the desired
// amount of time has elapsed without mining a block.
reductionTime := int64(b.chainParams.MinDiffReductionTime /
time.Second)
allowMinTime := lastNode.timestamp + reductionTime
if newBlockTime.Unix() > allowMinTime {
return b.chainParams.PowLimitBits, nil
}
// The block was mined within the desired timeframe, so
// return the difficulty for the last block which did
// not have the special minimum difficulty rule applied.
return b.findPrevTestNetDifficulty(lastNode), nil
// For networks that support it, allow special reduction of the
// required difficulty once too much time has elapsed without
// mining a block.
if b.chainParams.ReduceMinDifficulty {
// Return minimum difficulty when more than the desired
// amount of time has elapsed without mining a block.
reductionTime := int64(b.chainParams.MinDiffReductionTime /
time.Second)
allowMinTime := lastNode.timestamp + reductionTime
if newBlockTime.Unix() > allowMinTime {
return b.chainParams.PowLimitBits, nil
}
// For the main network (or any unrecognized networks), simply
// return the previous block's difficulty requirements.
return lastNode.bits, nil
// The block was mined within the desired timeframe, so
// return the difficulty for the last block which did
// not have the special minimum difficulty rule applied.
return b.findPrevTestNetDifficulty(lastNode), nil
}
// Get the block node at the previous retarget (targetTimespan days
// worth of blocks).
firstNode := lastNode.RelativeAncestor(b.blocksPerRetarget - 1)
blocksBack := b.blocksPerRetarget
if blocksBack > lastNode.height {
blocksBack = lastNode.height
}
firstNode := lastNode.RelativeAncestor(blocksBack)
if firstNode == nil {
return 0, AssertError("unable to obtain previous retarget block")
}
targetTimeSpan := int64(b.chainParams.TargetTimespan / time.Second)
// Limit the amount of adjustment that can occur to the previous
// difficulty.
actualTimespan := lastNode.timestamp - firstNode.timestamp
adjustedTimespan := actualTimespan
if actualTimespan < b.minRetargetTimespan {
adjustedTimespan := targetTimeSpan + (actualTimespan-targetTimeSpan)/8
if adjustedTimespan < b.minRetargetTimespan {
adjustedTimespan = b.minRetargetTimespan
} else if actualTimespan > b.maxRetargetTimespan {
} else if adjustedTimespan > b.maxRetargetTimespan {
adjustedTimespan = b.maxRetargetTimespan
}
@ -275,7 +273,6 @@ func (b *BlockChain) calcNextRequiredDifficulty(lastNode *blockNode, newBlockTim
// result.
oldTarget := CompactToBig(lastNode.bits)
newTarget := new(big.Int).Mul(oldTarget, big.NewInt(adjustedTimespan))
targetTimeSpan := int64(b.chainParams.TargetTimespan / time.Second)
newTarget.Div(newTarget, big.NewInt(targetTimeSpan))
// Limit new value to the proof of work limit.
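
For clarity, a small sketch (not part of the diff) of the damped retarget introduced above: only one eighth of the deviation from the target timespan is applied, the result is clamped, and it then scales the previous target. The `dampedTimespan` helper is hypothetical.

```go
package example

// dampedTimespan mirrors the adjustment used in calcNextRequiredDifficulty:
// adjusted = target + (actual-target)/8, clamped to [minSpan, maxSpan].
// The new target is then oldTarget * adjusted / target.
func dampedTimespan(actual, target, minSpan, maxSpan int64) int64 {
	adjusted := target + (actual-target)/8
	if adjusted < minSpan {
		adjusted = minSpan
	} else if adjusted > maxSpan {
		adjusted = maxSpan
	}
	return adjusted
}
```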

View file

@ -220,6 +220,10 @@ const (
// current chain tip. This is not a block validation rule, but is required
// for block proposals submitted via getblocktemplate RPC.
ErrPrevBlockNotBest
// ErrBadClaimTrie indicates the calculated ClaimTrie root does not match
// the expected value.
ErrBadClaimTrie
)
// Map of ErrorCode values back to their constant names for pretty printing.
@ -267,6 +271,7 @@ var errorCodeStrings = map[ErrorCode]string{
ErrPreviousBlockUnknown: "ErrPreviousBlockUnknown",
ErrInvalidAncestorBlock: "ErrInvalidAncestorBlock",
ErrPrevBlockNotBest: "ErrPrevBlockNotBest",
ErrBadClaimTrie: "ErrBadClaimTrie",
}
// String returns the ErrorCode as a human-readable name.

View file

@ -10,11 +10,11 @@ import (
"os"
"path/filepath"
"github.com/btcsuite/btcd/blockchain"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/database"
_ "github.com/btcsuite/btcd/database/ffldb"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/blockchain"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/database"
_ "github.com/lbryio/lbcd/database/ffldb"
btcutil "github.com/lbryio/lbcutil"
)
// This example demonstrates how to create a new chain instance and use
@ -69,7 +69,7 @@ func ExampleBlockChain_ProcessBlock() {
fmt.Printf("Block accepted. Is it an orphan?: %v", isOrphan)
// Output:
// Failed to process block: already have block 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
// Failed to process block: already have block 9c89283ba0f3227f6c03b70216b9f665f0118d5e0fa729cedf4fb34d6a34f463
}
// This example demonstrates how to convert the compact "bits" in a block header

View file

@ -12,15 +12,15 @@ import (
"path/filepath"
"testing"
"github.com/btcsuite/btcd/blockchain"
"github.com/btcsuite/btcd/blockchain/fullblocktests"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
_ "github.com/btcsuite/btcd/database/ffldb"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/blockchain"
"github.com/lbryio/lbcd/blockchain/fullblocktests"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
_ "github.com/lbryio/lbcd/database/ffldb"
"github.com/lbryio/lbcd/txscript"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
const (
@ -139,7 +139,7 @@ func TestFullBlocks(t *testing.T) {
// Create a new database and chain instance to run tests against.
chain, teardownFunc, err := chainSetup("fullblocktest",
&chaincfg.RegressionNetParams)
fullblocktests.FbRegressionNetParams)
if err != nil {
t.Errorf("Failed to setup chain instance: %v", err)
return

View file

@ -1,9 +1,9 @@
fullblocktests
==============
[![Build Status](https://github.com/btcsuite/btcd/workflows/Build%20and%20Test/badge.svg)](https://github.com/btcsuite/btcd/actions)
[![Build Status](https://github.com/lbryio/lbcd/workflows/Build%20and%20Test/badge.svg)](https://github.com/lbryio/lbcd/actions)
[![ISC License](http://img.shields.io/badge/license-ISC-blue.svg)](http://copyfree.org)
[![GoDoc](https://img.shields.io/badge/godoc-reference-blue.svg)](https://pkg.go.dev/github.com/btcsuite/btcd/blockchain/fullblocktests)
[![GoDoc](https://img.shields.io/badge/godoc-reference-blue.svg)](https://pkg.go.dev/github.com/lbryio/lbcd/blockchain/fullblocktests)
Package fullblocktests provides a set of full block tests to be used for testing
the consensus validation rules. The tests are intended to be flexible enough to
@ -20,7 +20,7 @@ of blocks that exercise the consensus validation rules.
## Installation and Updating
```bash
$ go get -u github.com/btcsuite/btcd/blockchain/fullblocktests
$ go get -u github.com/lbryio/lbcd/blockchain/fullblocktests
```
## License

View file

@ -18,24 +18,24 @@ import (
"runtime"
"time"
"github.com/btcsuite/btcd/blockchain"
"github.com/btcsuite/btcd/btcec"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/blockchain"
"github.com/lbryio/lbcd/btcec"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/txscript"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
const (
// Intentionally defined here rather than using constants from codebase
// to ensure consensus changes are detected.
maxBlockSigOps = 20000
maxBlockSize = 1000000
maxBlockSize = 8000000
minCoinbaseScriptLen = 2
maxCoinbaseScriptLen = 100
medianTimeBlocks = 11
maxScriptElementSize = 520
maxScriptElementSize = 20000
// numLargeReorgBlocks is the number of blocks to use in the large block
// reorg test (when enabled). This is the equivalent of 1 week's worth
@ -342,10 +342,8 @@ func solveBlock(header *wire.BlockHeader) bool {
return
default:
hdr.Nonce = i
hash := hdr.BlockHash()
if blockchain.HashToBig(&hash).Cmp(
targetDifficulty) <= 0 {
hash := hdr.BlockPoWHash()
if blockchain.HashToBig(&hash).Cmp(targetDifficulty) <= 0 {
results <- sbResult{true, i}
return
}
@ -811,7 +809,7 @@ func Generate(includeLargeReorg bool) (tests [][]TestInstance, err error) {
// Create a test generator instance initialized with the genesis block
// as the tip.
g, err := makeTestGenerator(regressionNetParams)
g, err := makeTestGenerator(FbRegressionNetParams)
if err != nil {
return nil, err
}
@ -1444,7 +1442,7 @@ func Generate(includeLargeReorg bool) (tests [][]TestInstance, err error) {
// Keep incrementing the nonce until the hash treated as
// a uint256 is higher than the limit.
b46.Header.Nonce++
blockHash := b46.BlockHash()
blockHash := b46.Header.BlockPoWHash()
hashNum := blockchain.HashToBig(&blockHash)
if hashNum.Cmp(g.params.PowLimit) >= 0 {
break
@ -1875,7 +1873,7 @@ func Generate(includeLargeReorg bool) (tests [][]TestInstance, err error) {
//
// Comment assumptions:
// maxBlockSigOps = 20000
// maxScriptElementSize = 520
// maxScriptElementSize = 20000
//
// [0-19999] : OP_CHECKSIG
// [20000] : OP_PUSHDATA4

View file

@ -9,9 +9,9 @@ import (
"math/big"
"time"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/wire"
)
// newHashFromStr converts the passed big-endian hex string into a
@ -54,6 +54,7 @@ var (
Version: 1,
PrevBlock: *newHashFromStr("0000000000000000000000000000000000000000000000000000000000000000"),
MerkleRoot: *newHashFromStr("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b"),
ClaimTrie: chainhash.Hash{1}, // EmptyTrieHash
Timestamp: time.Unix(1296688602, 0), // 2011-02-02 23:16:42 +0000 UTC
Bits: 0x207fffff, // 545259519 [7fffff0000000000000000000000000000000000000000000000000000000000]
Nonce: 2,
@ -83,23 +84,25 @@ var (
LockTime: 0,
}},
}
regTestGenesisBlockHash = regTestGenesisBlock.BlockHash()
)
// regressionNetParams defines the network parameters for the regression test
// FbRegressionNetParams defines the network parameters for the regression test
// network.
//
// NOTE: The test generator intentionally does not use the existing definitions
// in the chaincfg package since the intent is to be able to generate known
// good tests which exercise that code. Using the chaincfg parameters would
// allow them to change out from under the tests potentially invalidating them.
var regressionNetParams = &chaincfg.Params{
var FbRegressionNetParams = &chaincfg.Params{
Name: "regtest",
Net: wire.TestNet,
DefaultPort: "18444",
// Chain parameters
GenesisBlock: &regTestGenesisBlock,
GenesisHash: newHashFromStr("5bec7567af40504e0994db3b573c186fffcc4edefe096ff2e58d00523bd7e8a6"),
GenesisHash: &regTestGenesisBlockHash,
PowLimit: regressionPowLimit,
PowLimitBits: 0x207fffff,
CoinbaseMaturity: 100,
@ -113,6 +116,7 @@ var regressionNetParams = &chaincfg.Params{
ReduceMinDifficulty: true,
MinDiffReductionTime: time.Minute * 20, // TargetTimePerBlock * 2
GenerateSupported: true,
MinerConfirmationWindow: 1,
// Checkpoints ordered from oldest to newest.
Checkpoints: nil,

View file

@ -1,9 +1,9 @@
indexers
========
[![Build Status](https://github.com/btcsuite/btcd/workflows/Build%20and%20Test/badge.svg)](https://github.com/btcsuite/btcd/actions)
[![Build Status](https://github.com/lbryio/lbcd/workflows/Build%20and%20Test/badge.svg)](https://github.com/lbryio/lbcd/actions)
[![ISC License](http://img.shields.io/badge/license-ISC-blue.svg)](http://copyfree.org)
[![GoDoc](https://pkg.go.dev/github.com/btcsuite/btcd/blockchain/indexers?status.png)](https://pkg.go.dev/github.com/btcsuite/btcd/blockchain/indexers)
[![GoDoc](https://pkg.go.dev/github.com/lbryio/lbcd/blockchain/indexers?status.png)](https://pkg.go.dev/github.com/lbryio/lbcd/blockchain/indexers)
Package indexers implements optional block chain indexes.
@ -23,7 +23,7 @@ via an RPC interface.
## Installation
```bash
$ go get -u github.com/btcsuite/btcd/blockchain/indexers
$ go get -u github.com/lbryio/lbcd/blockchain/indexers
```
## License

View file

@ -9,13 +9,13 @@ import (
"fmt"
"sync"
"github.com/btcsuite/btcd/blockchain"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/blockchain"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
"github.com/lbryio/lbcd/txscript"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
const (

View file

@ -9,7 +9,7 @@ import (
"fmt"
"testing"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/wire"
)
// addrIndexBucket provides a mock address index database bucket by implementing

View file

@ -9,7 +9,7 @@ import (
"time"
"github.com/btcsuite/btclog"
"github.com/btcsuite/btcutil"
btcutil "github.com/lbryio/lbcutil"
)
// blockProgressLogger provides periodic logging for other services in order

View file

@ -7,14 +7,14 @@ package indexers
import (
"errors"
"github.com/btcsuite/btcd/blockchain"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/btcsuite/btcutil/gcs"
"github.com/btcsuite/btcutil/gcs/builder"
"github.com/lbryio/lbcd/blockchain"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
"github.com/lbryio/lbcutil/gcs"
"github.com/lbryio/lbcutil/gcs/builder"
)
const (

View file

@ -11,9 +11,9 @@ import (
"encoding/binary"
"errors"
"github.com/btcsuite/btcd/blockchain"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/blockchain"
"github.com/lbryio/lbcd/database"
btcutil "github.com/lbryio/lbcutil"
)
var (

View file

@ -8,11 +8,11 @@ import (
"bytes"
"fmt"
"github.com/btcsuite/btcd/blockchain"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/blockchain"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
var (

View file

@ -8,11 +8,11 @@ import (
"errors"
"fmt"
"github.com/btcsuite/btcd/blockchain"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/blockchain"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
const (

View file

@ -183,7 +183,7 @@ func (m *medianTime) AddTimeSample(sourceID string, timeVal time.Time) {
// Warn if none of the time samples are close.
if !remoteHasCloseTime {
log.Warnf("Please check your date and time " +
"are correct! btcd will not work " +
"are correct! lbcd will not work " +
"properly with an invalid time")
}
}

View file

@ -9,9 +9,9 @@ import (
"fmt"
"math"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/txscript"
btcutil "github.com/lbryio/lbcutil"
)
const (
@ -230,8 +230,8 @@ func ValidateWitnessCommitment(blk *btcutil.Block) error {
coinbaseWitness := coinbaseTx.MsgTx().TxIn[0].Witness
if len(coinbaseWitness) != 1 {
str := fmt.Sprintf("the coinbase transaction has %d items in "+
"its witness stack when only one is allowed",
len(coinbaseWitness))
"its witness stack when only one is allowed. Height: %d",
len(coinbaseWitness), blk.Height())
return ruleError(ErrInvalidWitnessCommitment, str)
}
witnessNonce := coinbaseWitness[0]

View file

@ -6,16 +6,14 @@ package blockchain
import (
"testing"
"github.com/btcsuite/btcutil"
)
// TestMerkle tests the BuildMerkleTreeStore API.
func TestMerkle(t *testing.T) {
block := btcutil.NewBlock(&Block100000)
block := GetBlock100000()
merkles := BuildMerkleTreeStore(block.Transactions(), false)
calculatedMerkleRoot := merkles[len(merkles)-1]
wantMerkle := &Block100000.Header.MerkleRoot
wantMerkle := block.MsgBlock().Header.MerkleRoot
if !wantMerkle.IsEqual(calculatedMerkleRoot) {
t.Errorf("BuildMerkleTreeStore: merkle root mismatch - "+
"got %v, want %v", calculatedMerkleRoot, wantMerkle)

View file

@ -1,51 +0,0 @@
// Copyright (c) 2017 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package blockchain
import (
"testing"
"github.com/btcsuite/btcd/chaincfg"
)
// TestNotifications ensures that notification callbacks are fired on events.
func TestNotifications(t *testing.T) {
blocks, err := loadBlocks("blk_0_to_4.dat.bz2")
if err != nil {
t.Fatalf("Error loading file: %v\n", err)
}
// Create a new database and chain instance to run tests against.
chain, teardownFunc, err := chainSetup("notifications",
&chaincfg.MainNetParams)
if err != nil {
t.Fatalf("Failed to setup chain instance: %v", err)
}
defer teardownFunc()
notificationCount := 0
callback := func(notification *Notification) {
if notification.Type == NTBlockAccepted {
notificationCount++
}
}
// Register callback multiple times then assert it is called that many
// times.
const numSubscribers = 3
for i := 0; i < numSubscribers; i++ {
chain.Subscribe(callback)
}
_, _, err = chain.ProcessBlock(blocks[1], BFNone)
if err != nil {
t.Fatalf("ProcessBlock fail on block 1: %v\n", err)
}
if notificationCount != numSubscribers {
t.Fatalf("Expected notification callback to be executed %d "+
"times, found %d", numSubscribers, notificationCount)
}
}

View file

@ -8,9 +8,9 @@ import (
"fmt"
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
btcutil "github.com/lbryio/lbcutil"
)
// BehaviorFlags is a bitmask defining tweaks to the normal behavior when
@ -29,6 +29,10 @@ const (
// not be performed.
BFNoPoWCheck
// BFNoDupBlockCheck signals that the duplicate-block existence checks
// (main chain, side chains, and orphan pool) should not
BFNoDupBlockCheck
// BFNone is a convenience value to specifically indicate no flags.
BFNone BehaviorFlags = 0
)
@ -148,24 +152,26 @@ func (b *BlockChain) ProcessBlock(block *btcutil.Block, flags BehaviorFlags) (bo
blockHash := block.Hash()
log.Tracef("Processing block %v", blockHash)
// The block must not already exist in the main chain or side chains.
exists, err := b.blockExists(blockHash)
if err != nil {
return false, false, err
}
if exists {
str := fmt.Sprintf("already have block %v", blockHash)
return false, false, ruleError(ErrDuplicateBlock, str)
}
if flags&BFNoDupBlockCheck != BFNoDupBlockCheck {
// The block must not already exist in the main chain or side chains.
exists, err := b.blockExists(blockHash)
if err != nil {
return false, false, err
}
if exists {
str := fmt.Sprintf("already have block %v", blockHash)
return false, false, ruleError(ErrDuplicateBlock, str)
}
// The block must not already exist as an orphan.
if _, exists := b.orphans[*blockHash]; exists {
str := fmt.Sprintf("already have block (orphan) %v", blockHash)
return false, false, ruleError(ErrDuplicateBlock, str)
// The block must not already exist as an orphan.
if _, exists := b.orphans[*blockHash]; exists {
str := fmt.Sprintf("already have block (orphan) %v", blockHash)
return false, false, ruleError(ErrDuplicateBlock, str)
}
}
// Perform preliminary sanity checks on the block and its transactions.
err = checkBlockSanity(block, b.chainParams.PowLimit, b.timeSource, flags)
err := checkBlockSanity(block, b.chainParams.PowLimit, b.timeSource, flags)
if err != nil {
return false, false, err
}
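
A hypothetical sketch (not part of the diff) of a caller using the new BFNoDupBlockCheck flag when it has already ruled out duplicates itself; `reprocessBlock` is an assumed helper.

```go
package example

import (
	"github.com/lbryio/lbcd/blockchain"
	btcutil "github.com/lbryio/lbcutil"
)

// reprocessBlock submits a block while skipping the duplicate-existence
// checks that ProcessBlock normally performs up front.
func reprocessBlock(chain *blockchain.BlockChain, block *btcutil.Block) (bool, bool, error) {
	return chain.ProcessBlock(block, blockchain.BFNoDupBlockCheck)
}
```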

View file

@ -10,9 +10,9 @@ import (
"runtime"
"time"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/txscript"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
// txValidateItem holds a transaction along with which input to validate.

View file

@ -8,7 +8,7 @@ import (
"fmt"
"testing"
"github.com/btcsuite/btcd/txscript"
"github.com/lbryio/lbcd/txscript"
)
// TestCheckBlockScripts ensures that validating the all of the scripts in a

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -1,180 +0,0 @@
File path: reorgTest/blk_0_to_4.dat
Block 0:
f9beb4d9
1d010000
01000000 00000000 00000000 00000000 00000000 00000000 00000000 00000000
00000000 3ba3edfd 7a7b12b2 7ac72c3e 67768f61 7fc81bc3 888a5132 3a9fb8aa
4b1e5e4a 29ab5f49 ffff001d 1dac2b7c
01
01000000 01000000 00000000 00000000 00000000 00000000 00000000 00000000
00000000 00ffffff ff4d04ff ff001d01 04455468 65205469 6d657320 30332f4a
616e2f32 30303920 4368616e 63656c6c 6f72206f 6e206272 696e6b20 6f662073
65636f6e 64206261 696c6f75 7420666f 72206261 6e6b73ff ffffff01 00f2052a
01000000 43410467 8afdb0fe 55482719 67f1a671 30b7105c d6a828e0 3909a679
62e0ea1f 61deb649 f6bc3f4c ef38c4f3 5504e51e c112de5c 384df7ba 0b8d578a
4c702b6b f11d5fac 00000000
Block 1:
f9beb4d9
d4000000
01000000 6fe28c0a b6f1b372 c1a6a246 ae63f74f 931e8365 e15a089c 68d61900
00000000 3bbd67ad e98fbbb7 0718cd80 f9e9acf9 3b5fae91 7bb2b41d 4c3bb82c
77725ca5 81ad5f49 ffff001d 44e69904
01
01000000 01000000 00000000 00000000 00000000 00000000 00000000 00000000
00000000 00ffffff ff04722f 2e2bffff ffff0100 f2052a01 00000043 41046868
0737c76d abb801cb 2204f57d be4e4579 e4f710cd 67dc1b42 27592c81 e9b5cf02
b5ac9e8b 4c9f49be 5251056b 6a6d011e 4c37f6b6 d17ede6b 55faa235 19e2ac00
000000
Block 2:
f9beb4d9
95010000
01000000 13ca7940 4c11c63e ca906bbd f190b751 2872b857 1b5143ae e8cb5737
00000000 fc07c983 d7391736 0aeda657 29d0d4d3 2533eb84 76ee9d64 aa27538f
9b4fc00a d9af5f49 ffff001d 630bea22
02
01000000 01000000 00000000 00000000 00000000 00000000 00000000 00000000
00000000 00ffffff ff04eb96 14e5ffff ffff0100 f2052a01 00000043 41046868
0737c76d abb801cb 2204f57d be4e4579 e4f710cd 67dc1b42 27592c81 e9b5cf02
b5ac9e8b 4c9f49be 5251056b 6a6d011e 4c37f6b6 d17ede6b 55faa235 19e2ac00
000000
01000000 0163451d 1002611c 1388d5ba 4ddfdf99 196a86b5 990fb5b0 dc786207
4fdcb8ee d2000000 004a4930 46022100 3dde52c6 5e339f45 7fe1015e 70eed208
872eb71e dd484c07 206b190e cb2ec3f8 02210011 c78dcfd0 3d43fa63 61242a33
6291ba2a 8c1ef5bc d5472126 2468f2bf 8dee4d01 ffffffff 0200ca9a 3b000000
001976a9 14cb2abd e8bccacc 32e893df 3a054b9e f7f227a4 ce88ac00 286bee00
00000019 76a914ee 26c56fc1 d942be8d 7a24b2a1 001dd894 69398088 ac000000
00
Block 3:
f9beb4d9
96020000
01000000 7d338254 0506faab 0d4cf179 45dda023 49db51f9 6233f24c 28002258
00000000 4806fe80 bf85931b 882ea645 77ca5a03 22bb8af2 3f277b20 55f160cd
972c8e8b 31b25f49 ffff001d e8f0c653
03
01000000 01000000 00000000 00000000 00000000 00000000 00000000 00000000
00000000 00ffffff ff044abd 8159ffff ffff0100 f2052a01 00000043 4104b95c
249d84f4 17e3e395 a1274254 28b54067 1cc15881 eb828c17 b722a53f c599e21c
a5e56c90 f340988d 3933acc7 6beb832f d64cab07 8ddf3ce7 32923031 d1a8ac00
000000
01000000 01f287b5 e067e1cf 80f7da8a f89917b5 505094db d82412d9 35b665eb
bad253d3 77010000 008c4930 46022100 96ee0d02 b35fd61e 4960b44f f396f67e
01fe17f9 de4e0c17 b6a963bd ab2b50a6 02210034 920d4daa 7e9f8abe 5675c931
495809f9 0b9c1189 d05fbaf1 dd6696a5 b0d8f301 41046868 0737c76d abb801cb
2204f57d be4e4579 e4f710cd 67dc1b42 27592c81 e9b5cf02 b5ac9e8b 4c9f49be
5251056b 6a6d011e 4c37f6b6 d17ede6b 55faa235 19e2ffff ffff0100 286bee00
00000019 76a914c5 22664fb0 e55cdc5c 0cea73b4 aad97ec8 34323288 ac000000
00
01000000 01f287b5 e067e1cf 80f7da8a f89917b5 505094db d82412d9 35b665eb
bad253d3 77000000 008c4930 46022100 b08b922a c4bde411 1c229f92 9fe6eb6a
50161f98 1f4cf47e a9214d35 bf74d380 022100d2 f6640327 e677a1e1 cc474991
b9a48ba5 bd1e0c94 d1c8df49 f7b0193b 7ea4fa01 4104b95c 249d84f4 17e3e395
a1274254 28b54067 1cc15881 eb828c17 b722a53f c599e21c a5e56c90 f340988d
3933acc7 6beb832f d64cab07 8ddf3ce7 32923031 d1a8ffff ffff0100 ca9a3b00
00000019 76a914c5 22664fb0 e55cdc5c 0cea73b4 aad97ec8 34323288 ac000000
00
Block 4:
f9beb4d9
73010000
01000000 5da36499 06f35e09 9be42a1d 87b6dd42 11bc1400 6c220694 0807eaae
00000000 48eeeaed 2d9d8522 e6201173 743823fd 4b87cd8a ca8e6408 ec75ca38
302c2ff0 89b45f49 ffff001d 00530839
02
01000000 01000000 00000000 00000000 00000000 00000000 00000000 00000000
00000000 00ffffff ff04d41d 2213ffff ffff0100 f2052a01 00000043 4104678a
fdb0fe55 48271967 f1a67130 b7105cd6 a828e039 09a67962 e0ea1f61 deb649f6
bc3f4cef 38c4f355 04e51ec1 12de5c38 4df7ba0b 8d578a4c 702b6bf1 1d5fac00
000000
01000000 0163451d 1002611c 1388d5ba 4ddfdf99 196a86b5 990fb5b0 dc786207
4fdcb8ee d2000000 004a4930 46022100 8c8fd57b 48762135 8d8f3e69 19f33e08
804736ff 83db47aa 248512e2 6df9b8ba 022100b0 c59e5ee7 bfcbfcd1 a4d83da9
55fb260e fda7f42a 25522625 a3d6f2d9 1174a701 ffffffff 0100f205 2a010000
001976a9 14c52266 4fb0e55c dc5c0cea 73b4aad9 7ec83432 3288ac00 000000
File path: reorgTest/blk_3A.dat
Block 3A:
f9beb4d9
96020000
01000000 7d338254 0506faab 0d4cf179 45dda023 49db51f9 6233f24c 28002258
00000000 5a15f573 1177a353 bdca7aab 20e16624 dfe90adc 70accadc 68016732
302c20a7 31b25f49 ffff001d 6a901440
03
01000000 01000000 00000000 00000000 00000000 00000000 00000000 00000000
00000000 00ffffff ff04ad1b e7d5ffff ffff0100 f2052a01 00000043 4104ed83
704c95d8 29046f1a c2780621 1132102c 34e9ac7f fa1b7111 0658e5b9 d1bdedc4
16f5cefc 1db0625c d0c75de8 192d2b59 2d7e3b00 bcfb4a0e 860d880f d1fcac00
000000
01000000 01f287b5 e067e1cf 80f7da8a f89917b5 505094db d82412d9 35b665eb
bad253d3 77010000 008c4930 46022100 96ee0d02 b35fd61e 4960b44f f396f67e
01fe17f9 de4e0c17 b6a963bd ab2b50a6 02210034 920d4daa 7e9f8abe 5675c931
495809f9 0b9c1189 d05fbaf1 dd6696a5 b0d8f301 41046868 0737c76d abb801cb
2204f57d be4e4579 e4f710cd 67dc1b42 27592c81 e9b5cf02 b5ac9e8b 4c9f49be
5251056b 6a6d011e 4c37f6b6 d17ede6b 55faa235 19e2ffff ffff0100 286bee00
00000019 76a914c5 22664fb0 e55cdc5c 0cea73b4 aad97ec8 34323288 ac000000
00
01000000 01f287b5 e067e1cf 80f7da8a f89917b5 505094db d82412d9 35b665eb
bad253d3 77000000 008c4930 46022100 9cc67ddd aa6f592a 6b2babd4 d6ff954f
25a784cf 4fe4bb13 afb9f49b 08955119 022100a2 d99545b7 94080757 fcf2b563
f2e91287 86332f46 0ec6b90f f085fb28 41a69701 4104b95c 249d84f4 17e3e395
a1274254 28b54067 1cc15881 eb828c17 b722a53f c599e21c a5e56c90 f340988d
3933acc7 6beb832f d64cab07 8ddf3ce7 32923031 d1a8ffff ffff0100 ca9a3b00
00000019 76a914ee 26c56fc1 d942be8d 7a24b2a1 001dd894 69398088 ac000000
00
File path: reorgTest/blk_4A.dat
Block 4A:
f9beb4d9
d4000000
01000000 aae77468 2205667d 4f413a58 47cc8fe8 9795f1d5 645d5b24 1daf3c92
00000000 361c9cde a09637a0 d0c05c3b 4e7a5d91 9edb184a 0a4c7633 d92e2ddd
f04cb854 89b45f49 ffff001d 9e9aa1e8
01
01000000 01000000 00000000 00000000 00000000 00000000 00000000 00000000
00000000 00ffffff ff0401b8 f3eaffff ffff0100 f2052a01 00000043 4104678a
fdb0fe55 48271967 f1a67130 b7105cd6 a828e039 09a67962 e0ea1f61 deb649f6
bc3f4cef 38c4f355 04e51ec1 12de5c38 4df7ba0b 8d578a4c 702b6bf1 1d5fac00
000000
File path: reorgTest/blk_5A.dat
Block 5A:
f9beb4d9
73010000
01000000 ebc7d0de 9c31a71b 7f41d275 2c080ba4 11e1854b d45cb2cf 8c1e4624
00000000 a607774b 79b8eb50 b52a5a32 c1754281 ec67f626 9561df28 57d1fe6a
ea82c696 e1b65f49 ffff001d 4a263577
02
01000000 01000000 00000000 00000000 00000000 00000000 00000000 00000000
00000000 00ffffff ff049971 0c7dffff ffff0100 f2052a01 00000043 4104678a
fdb0fe55 48271967 f1a67130 b7105cd6 a828e039 09a67962 e0ea1f61 deb649f6
bc3f4cef 38c4f355 04e51ec1 12de5c38 4df7ba0b 8d578a4c 702b6bf1 1d5fac00
000000
01000000 0163451d 1002611c 1388d5ba 4ddfdf99 196a86b5 990fb5b0 dc786207
4fdcb8ee d2000000 004a4930 46022100 8c8fd57b 48762135 8d8f3e69 19f33e08
804736ff 83db47aa 248512e2 6df9b8ba 022100b0 c59e5ee7 bfcbfcd1 a4d83da9
55fb260e fda7f42a 25522625 a3d6f2d9 1174a701 ffffffff 0100f205 2a010000
001976a9 14c52266 4fb0e55c dc5c0cea 73b4aad9 7ec83432 3288ac00 000000

View file

@ -7,7 +7,7 @@ package blockchain
import (
"fmt"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/chaincfg/chainhash"
)
// ThresholdState define the various threshold states used when voting on
@ -302,6 +302,12 @@ func (b *BlockChain) deploymentState(prevNode *blockNode, deploymentID uint32) (
}
deployment := &b.chainParams.Deployments[deploymentID]
// added to mimic LBRYcrd:
if deployment.ForceActiveAt > 0 && prevNode != nil && prevNode.height+1 >= deployment.ForceActiveAt {
return ThresholdActive, nil
}
checker := deploymentChecker{deployment: deployment, chain: b}
cache := &b.deploymentCaches[deploymentID]
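
A brief sketch (not part of the diff) of the force-activation rule the hunk above adds; `forcedActive` is a hypothetical helper, and `chaincfg.ConsensusDeployment` is assumed to carry the new ForceActiveAt field shown here.

```go
package example

import "github.com/lbryio/lbcd/chaincfg"

// forcedActive reports whether a deployment should be treated as
// ThresholdActive at the next block height regardless of miner signalling,
// mirroring LBRYcrd's behaviour.
func forcedActive(d *chaincfg.ConsensusDeployment, nextHeight int32) bool {
	return d.ForceActiveAt > 0 && nextHeight >= d.ForceActiveAt
}
```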

View file

@ -7,7 +7,7 @@ package blockchain
import (
"testing"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/chaincfg/chainhash"
)
// TestThresholdStateStringer tests the stringized output for the

View file

@ -11,9 +11,9 @@ import (
"fmt"
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
"github.com/lbryio/lbcd/wire"
)
const (

View file

@ -7,11 +7,11 @@ package blockchain
import (
"fmt"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/database"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/database"
"github.com/lbryio/lbcd/txscript"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
// txoFlags is a bitmask defining additional information and state for a

View file

@ -11,11 +11,12 @@ import (
"math/big"
"time"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/claimtrie/param"
"github.com/lbryio/lbcd/txscript"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
const (
@ -40,7 +41,7 @@ const (
// baseSubsidy is the starting subsidy amount for mined blocks. This
// value is halved every SubsidyHalvingInterval blocks.
baseSubsidy = 50 * btcutil.SatoshiPerBitcoin
baseSubsidy = 500 * btcutil.SatoshiPerBitcoin
)
var (
@ -192,17 +193,44 @@ func isBIP0030Node(node *blockNode) bool {
// At the target block generation rate for the main network, this is
// approximately every 4 years.
func CalcBlockSubsidy(height int32, chainParams *chaincfg.Params) int64 {
if chainParams.SubsidyReductionInterval == 0 {
return baseSubsidy
h := int64(height)
if h == 0 {
return btcutil.SatoshiPerBitcoin * 4e8
}
if h <= 5100 {
return btcutil.SatoshiPerBitcoin
}
if h <= 55000 {
return btcutil.SatoshiPerBitcoin * (1 + (h-5001)/100)
}
// Equivalent to: baseSubsidy / 2^(height/subsidyHalvingInterval)
return baseSubsidy >> uint(height/chainParams.SubsidyReductionInterval)
lv := (h - 55001) / int64(chainParams.SubsidyReductionInterval)
reduction := (int64(math.Sqrt((float64(8*lv))+1)) - 1) / 2
for !withinLevelBounds(reduction, lv) {
if ((reduction*reduction + reduction) >> 1) > lv {
reduction--
} else {
reduction++
}
}
subsidyReduction := btcutil.SatoshiPerBitcoin * reduction
if subsidyReduction >= baseSubsidy {
return 0
}
return baseSubsidy - subsidyReduction
}
func withinLevelBounds(reduction int64, lv int64) bool {
if ((reduction*reduction + reduction) >> 1) > lv {
return false
}
reduction++
return ((reduction*reduction + reduction) >> 1) > lv
}
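
A minimal sketch (not part of the diff) that exercises the new schedule: a one-off premine at height 0, one coin per block up to height 5100, a ramp of one extra coin per 100 blocks up to height 55000, then a slow decline of one coin per level, where level lengths grow with SubsidyReductionInterval. `printSubsidies` and the sample heights are hypothetical.

```go
package example

import (
	"fmt"

	"github.com/lbryio/lbcd/blockchain"
	"github.com/lbryio/lbcd/chaincfg"
)

// printSubsidies prints the block reward at a few representative heights to
// show the piecewise schedule implemented by CalcBlockSubsidy.
func printSubsidies() {
	for _, h := range []int32{0, 1, 5100, 5101, 55000, 55001, 400000} {
		subsidy := blockchain.CalcBlockSubsidy(h, &chaincfg.MainNetParams)
		fmt.Printf("height %6d -> %d satoshis\n", h, subsidy)
	}
}
```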
// CheckTransactionSanity performs some preliminary checks on a transaction to
// ensure it is sane. These checks are context free.
func CheckTransactionSanity(tx *btcutil.Tx) error {
// ensure it is sane.
func CheckTransactionSanity(tx *btcutil.Tx, enforceSoftFork bool) error {
// A transaction must have at least one input.
msgTx := tx.MsgTx()
if len(msgTx.TxIn) == 0 {
@ -261,6 +289,11 @@ func CheckTransactionSanity(tx *btcutil.Tx) error {
btcutil.MaxSatoshi)
return ruleError(ErrBadTxOutValue, str)
}
err := txscript.AllClaimsAreSane(txOut.PkScript, enforceSoftFork)
if err != nil {
return ruleError(ErrBadTxOutValue, err.Error())
}
}
// Check for duplicate transaction inputs.
@ -324,7 +357,7 @@ func checkProofOfWork(header *wire.BlockHeader, powLimit *big.Int, flags Behavio
// to avoid proof of work checks is set.
if flags&BFNoPoWCheck != BFNoPoWCheck {
// The block hash must be less than the claimed target.
hash := header.BlockHash()
hash := header.BlockPoWHash()
hashNum := HashToBig(&hash)
if hashNum.Cmp(target) > 0 {
str := fmt.Sprintf("block hash of %064x is higher than "+
@ -515,7 +548,7 @@ func checkBlockSanity(block *btcutil.Block, powLimit *big.Int, timeSource Median
// Do some preliminary checks on each transaction to ensure they are
// sane before continuing.
for _, tx := range transactions {
err := CheckTransactionSanity(tx)
err := CheckTransactionSanity(tx, block.Height() >= param.ActiveParams.GrandForkHeight)
if err != nil {
return err
}
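
A hypothetical sketch (not part of the diff) of how a caller outside block validation, such as mempool acceptance, might derive the enforceSoftFork argument; `sanityCheckAtHeight` is an assumed helper.

```go
package example

import (
	"github.com/lbryio/lbcd/blockchain"
	"github.com/lbryio/lbcd/claimtrie/param"
	btcutil "github.com/lbryio/lbcutil"
)

// sanityCheckAtHeight enforces the stricter claim-script rules only once the
// transaction would confirm at or above the fork height.
func sanityCheckAtHeight(tx *btcutil.Tx, nextHeight int32) error {
	return blockchain.CheckTransactionSanity(tx, nextHeight >= param.ActiveParams.GrandForkHeight)
}
```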

View file

@ -5,15 +5,16 @@
package blockchain
import (
"encoding/hex"
"math"
"reflect"
"testing"
"time"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
// TestSequenceLocksActive tests the SequenceLockActive function to ensure it
@ -63,96 +64,11 @@ func TestSequenceLocksActive(t *testing.T) {
}
}
// TestCheckConnectBlockTemplate tests the CheckConnectBlockTemplate function to
// ensure it fails.
func TestCheckConnectBlockTemplate(t *testing.T) {
// Create a new database and chain instance to run tests against.
chain, teardownFunc, err := chainSetup("checkconnectblocktemplate",
&chaincfg.MainNetParams)
if err != nil {
t.Errorf("Failed to setup chain instance: %v", err)
return
}
defer teardownFunc()
// Since we're not dealing with the real block chain, set the coinbase
// maturity to 1.
chain.TstSetCoinbaseMaturity(1)
// Load up blocks such that there is a side chain.
// (genesis block) -> 1 -> 2 -> 3 -> 4
// \-> 3a
testFiles := []string{
"blk_0_to_4.dat.bz2",
"blk_3A.dat.bz2",
}
var blocks []*btcutil.Block
for _, file := range testFiles {
blockTmp, err := loadBlocks(file)
if err != nil {
t.Fatalf("Error loading file: %v\n", err)
}
blocks = append(blocks, blockTmp...)
}
for i := 1; i <= 3; i++ {
isMainChain, _, err := chain.ProcessBlock(blocks[i], BFNone)
if err != nil {
t.Fatalf("CheckConnectBlockTemplate: Received unexpected error "+
"processing block %d: %v", i, err)
}
if !isMainChain {
t.Fatalf("CheckConnectBlockTemplate: Expected block %d to connect "+
"to main chain", i)
}
}
// Block 3 should fail to connect since it's already inserted.
err = chain.CheckConnectBlockTemplate(blocks[3])
if err == nil {
t.Fatal("CheckConnectBlockTemplate: Did not received expected error " +
"on block 3")
}
// Block 4 should connect successfully to tip of chain.
err = chain.CheckConnectBlockTemplate(blocks[4])
if err != nil {
t.Fatalf("CheckConnectBlockTemplate: Received unexpected error on "+
"block 4: %v", err)
}
// Block 3a should fail to connect since does not build on chain tip.
err = chain.CheckConnectBlockTemplate(blocks[5])
if err == nil {
t.Fatal("CheckConnectBlockTemplate: Did not received expected error " +
"on block 3a")
}
// Block 4 should connect even if proof of work is invalid.
invalidPowBlock := *blocks[4].MsgBlock()
invalidPowBlock.Header.Nonce++
err = chain.CheckConnectBlockTemplate(btcutil.NewBlock(&invalidPowBlock))
if err != nil {
t.Fatalf("CheckConnectBlockTemplate: Received unexpected error on "+
"block 4 with bad nonce: %v", err)
}
// Invalid block building on chain tip should fail to connect.
invalidBlock := *blocks[4].MsgBlock()
invalidBlock.Header.Bits--
err = chain.CheckConnectBlockTemplate(btcutil.NewBlock(&invalidBlock))
if err == nil {
t.Fatal("CheckConnectBlockTemplate: Did not received expected error " +
"on block 4 with invalid difficulty bits")
}
}
// TestCheckBlockSanity tests the CheckBlockSanity function to ensure it works
// as expected.
func TestCheckBlockSanity(t *testing.T) {
powLimit := chaincfg.MainNetParams.PowLimit
block := btcutil.NewBlock(&Block100000)
block := GetBlock100000()
timeSource := NewMedianTime()
err := CheckBlockSanity(block, powLimit, timeSource)
if err != nil {
@ -234,254 +150,12 @@ func TestCheckSerializedHeight(t *testing.T) {
}
}
// Block100000 defines block 100,000 of the block chain. It is used to
var block100000Hex = "0000002024cbdc8644ee3983e66b003a0733891c069ca74c114c034c7b3e2e7ad7a12cd67e95e0555c0e056f6f2af538268ff9d21b420e529750d08eacb25c40f1322936637109b8a051157604c1c163cd39237687f6244b4e6d2b3a94e9d816babaecbb10c56058c811041b2b9c43000701000000010000000000000000000000000000000000000000000000000000000000000000ffffffff2003a086010410c56058081011314abf0100000d2f6e6f64655374726174756d2f000000000180354a6e0a0000001976a914b5e74e7cc9e1f480a6599895c92aab1401f870f188ac000000000100000002f1b75decc2c1c59c2178852822de412f574ad9796b65ac9092a79630d8109aaf000000006a47304402202f78ed3bf8dcadb6c17e289cd06e9c96b02c6f23aa1f446a4a7896a31cfd1e4702202862261e2eb59475ac91092c620b3cac5a831372bafc446d5ee450866040b532012103db4f3785354d84311fab7624c52784a61e3046d8f364463d327bdd96281b5b90feffffff987ee6b4bf95548d01e443683261dd0ffdcb2eb335b2f7119df0d41b60756b92010000006a47304402200c422c7560b6418d45443138bb957ec44eb293a639f4b2235a622205ca6cac370220759f15d5dc2543fd1aef80104c93427fcb025847bf895669025d1d82c62fbf6801210201864b998db5436990a0029fc3fb153c09e8c2689214b91c8ed68784995c8da0feffffff022bccfedd000000001976a914738f16132942a01d00cb9699bd058c4925aada3288ac1f4d030c000000001976a914c4292e757f5ff6a27c2f0a87d3a4aea5e46c275a88ac9f86010001000000015fbb26ad6d818186032baeef4d3f475dfe165c6da2d93411b8ec5f9f523cf1a4000000006a4730440220356999ad5a9f6f09b676f17dd4a2617a0af234722d68a50edc3768c983c0623d022056b4e5531608aeb0205fde9c44f741158da3bba1f4c3788c9fe79d36d43ea355012103509893a2a7c765d49ac9ff70126cb1af54871d70baba2c7e39ec9b4096289a9bfeffffff02389332fa080000001976a914f85e054405fbcedc2341cf8bf41ea989090587a288acf9321a41000000001976a914e85e90c048fdfbe1c2117a7132856ff4b39b470188ac9f86010001000000013508189b9bb61ac2aa536b905447b58f6c58c17cdef305240f566caa689d760a010000006a4730440220669a2b86e5abe58bae54829d3c271669540a9ad965c2fb79e8cc1fb609c0b60002202f958403d979138075cb53d8cb5ff6bb14e18d66dfdb6701c7d43a8ceeed0fa80121029935a602205a3fb72446064f3bc3a55ab9cd2e3459bf2ffdf80a48ab029d4b42feffffff02523c2f13000000001976a914c5b2ae398496f0f9ceaf81b83c28a27ddc890e3588ac211958f2000000001976a914ac65f1d16e5a2af37408b5d65406000a7ea143ca88ac9f8601000100000001bdd724322c555a21d5eb62d4aadbdc051663bcd4ec03f8d9005992f299783c21000000006a47304402205448141a2a788f73310025123bd24f5bee01dd8f48f18d7abc62d7c26465008902207ab46e6ddf6ba416decf3fbb97b6946a1428ea0a7c25a55cab47c47110d8e9ce0121029d6ff3b1235f2a08560b23dd4a08b14cc415b544801b885365736ea8ab1d3470feffffff029d104ccf000000001976a914999d5b0e3d5efcf601c711127b91841afbf5c37a88ace5c5a07f070000001976a9144aade372298eb387da9b6ac69d215a213e822f3f88ac9f86010001000000011658304d4ce796cd450228a10fdf647c6ea42295c9f5e1663df11481af1c884d010000006b483045022100a35d5d3ccde85b41559047d976ae6210b8e6ba5653c53aae1adc90048de0761002200d6bd6ebc6d73f97855f435c6fd595009ee71d23bb34870ab83ad53f67eeb22b012102d2f681ebfd1a570416d602986a47ca4254d8dedf2935b3f8c2ba55dcee8e98f4feffffff025ee913e6020000001976a91468076c9380d3c6b468ad1d6109c36770fb181e8f88acb165394f000000001976a9147ae970e81b3657cbb59df26517e372165807be0088ac9f86010001000000018f285109f78524a88ff328a4f94de2ac03224c50984b11c68adda192e8f78efa010000006b483045022100d77f2ac32dd6a3015f02f7115a13f617c57952fc5d53a33a87dc3fc00ffe1864022006455f74cff864b10424e445c530a59243f86d309dc92c5010ec5709e38471ab012102fdac7335b41edcd2846fc7e2166bb48312ee583ed6ff70fb5c27bcb2addaad86feffffff028b7a6d5c000000001976a914c65106d2e7ea4ec6aa8aa30ba4d11cfd1143123388ac5934c228000000001976a914d1c4d190b07edb972b91f33c36c6568b80358dd488ac9f860100"
// GetBlock100000 defines block 100,000 of the block chain. It is used to
// test Block operations.
var Block100000 = wire.MsgBlock{
Header: wire.BlockHeader{
Version: 1,
PrevBlock: chainhash.Hash([32]byte{ // Make go vet happy.
0x50, 0x12, 0x01, 0x19, 0x17, 0x2a, 0x61, 0x04,
0x21, 0xa6, 0xc3, 0x01, 0x1d, 0xd3, 0x30, 0xd9,
0xdf, 0x07, 0xb6, 0x36, 0x16, 0xc2, 0xcc, 0x1f,
0x1c, 0xd0, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
}), // 000000000002d01c1fccc21636b607dfd930d31d01c3a62104612a1719011250
MerkleRoot: chainhash.Hash([32]byte{ // Make go vet happy.
0x66, 0x57, 0xa9, 0x25, 0x2a, 0xac, 0xd5, 0xc0,
0xb2, 0x94, 0x09, 0x96, 0xec, 0xff, 0x95, 0x22,
0x28, 0xc3, 0x06, 0x7c, 0xc3, 0x8d, 0x48, 0x85,
0xef, 0xb5, 0xa4, 0xac, 0x42, 0x47, 0xe9, 0xf3,
}), // f3e94742aca4b5ef85488dc37c06c3282295ffec960994b2c0d5ac2a25a95766
Timestamp: time.Unix(1293623863, 0), // 2010-12-29 11:57:43 +0000 UTC
Bits: 0x1b04864c, // 453281356
Nonce: 0x10572b0f, // 274148111
},
Transactions: []*wire.MsgTx{
{
Version: 1,
TxIn: []*wire.TxIn{
{
PreviousOutPoint: wire.OutPoint{
Hash: chainhash.Hash{},
Index: 0xffffffff,
},
SignatureScript: []byte{
0x04, 0x4c, 0x86, 0x04, 0x1b, 0x02, 0x06, 0x02,
},
Sequence: 0xffffffff,
},
},
TxOut: []*wire.TxOut{
{
Value: 0x12a05f200, // 5000000000
PkScript: []byte{
0x41, // OP_DATA_65
0x04, 0x1b, 0x0e, 0x8c, 0x25, 0x67, 0xc1, 0x25,
0x36, 0xaa, 0x13, 0x35, 0x7b, 0x79, 0xa0, 0x73,
0xdc, 0x44, 0x44, 0xac, 0xb8, 0x3c, 0x4e, 0xc7,
0xa0, 0xe2, 0xf9, 0x9d, 0xd7, 0x45, 0x75, 0x16,
0xc5, 0x81, 0x72, 0x42, 0xda, 0x79, 0x69, 0x24,
0xca, 0x4e, 0x99, 0x94, 0x7d, 0x08, 0x7f, 0xed,
0xf9, 0xce, 0x46, 0x7c, 0xb9, 0xf7, 0xc6, 0x28,
0x70, 0x78, 0xf8, 0x01, 0xdf, 0x27, 0x6f, 0xdf,
0x84, // 65-byte signature
0xac, // OP_CHECKSIG
},
},
},
LockTime: 0,
},
{
Version: 1,
TxIn: []*wire.TxIn{
{
PreviousOutPoint: wire.OutPoint{
Hash: chainhash.Hash([32]byte{ // Make go vet happy.
0x03, 0x2e, 0x38, 0xe9, 0xc0, 0xa8, 0x4c, 0x60,
0x46, 0xd6, 0x87, 0xd1, 0x05, 0x56, 0xdc, 0xac,
0xc4, 0x1d, 0x27, 0x5e, 0xc5, 0x5f, 0xc0, 0x07,
0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87,
}), // 87a157f3fd88ac7907c05fc55e271dc4acdc5605d187d646604ca8c0e9382e03
Index: 0,
},
SignatureScript: []byte{
0x49, // OP_DATA_73
0x30, 0x46, 0x02, 0x21, 0x00, 0xc3, 0x52, 0xd3,
0xdd, 0x99, 0x3a, 0x98, 0x1b, 0xeb, 0xa4, 0xa6,
0x3a, 0xd1, 0x5c, 0x20, 0x92, 0x75, 0xca, 0x94,
0x70, 0xab, 0xfc, 0xd5, 0x7d, 0xa9, 0x3b, 0x58,
0xe4, 0xeb, 0x5d, 0xce, 0x82, 0x02, 0x21, 0x00,
0x84, 0x07, 0x92, 0xbc, 0x1f, 0x45, 0x60, 0x62,
0x81, 0x9f, 0x15, 0xd3, 0x3e, 0xe7, 0x05, 0x5c,
0xf7, 0xb5, 0xee, 0x1a, 0xf1, 0xeb, 0xcc, 0x60,
0x28, 0xd9, 0xcd, 0xb1, 0xc3, 0xaf, 0x77, 0x48,
0x01, // 73-byte signature
0x41, // OP_DATA_65
0x04, 0xf4, 0x6d, 0xb5, 0xe9, 0xd6, 0x1a, 0x9d,
0xc2, 0x7b, 0x8d, 0x64, 0xad, 0x23, 0xe7, 0x38,
0x3a, 0x4e, 0x6c, 0xa1, 0x64, 0x59, 0x3c, 0x25,
0x27, 0xc0, 0x38, 0xc0, 0x85, 0x7e, 0xb6, 0x7e,
0xe8, 0xe8, 0x25, 0xdc, 0xa6, 0x50, 0x46, 0xb8,
0x2c, 0x93, 0x31, 0x58, 0x6c, 0x82, 0xe0, 0xfd,
0x1f, 0x63, 0x3f, 0x25, 0xf8, 0x7c, 0x16, 0x1b,
0xc6, 0xf8, 0xa6, 0x30, 0x12, 0x1d, 0xf2, 0xb3,
0xd3, // 65-byte pubkey
},
Sequence: 0xffffffff,
},
},
TxOut: []*wire.TxOut{
{
Value: 0x2123e300, // 556000000
PkScript: []byte{
0x76, // OP_DUP
0xa9, // OP_HASH160
0x14, // OP_DATA_20
0xc3, 0x98, 0xef, 0xa9, 0xc3, 0x92, 0xba, 0x60,
0x13, 0xc5, 0xe0, 0x4e, 0xe7, 0x29, 0x75, 0x5e,
0xf7, 0xf5, 0x8b, 0x32,
0x88, // OP_EQUALVERIFY
0xac, // OP_CHECKSIG
},
},
{
Value: 0x108e20f00, // 4444000000
PkScript: []byte{
0x76, // OP_DUP
0xa9, // OP_HASH160
0x14, // OP_DATA_20
0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, 0x3f,
0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f, 0x6b,
0x52, 0xde, 0x3d, 0x7c,
0x88, // OP_EQUALVERIFY
0xac, // OP_CHECKSIG
},
},
},
LockTime: 0,
},
{
Version: 1,
TxIn: []*wire.TxIn{
{
PreviousOutPoint: wire.OutPoint{
Hash: chainhash.Hash([32]byte{ // Make go vet happy.
0xc3, 0x3e, 0xbf, 0xf2, 0xa7, 0x09, 0xf1, 0x3d,
0x9f, 0x9a, 0x75, 0x69, 0xab, 0x16, 0xa3, 0x27,
0x86, 0xaf, 0x7d, 0x7e, 0x2d, 0xe0, 0x92, 0x65,
0xe4, 0x1c, 0x61, 0xd0, 0x78, 0x29, 0x4e, 0xcf,
}), // cf4e2978d0611ce46592e02d7e7daf8627a316ab69759a9f3df109a7f2bf3ec3
Index: 1,
},
SignatureScript: []byte{
0x47, // OP_DATA_71
0x30, 0x44, 0x02, 0x20, 0x03, 0x2d, 0x30, 0xdf,
0x5e, 0xe6, 0xf5, 0x7f, 0xa4, 0x6c, 0xdd, 0xb5,
0xeb, 0x8d, 0x0d, 0x9f, 0xe8, 0xde, 0x6b, 0x34,
0x2d, 0x27, 0x94, 0x2a, 0xe9, 0x0a, 0x32, 0x31,
0xe0, 0xba, 0x33, 0x3e, 0x02, 0x20, 0x3d, 0xee,
0xe8, 0x06, 0x0f, 0xdc, 0x70, 0x23, 0x0a, 0x7f,
0x5b, 0x4a, 0xd7, 0xd7, 0xbc, 0x3e, 0x62, 0x8c,
0xbe, 0x21, 0x9a, 0x88, 0x6b, 0x84, 0x26, 0x9e,
0xae, 0xb8, 0x1e, 0x26, 0xb4, 0xfe, 0x01,
0x41, // OP_DATA_65
0x04, 0xae, 0x31, 0xc3, 0x1b, 0xf9, 0x12, 0x78,
0xd9, 0x9b, 0x83, 0x77, 0xa3, 0x5b, 0xbc, 0xe5,
0xb2, 0x7d, 0x9f, 0xff, 0x15, 0x45, 0x68, 0x39,
0xe9, 0x19, 0x45, 0x3f, 0xc7, 0xb3, 0xf7, 0x21,
0xf0, 0xba, 0x40, 0x3f, 0xf9, 0x6c, 0x9d, 0xee,
0xb6, 0x80, 0xe5, 0xfd, 0x34, 0x1c, 0x0f, 0xc3,
0xa7, 0xb9, 0x0d, 0xa4, 0x63, 0x1e, 0xe3, 0x95,
0x60, 0x63, 0x9d, 0xb4, 0x62, 0xe9, 0xcb, 0x85,
0x0f, // 65-byte pubkey
},
Sequence: 0xffffffff,
},
},
TxOut: []*wire.TxOut{
{
Value: 0xf4240, // 1000000
PkScript: []byte{
0x76, // OP_DUP
0xa9, // OP_HASH160
0x14, // OP_DATA_20
0xb0, 0xdc, 0xbf, 0x97, 0xea, 0xbf, 0x44, 0x04,
0xe3, 0x1d, 0x95, 0x24, 0x77, 0xce, 0x82, 0x2d,
0xad, 0xbe, 0x7e, 0x10,
0x88, // OP_EQUALVERIFY
0xac, // OP_CHECKSIG
},
},
{
Value: 0x11d260c0, // 299000000
PkScript: []byte{
0x76, // OP_DUP
0xa9, // OP_HASH160
0x14, // OP_DATA_20
0x6b, 0x12, 0x81, 0xee, 0xc2, 0x5a, 0xb4, 0xe1,
0xe0, 0x79, 0x3f, 0xf4, 0xe0, 0x8a, 0xb1, 0xab,
0xb3, 0x40, 0x9c, 0xd9,
0x88, // OP_EQUALVERIFY
0xac, // OP_CHECKSIG
},
},
},
LockTime: 0,
},
{
Version: 1,
TxIn: []*wire.TxIn{
{
PreviousOutPoint: wire.OutPoint{
Hash: chainhash.Hash([32]byte{ // Make go vet happy.
0x0b, 0x60, 0x72, 0xb3, 0x86, 0xd4, 0xa7, 0x73,
0x23, 0x52, 0x37, 0xf6, 0x4c, 0x11, 0x26, 0xac,
0x3b, 0x24, 0x0c, 0x84, 0xb9, 0x17, 0xa3, 0x90,
0x9b, 0xa1, 0xc4, 0x3d, 0xed, 0x5f, 0x51, 0xf4,
}), // f4515fed3dc4a19b90a317b9840c243bac26114cf637522373a7d486b372600b
Index: 0,
},
SignatureScript: []byte{
0x49, // OP_DATA_73
0x30, 0x46, 0x02, 0x21, 0x00, 0xbb, 0x1a, 0xd2,
0x6d, 0xf9, 0x30, 0xa5, 0x1c, 0xce, 0x11, 0x0c,
0xf4, 0x4f, 0x7a, 0x48, 0xc3, 0xc5, 0x61, 0xfd,
0x97, 0x75, 0x00, 0xb1, 0xae, 0x5d, 0x6b, 0x6f,
0xd1, 0x3d, 0x0b, 0x3f, 0x4a, 0x02, 0x21, 0x00,
0xc5, 0xb4, 0x29, 0x51, 0xac, 0xed, 0xff, 0x14,
0xab, 0xba, 0x27, 0x36, 0xfd, 0x57, 0x4b, 0xdb,
0x46, 0x5f, 0x3e, 0x6f, 0x8d, 0xa1, 0x2e, 0x2c,
0x53, 0x03, 0x95, 0x4a, 0xca, 0x7f, 0x78, 0xf3,
0x01, // 73-byte signature
0x41, // OP_DATA_65
0x04, 0xa7, 0x13, 0x5b, 0xfe, 0x82, 0x4c, 0x97,
0xec, 0xc0, 0x1e, 0xc7, 0xd7, 0xe3, 0x36, 0x18,
0x5c, 0x81, 0xe2, 0xaa, 0x2c, 0x41, 0xab, 0x17,
0x54, 0x07, 0xc0, 0x94, 0x84, 0xce, 0x96, 0x94,
0xb4, 0x49, 0x53, 0xfc, 0xb7, 0x51, 0x20, 0x65,
0x64, 0xa9, 0xc2, 0x4d, 0xd0, 0x94, 0xd4, 0x2f,
0xdb, 0xfd, 0xd5, 0xaa, 0xd3, 0xe0, 0x63, 0xce,
0x6a, 0xf4, 0xcf, 0xaa, 0xea, 0x4e, 0xa1, 0x4f,
0xbb, // 65-byte pubkey
},
Sequence: 0xffffffff,
},
},
TxOut: []*wire.TxOut{
{
Value: 0xf4240, // 1000000
PkScript: []byte{
0x76, // OP_DUP
0xa9, // OP_HASH160
0x14, // OP_DATA_20
0x39, 0xaa, 0x3d, 0x56, 0x9e, 0x06, 0xa1, 0xd7,
0x92, 0x6d, 0xc4, 0xbe, 0x11, 0x93, 0xc9, 0x9b,
0xf2, 0xeb, 0x9e, 0xe0,
0x88, // OP_EQUALVERIFY
0xac, // OP_CHECKSIG
},
},
},
LockTime: 0,
},
},
func GetBlock100000() *btcutil.Block {
var block100000Bytes, _ = hex.DecodeString(block100000Hex)
var results, _ = btcutil.NewBlockFromBytes(block100000Bytes)
return results
}
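For reference, a minimal sketch of how this helper might be exercised in a test. It assumes `block100000Hex` is the hex fixture defined alongside `GetBlock100000` and that the test lives in the same package; since the helper swallows decode errors, a nil check stands in for error handling.

```go
package blockchain // placement assumed: wherever GetBlock100000 is defined

import "testing"

// TestGetBlock100000Decodes is a hypothetical sanity check that the hex
// fixture decodes into a usable block.
func TestGetBlock100000Decodes(t *testing.T) {
	block := GetBlock100000()
	if block == nil {
		t.Fatal("block100000Hex failed to decode into a block")
	}
	t.Logf("block 100000: hash=%s, %d transactions", block.Hash(), len(block.Transactions()))
}
```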

View file

@ -7,7 +7,7 @@ package blockchain
import (
"math"
"github.com/btcsuite/btcd/chaincfg"
"github.com/lbryio/lbcd/chaincfg"
)
const (
@ -195,6 +195,12 @@ func (b *BlockChain) calcNextBlockVersion(prevNode *blockNode) (int32, error) {
expectedVersion := uint32(vbTopBits)
for id := 0; id < len(b.chainParams.Deployments); id++ {
deployment := &b.chainParams.Deployments[id]
// added to mimic LBRYcrd:
if deployment.ForceActiveAt > 0 && prevNode != nil && prevNode.height+1 >= deployment.ForceActiveAt {
continue
}
cache := &b.deploymentCaches[id]
checker := deploymentChecker{deployment: deployment, chain: b}
state, err := b.thresholdState(prevNode, checker, cache)
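For readers unfamiliar with the LBRYcrd behaviour being mimicked here: once a deployment's `ForceActiveAt` height is reached, the deployment is skipped entirely, so no version bit is signalled for it and no threshold-state lookup is performed. A minimal sketch of that rule, assuming the `ForceActiveAt` field shown in the hunk above (`wouldSignalBit` is a hypothetical helper, not part of this change):

```go
package blockchain

import "github.com/lbryio/lbcd/chaincfg"

// wouldSignalBit reports whether a deployment still needs version-bit
// signalling for a block at nextHeight. Force-activated deployments are
// treated as active and skipped; everything else goes through the normal
// threshold-state machinery.
func wouldSignalBit(dep *chaincfg.ConsensusDeployment, nextHeight int32) bool {
	if dep.ForceActiveAt > 0 && nextHeight >= dep.ForceActiveAt {
		return false
	}
	return true
}
```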

View file

@ -7,9 +7,9 @@ package blockchain
import (
"fmt"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/txscript"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
const (
@ -20,11 +20,11 @@ const (
// weight of a "base" byte is 4, while the weight of a witness byte is
// 1. As a result, for a block to be valid, the BlockWeight MUST be
// less than, or equal to MaxBlockWeight.
MaxBlockWeight = 4000000
MaxBlockWeight = 8000000
// MaxBlockBaseSize is the maximum number of bytes within a block
// which can be allocated to non-witness data.
MaxBlockBaseSize = 1000000
MaxBlockBaseSize = 8000000
// MaxBlockSigOpsCost is the maximum number of signature operations
// allowed for a block. It is calculated via a weighted algorithm which
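As a reminder of how these constants are used (a sketch following btcd's usual weight formula, not necessarily lbcd's exact code): block weight is the stripped size times three plus the total size, and that figure must stay at or below the new 8,000,000 limit.

```go
package main

import "fmt"

const (
	witnessScaleFactor = 4       // weight of a base byte relative to a witness byte
	maxBlockWeight     = 8000000 // the raised limit from the hunk above
)

// blockWeight mirrors the usual formula: (stripped size * 3) + total size.
func blockWeight(strippedSize, totalSize int64) int64 {
	return strippedSize*(witnessScaleFactor-1) + totalSize
}

func main() {
	// A hypothetical 1.9 MB block with 0.5 MB of witness data.
	stripped, total := int64(1_400_000), int64(1_900_000)
	fmt.Println(blockWeight(stripped, total) <= maxBlockWeight) // true: 6,100,000 <= 8,000,000
}
```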

View file

@ -1,68 +1,11 @@
btcec
=====
[![Build Status](https://github.com/btcsuite/btcd/workflows/Build%20and%20Test/badge.svg)](https://github.com/btcsuite/btcd/actions)
[![ISC License](http://img.shields.io/badge/license-ISC-blue.svg)](http://copyfree.org)
[![GoDoc](https://pkg.go.dev/github.com/btcsuite/btcd/btcec?status.png)](https://pkg.go.dev/github.com/btcsuite/btcd/btcec)
Package btcec implements elliptic curve cryptography needed for working with
btcec implements elliptic curve cryptography needed for working with
Bitcoin (secp256k1 only for now). It is designed so that it may be used with the
standard crypto/ecdsa packages provided with Go. A comprehensive suite of tests
is provided to ensure proper functionality. Package btcec was originally based
on work from ThePiachu which is licensed under the same terms as Go, but it has
significantly diverged since then. The btcsuite developers' original is licensed
under the liberal ISC license.
Although this package was primarily written for btcd, it has intentionally been
designed so it can be used as a standalone package for any projects needing to
use secp256k1 elliptic curve cryptography.
## Installation and Updating
```bash
$ go get -u github.com/btcsuite/btcd/btcec
```
## Examples
* [Sign Message](https://pkg.go.dev/github.com/btcsuite/btcd/btcec#example-package--SignMessage)
Demonstrates signing a message with a secp256k1 private key that is first
parsed from raw bytes and serializing the generated signature.
* [Verify Signature](https://pkg.go.dev/github.com/btcsuite/btcd/btcec#example-package--VerifySignature)
Demonstrates verifying a secp256k1 signature against a public key that is
first parsed from raw bytes. The signature is also parsed from raw bytes.
* [Encryption](https://pkg.go.dev/github.com/btcsuite/btcd/btcec#example-package--EncryptMessage)
Demonstrates encrypting a message for a public key that is first parsed from
raw bytes, then decrypting it using the corresponding private key.
* [Decryption](https://pkg.go.dev/github.com/btcsuite/btcd/btcec#example-package--DecryptMessage)
Demonstrates decrypting a message using a private key that is first parsed
from raw bytes.
## GPG Verification Key
All official release tags are signed by Conformal so users can ensure the code
has not been tampered with and is coming from the btcsuite developers. To
verify the signature perform the following:
- Download the public key from the Conformal website at
https://opensource.conformal.com/GIT-GPG-KEY-conformal.txt
- Import the public key into your GPG keyring:
```bash
gpg --import GIT-GPG-KEY-conformal.txt
```
- Verify the release tag with the following command where `TAG_NAME` is a
placeholder for the specific tag:
```bash
git tag -v TAG_NAME
```
## License
Package btcec is licensed under the [copyfree](http://copyfree.org) ISC License
except for btcec.go and btcec_test.go which is under the same license as Go.
significantly diverged since then.

View file

@ -8,8 +8,8 @@ import (
"encoding/hex"
"fmt"
"github.com/btcsuite/btcd/btcec"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/btcec"
"github.com/lbryio/lbcd/chaincfg/chainhash"
)
// This example demonstrates signing a message with a secp256k1 private key that

View file

@ -5,6 +5,7 @@
// This file is ignored during the regular build due to the following build tag.
// It is called by go generate and used to automatically generate pre-computed
// tables used to accelerate operations.
//go:build ignore
// +build ignore
package main
@ -17,7 +18,7 @@ import (
"log"
"os"
"github.com/btcsuite/btcd/btcec"
"github.com/lbryio/lbcd/btcec"
)
func main() {

View file

@ -4,6 +4,7 @@
// This file is ignored during the regular build due to the following build tag.
// This build tag is set during go generate.
//go:build gensecp256k1
// +build gensecp256k1
package btcec

View file

@ -12,7 +12,7 @@ import (
"strings"
)
//go:generate go run -tags gensecp256k1 genprecomps.go
//go:generate rm -f gensecp256k1.go
//go:generate go run -tags gensecp256k1 genprecomps.go
// loadS256BytePoints decompresses and deserializes the pre-computed byte points
// used to accelerate scalar base multiplication for the secp256k1 curve. This

View file

@ -1,70 +1,8 @@
btcjson
=======
[![Build Status](https://github.com/btcsuite/btcd/workflows/Build%20and%20Test/badge.svg)](https://github.com/btcsuite/btcd/actions)
[![ISC License](http://img.shields.io/badge/license-ISC-blue.svg)](http://copyfree.org)
[![GoDoc](https://img.shields.io/badge/godoc-reference-blue.svg)](https://pkg.go.dev/github.com/btcsuite/btcd/btcjson)
Package btcjson implements concrete types for marshalling to and from the
bitcoin JSON-RPC API. A comprehensive suite of tests is provided to ensure
proper functionality.
Although this package was primarily written for the btcsuite, it has
intentionally been designed so it can be used as a standalone package for any
projects needing to marshal to and from bitcoin JSON-RPC requests and responses.
Note that although it's possible to use this package directly to implement an
RPC client, it is not recommended since it is only intended as an infrastructure
package. Instead, RPC clients should use the
[btcrpcclient](https://github.com/btcsuite/btcrpcclient) package which provides
a full blown RPC client with many features such as automatic connection
management, websocket support, automatic notification re-registration on
reconnect, and conversion from the raw underlying RPC types (strings, floats,
ints, etc) to higher-level types with many nice and useful properties.
## Installation and Updating
```bash
$ go get -u github.com/btcsuite/btcd/btcjson
```
## Examples
* [Marshal Command](https://pkg.go.dev/github.com/btcsuite/btcd/btcjson#example-MarshalCmd)
Demonstrates how to create and marshal a command into a JSON-RPC request.
* [Unmarshal Command](https://pkg.go.dev/github.com/btcsuite/btcd/btcjson#example-UnmarshalCmd)
Demonstrates how to unmarshal a JSON-RPC request and then unmarshal the
concrete request into a concrete command.
* [Marshal Response](https://pkg.go.dev/github.com/btcsuite/btcd/btcjson#example-MarshalResponse)
Demonstrates how to marshal a JSON-RPC response.
* [Unmarshal Response](https://pkg.go.dev/github.com/btcsuite/btcd/btcjson#example-package--UnmarshalResponse)
Demonstrates how to unmarshal a JSON-RPC response and then unmarshal the
result field in the response to a concrete type.
## GPG Verification Key
All official release tags are signed by Conformal so users can ensure the code
has not been tampered with and is coming from the btcsuite developers. To
verify the signature perform the following:
- Download the public key from the Conformal website at
https://opensource.conformal.com/GIT-GPG-KEY-conformal.txt
- Import the public key into your GPG keyring:
```bash
gpg --import GIT-GPG-KEY-conformal.txt
```
- Verify the release tag with the following command where `TAG_NAME` is a
placeholder for the specific tag:
```bash
git tag -v TAG_NAME
```
## License
Package btcjson is licensed under the [copyfree](http://copyfree.org) ISC
License.
proper functionality.

View file

@ -12,7 +12,7 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestBtcdExtCmds tests all of the btcd extended commands marshal and unmarshal

View file

@ -9,7 +9,7 @@ import (
"encoding/json"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestBtcdExtCustomResults ensures any results that have custom marshalling

View file

@ -11,7 +11,7 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestBtcWalletExtCmds tests all of the btcwallet extended commands marshal and

View file

@ -13,7 +13,7 @@ import (
"fmt"
"reflect"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/wire"
)
// AddNodeSubCmd defines the type used in the addnode JSON-RPC command for the

View file

@ -12,8 +12,8 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/btcsuite/btcd/wire"
"github.com/lbryio/lbcd/btcjson"
"github.com/lbryio/lbcd/wire"
)
// TestChainSvrCmds tests all of the chain server commands marshal and unmarshal
@ -388,7 +388,7 @@ func TestChainSvrCmds(t *testing.T) {
return btcjson.NewGetBlockFilterCmd("0000afaf", nil)
},
marshalled: `{"jsonrpc":"1.0","method":"getblockfilter","params":["0000afaf"],"id":1}`,
unmarshalled: &btcjson.GetBlockFilterCmd{"0000afaf", nil},
unmarshalled: &btcjson.GetBlockFilterCmd{BlockHash: "0000afaf", FilterType: nil},
},
{
name: "getblockfilter optional filtertype",
@ -399,7 +399,7 @@ func TestChainSvrCmds(t *testing.T) {
return btcjson.NewGetBlockFilterCmd("0000afaf", btcjson.NewFilterTypeName(btcjson.FilterTypeBasic))
},
marshalled: `{"jsonrpc":"1.0","method":"getblockfilter","params":["0000afaf","basic"],"id":1}`,
unmarshalled: &btcjson.GetBlockFilterCmd{"0000afaf", btcjson.NewFilterTypeName(btcjson.FilterTypeBasic)},
unmarshalled: &btcjson.GetBlockFilterCmd{BlockHash: "0000afaf", FilterType: btcjson.NewFilterTypeName(btcjson.FilterTypeBasic)},
},
{
name: "getblockhash",

View file

@ -9,10 +9,10 @@ import (
"encoding/hex"
"encoding/json"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/lbryio/lbcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/wire"
btcutil "github.com/lbryio/lbcutil"
)
// GetBlockHeaderVerboseResult models the data from the getblockheader command when
@ -25,6 +25,7 @@ type GetBlockHeaderVerboseResult struct {
Version int32 `json:"version"`
VersionHex string `json:"versionHex"`
MerkleRoot string `json:"merkleroot"`
ClaimTrie string `json:"nameclaimroot,omitempty"`
Time int64 `json:"time"`
Nonce uint64 `json:"nonce"`
Bits string `json:"bits"`
@ -65,6 +66,27 @@ type GetBlockStatsResult struct {
UTXOSizeIncrease int64 `json:"utxo_size_inc"`
}
type GetBlockVerboseResultBase struct {
Hash string `json:"hash"`
Confirmations int64 `json:"confirmations"`
StrippedSize int32 `json:"strippedsize"`
Size int32 `json:"size"`
Weight int32 `json:"weight"`
Height int64 `json:"height"`
Version int32 `json:"version"`
VersionHex string `json:"versionHex"`
MerkleRoot string `json:"merkleroot"`
Time int64 `json:"time"`
Nonce uint32 `json:"nonce"`
Bits string `json:"bits"`
Difficulty float64 `json:"difficulty"`
PreviousHash string `json:"previousblockhash,omitempty"`
NextHash string `json:"nextblockhash,omitempty"`
ClaimTrie string `json:"nameclaimroot,omitempty"`
TxCount int `json:"nTx"` // For backwards compatibility only
}
// GetBlockVerboseResult models the data from the getblock command when the
// verbose flag is set to 1. When the verbose flag is set to 0, getblock returns a
// hex-encoded string. When the verbose flag is set to 1, getblock returns an object
@ -72,23 +94,8 @@ type GetBlockStatsResult struct {
// getblock returns an object whose tx field is an array of raw transactions.
// Use GetBlockVerboseTxResult to unmarshal data received from passing verbose=2 to getblock.
type GetBlockVerboseResult struct {
Hash string `json:"hash"`
Confirmations int64 `json:"confirmations"`
StrippedSize int32 `json:"strippedsize"`
Size int32 `json:"size"`
Weight int32 `json:"weight"`
Height int64 `json:"height"`
Version int32 `json:"version"`
VersionHex string `json:"versionHex"`
MerkleRoot string `json:"merkleroot"`
Tx []string `json:"tx,omitempty"`
RawTx []TxRawResult `json:"rawtx,omitempty"` // Note: this field is always empty when verbose != 2.
Time int64 `json:"time"`
Nonce uint32 `json:"nonce"`
Bits string `json:"bits"`
Difficulty float64 `json:"difficulty"`
PreviousHash string `json:"previousblockhash"`
NextHash string `json:"nextblockhash,omitempty"`
GetBlockVerboseResultBase
Tx []string `json:"tx"`
}
// GetBlockVerboseTxResult models the data from the getblock command when the
@ -98,23 +105,8 @@ type GetBlockVerboseResult struct {
// getblock returns an object whose tx field is an array of raw transactions.
// Use GetBlockVerboseResult to unmarshal data received from passing verbose=1 to getblock.
type GetBlockVerboseTxResult struct {
Hash string `json:"hash"`
Confirmations int64 `json:"confirmations"`
StrippedSize int32 `json:"strippedsize"`
Size int32 `json:"size"`
Weight int32 `json:"weight"`
Height int64 `json:"height"`
Version int32 `json:"version"`
VersionHex string `json:"versionHex"`
MerkleRoot string `json:"merkleroot"`
Tx []TxRawResult `json:"tx,omitempty"`
RawTx []TxRawResult `json:"rawtx,omitempty"` // Deprecated: removed in Bitcoin Core
Time int64 `json:"time"`
Nonce uint32 `json:"nonce"`
Bits string `json:"bits"`
Difficulty float64 `json:"difficulty"`
PreviousHash string `json:"previousblockhash"`
NextHash string `json:"nextblockhash,omitempty"`
GetBlockVerboseResultBase
Tx []TxRawResult `json:"tx"`
}
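Because both verbose result types now embed `GetBlockVerboseResultBase`, encoding/json promotes the embedded fields, so RPC clients still see `hash`, `height`, and the rest at the top level next to `tx`. A hedged illustration (the hash, height, and txid values are placeholders):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/lbryio/lbcd/btcjson"
)

func main() {
	// Embedded struct fields are flattened by encoding/json.
	res := btcjson.GetBlockVerboseResult{
		GetBlockVerboseResultBase: btcjson.GetBlockVerboseResultBase{
			Hash:   "placeholder-block-hash",
			Height: 100000,
		},
		Tx: []string{"placeholder-txid"},
	}
	b, _ := json.Marshal(res)
	fmt.Println(string(b)) // {"hash":"placeholder-block-hash",...,"height":100000,...,"tx":["placeholder-txid"]}
}
```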
// GetChainTxStatsResult models the data from the getchaintxstats command.
@ -296,6 +288,10 @@ type GetBlockTemplateResult struct {
// Block proposal from BIP 0023.
Capabilities []string `json:"capabilities,omitempty"`
RejectReasion string `json:"reject-reason,omitempty"`
ClaimTrieHash string `json:"claimtrie"`
Rules []string `json:"rules,omitempty"`
}
// GetMempoolEntryResult models the data returned from the getmempoolentry's
@ -329,6 +325,14 @@ type GetMempoolEntryResult struct {
Depends []string `json:"depends"`
}
// GetChainTipsResult models the data returned from the getchaintips command.
type GetChainTipsResult struct {
Height int64 `json:"height"`
Hash string `json:"hash"`
BranchLen int64 `json:"branchlen"`
Status string `json:"status"`
}
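The new `GetChainTipsResult` matches the per-tip objects returned by `getchaintips`; a minimal decoding sketch follows (the JSON literal is illustrative, not real node output):

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/lbryio/lbcd/btcjson"
)

func main() {
	// Illustrative getchaintips payload; real hashes omitted.
	raw := []byte(`[{"height":100000,"hash":"placeholder","branchlen":0,"status":"active"}]`)

	var tips []btcjson.GetChainTipsResult
	if err := json.Unmarshal(raw, &tips); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%d tip(s); first: height=%d status=%s\n", len(tips), tips[0].Height, tips[0].Status)
}
```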
// GetMempoolInfoResult models the data returned from the getmempoolinfo
// command.
type GetMempoolInfoResult struct {
@ -428,6 +432,9 @@ type ScriptPubKeyResult struct {
Hex string `json:"hex,omitempty"`
ReqSigs int32 `json:"reqSigs,omitempty"`
Type string `json:"type"`
SubType string `json:"subtype"`
IsClaim bool `json:"isclaim"`
IsSupport bool `json:"issupport"`
Addresses []string `json:"addresses,omitempty"`
}
@ -586,6 +593,8 @@ func (v *Vin) MarshalJSON() ([]byte, error) {
type PrevOut struct {
Addresses []string `json:"addresses,omitempty"`
Value float64 `json:"value"`
IsClaim bool `json:"isclaim"`
IsSupport bool `json:"issupport"`
}
// VinPrevOut is like Vin except it includes PrevOut. It is used by searchrawtransaction

View file

@ -9,10 +9,10 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcutil"
"github.com/davecgh/go-spew/spew"
"github.com/lbryio/lbcd/btcjson"
"github.com/lbryio/lbcd/chaincfg/chainhash"
btcutil "github.com/lbryio/lbcutil"
)
// TestChainSvrCustomResults ensures any results that have custom marshalling
@ -70,7 +70,7 @@ func TestChainSvrCustomResults(t *testing.T) {
},
Sequence: 4294967295,
},
expected: `{"txid":"123","vout":1,"scriptSig":{"asm":"0","hex":"00"},"prevOut":{"addresses":["addr1"],"value":0},"sequence":4294967295}`,
expected: `{"txid":"123","vout":1,"scriptSig":{"asm":"0","hex":"00"},"prevOut":{"addresses":["addr1"],"value":0,"isclaim":false,"issupport":false},"sequence":4294967295}`,
},
}

View file

@ -12,7 +12,7 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestChainSvrWsCmds tests all of the chain server websocket-specific commands

View file

@ -12,7 +12,7 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestChainSvrWsNtfns tests all of the chain server websocket-specific

View file

@ -9,7 +9,7 @@ import (
"encoding/json"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestChainSvrWsResults ensures any results that have custom marshalling

134 btcjson/claimcmds.go Normal file
View file

@ -0,0 +1,134 @@
package btcjson
func init() {
// No special flags for commands in this file.
flags := UsageFlag(0)
MustRegisterCmd("getchangesinblock", (*GetChangesInBlockCmd)(nil), flags)
MustRegisterCmd("getclaimsforname", (*GetClaimsForNameCmd)(nil), flags)
MustRegisterCmd("getclaimsfornamebyid", (*GetClaimsForNameByIDCmd)(nil), flags)
MustRegisterCmd("getclaimsfornamebybid", (*GetClaimsForNameByBidCmd)(nil), flags)
MustRegisterCmd("getclaimsfornamebyseq", (*GetClaimsForNameBySeqCmd)(nil), flags)
MustRegisterCmd("normalize", (*GetNormalizedCmd)(nil), flags)
MustRegisterCmd("getprooffornamebyid", (*GetProofForNameByIDCmd)(nil), flags)
MustRegisterCmd("getprooffornamebybid", (*GetProofForNameByBidCmd)(nil), flags)
MustRegisterCmd("getprooffornamebyseq", (*GetProofForNameBySeqCmd)(nil), flags)
}
// optional inputs are required to be pointers, but they support things like `jsonrpcdefault:"false"`
// optional inputs have to be at the bottom of the struct
// optional outputs require ",omitempty"
// traditional bitcoin fields are all lowercase
type GetChangesInBlockCmd struct {
HashOrHeight *string `json:"hashorheight" jsonrpcdefault:""`
}
type GetChangesInBlockResult struct {
Hash string `json:"hash"`
Height int32 `json:"height"`
Names []string `json:"names"`
}
type GetClaimsForNameCmd struct {
Name string `json:"name"`
HashOrHeight *string `json:"hashorheight" jsonrpcdefault:""`
IncludeValues *bool `json:"includevalues" jsonrpcdefault:"false"`
}
type GetClaimsForNameByIDCmd struct {
Name string `json:"name"`
PartialClaimIDs []string `json:"partialclaimids"`
HashOrHeight *string `json:"hashorheight" jsonrpcdefault:""`
IncludeValues *bool `json:"includevalues" jsonrpcdefault:"false"`
}
type GetClaimsForNameByBidCmd struct {
Name string `json:"name"`
Bids []int32 `json:"bids"`
HashOrHeight *string `json:"hashorheight" jsonrpcdefault:""`
IncludeValues *bool `json:"includevalues" jsonrpcdefault:"false"`
}
type GetClaimsForNameBySeqCmd struct {
Name string `json:"name"`
Sequences []int32 `json:"sequences" jsonrpcusage:"[sequence,...]"`
HashOrHeight *string `json:"hashorheight" jsonrpcdefault:""`
IncludeValues *bool `json:"includevalues" jsonrpcdefault:"false"`
}
type GetClaimsForNameResult struct {
Hash string `json:"hash"`
Height int32 `json:"height"`
LastTakeoverHeight int32 `json:"lasttakeoverheight"`
NormalizedName string `json:"normalizedname"`
Claims []ClaimResult `json:"claims"`
// UnclaimedSupports []SupportResult `json:"supportswithoutclaim"` how would this work with other constraints?
}
type SupportResult struct {
TXID string `json:"txid"`
N uint32 `json:"n"`
Height int32 `json:"height"`
ValidAtHeight int32 `json:"validatheight"`
Amount int64 `json:"amount"`
Address string `json:"address,omitempty"`
Value string `json:"value,omitempty"`
}
type ClaimResult struct {
ClaimID string `json:"claimid"`
TXID string `json:"txid"`
N uint32 `json:"n"`
Bid int32 `json:"bid"`
Sequence int32 `json:"sequence"`
Height int32 `json:"height"`
ValidAtHeight int32 `json:"validatheight"`
Amount int64 `json:"amount"`
EffectiveAmount int64 `json:"effectiveamount"`
Supports []SupportResult `json:"supports,omitempty"`
Address string `json:"address,omitempty"`
Value string `json:"value,omitempty"`
}
type GetNormalizedCmd struct {
Name string `json:"name"`
}
type GetNormalizedResult struct {
NormalizedName string `json:"normalizedname"`
}
type GetProofForNameByIDCmd struct {
Name string `json:"name"`
PartialClaimID string `json:"partialclaimid"`
}
type GetProofForNameByBidCmd struct {
Name string `json:"name"`
Bid int `json:"bid"`
}
type GetProofForNameBySeqCmd struct {
Name string `json:"name"`
Sequence int `json:"sequence"`
}
type ProofPairResult struct {
Right bool `json:"right"`
Hash string `json:"hash"`
}
type ProofResult struct { // should we include the claim trie hash?
BlockHash string `json:"blockhash"`
BlockHeight int32 `json:"blockheight"`
NormalizedName string `json:"normalizedname"`
ClaimID string `json:"claimid"`
TXID string `json:"txid"`
N uint32 `json:"n"`
Bid int32 `json:"bid"`
Sequence int32 `json:"sequence"`
Takeover int32 `json:"takeover"`
Pairs []ProofPairResult `json:"pairs"`
}
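Given the registrations in `init` above, the claim commands flow through the standard btcjson marshalling path. A rough sketch, assuming the usual `btcjson.MarshalCmd`, `btcjson.String`, and `btcjson.Bool` helpers carried over from btcd (the name and parameter values are made up):

```go
package main

import (
	"fmt"
	"log"

	"github.com/lbryio/lbcd/btcjson"
)

func main() {
	// Marshal a hypothetical getclaimsforname request with its optional fields set.
	cmd := &btcjson.GetClaimsForNameCmd{
		Name:          "example-name",
		HashOrHeight:  btcjson.String("1000000"), // block hash or height
		IncludeValues: btcjson.Bool(true),
	}
	req, err := btcjson.MarshalCmd(btcjson.RpcVersion1, 1, cmd)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(req)) // expected shape: {"jsonrpc":"1.0","method":"getclaimsforname","params":[...],"id":1}
}
```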

View file

@ -8,7 +8,7 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestCmdMethod tests the CmdMethod function to ensure it returns the expected

View file

@ -10,7 +10,7 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestAssignField tests the assignField function handles supported combinations

View file

@ -7,7 +7,7 @@ package btcjson_test
import (
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestErrorCodeStringer tests the stringized output for the ErrorCode type.

View file

@ -8,7 +8,7 @@ import (
"encoding/json"
"fmt"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// This example demonstrates how to create and marshal a command into a JSON-RPC

View file

@ -547,6 +547,12 @@ func GenerateHelp(method string, descs map[string]string, resultTypes ...interfa
return desc
}
if strings.Contains(key, "base-") {
if desc, ok := descs[strings.ReplaceAll(key, "base-", "-")]; ok {
return desc
}
}
missingKey = key
return key
}
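The new branch lets fields promoted from the embedded `...Base` result types reuse the descriptions registered under the non-base keys: when a `base-` key is missing, the lookup retries with `base-` collapsed to `-`. A tiny demonstration of the substitution (the key name is illustrative):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical help key generated for a field of an embedded ...Base struct.
	key := "getblockverboseresultbase-hash"
	// The fallback collapses "base-" to "-" and retries the description lookup.
	fmt.Println(strings.ReplaceAll(key, "base-", "-")) // getblockverboseresult-hash
}
```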

View file

@ -8,7 +8,7 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestHelpReflectInternals ensures the various help functions which deal with

View file

@ -8,7 +8,7 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestHelpers tests the various helper functions which create pointers to

View file

@ -226,8 +226,12 @@ func NewResponse(rpcVersion RPCVersion, id interface{}, marshalledResult []byte,
// JSON-RPC client.
func MarshalResponse(rpcVersion RPCVersion, id interface{}, result interface{}, rpcErr *RPCError) ([]byte, error) {
if !rpcVersion.IsValid() {
str := fmt.Sprintf("rpcversion '%s' is invalid", rpcVersion)
return nil, makeError(ErrInvalidType, str)
if rpcVersion == "" {
rpcVersion = RpcVersion1
} else {
str := fmt.Sprintf("rpcversion '%s' is unsupported", rpcVersion)
return nil, makeError(ErrInvalidType, str)
}
}
marshalledResult, err := json.Marshal(result)
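With this change an empty `rpcVersion` falls back to JSON-RPC 1.0 instead of being rejected, while any other unsupported value still errors. A usage sketch (the exact output is indicative only):

```go
package main

import (
	"fmt"
	"log"

	"github.com/lbryio/lbcd/btcjson"
)

func main() {
	// An empty rpcVersion now defaults to "1.0".
	resp, err := btcjson.MarshalResponse("", 1, "pong", nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(resp)) // expected shape: {"jsonrpc":"1.0","result":"pong","error":null,"id":1}
}
```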

View file

@ -9,7 +9,7 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestIsValidIDType ensures the IsValidIDType function behaves as expected.

View file

@ -9,7 +9,7 @@ import (
"sort"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/lbryio/lbcd/btcjson"
)
// TestUsageFlagStringer tests the stringized output for the UsageFlag type.

View file

@ -12,7 +12,7 @@ import (
"encoding/json"
"fmt"
"github.com/btcsuite/btcutil"
btcutil "github.com/lbryio/lbcutil"
)
// AddMultisigAddressCmd defines the addmultisigaddress JSON-RPC command.
@ -242,7 +242,8 @@ func NewGetBalancesCmd() *GetBalancesCmd {
// GetNewAddressCmd defines the getnewaddress JSON-RPC command.
type GetNewAddressCmd struct {
Account *string
Account *string
AddressType *string // must be one of legacy / p2pkh or p2sh-p2wkh / p2sh-segwit, or p2wkh / bech32
}
// NewGetNewAddressCmd returns a new instance which can be used to issue a

View file

@ -11,8 +11,8 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/btcjson"
"github.com/btcsuite/btcutil"
"github.com/lbryio/lbcd/btcjson"
btcutil "github.com/lbryio/lbcutil"
)
// TestWalletSvrCmds tests all of the wallet server commands marshal and

View file

@ -8,7 +8,7 @@ import (
"encoding/json"
"fmt"
"github.com/btcsuite/btcd/txscript"
"github.com/lbryio/lbcd/txscript"
)
// CreateWalletResult models the result of the createwallet command.
@ -25,7 +25,6 @@ type CreateWalletResult struct {
type embeddedAddressInfo struct {
Address string `json:"address"`
ScriptPubKey string `json:"scriptPubKey"`
Solvable bool `json:"solvable"`
Descriptor *string `json:"desc,omitempty"`
IsScript bool `json:"isscript"`
IsChange bool `json:"ischange"`
@ -229,6 +228,7 @@ type InfoWalletResult struct {
PaytxFee float64 `json:"paytxfee"`
RelayFee float64 `json:"relayfee"`
Errors string `json:"errors"`
Staked float64 `json:"staked"`
}
// ListTransactionsResult models the data from the listtransactions command.
@ -293,7 +293,9 @@ type ListUnspentResult struct {
RedeemScript string `json:"redeemScript,omitempty"`
Amount float64 `json:"amount"`
Confirmations int64 `json:"confirmations"`
Solvable bool `json:"solvable"`
Spendable bool `json:"spendable"`
IsStake bool `json:"isstake"`
}
// SignRawTransactionError models the data that contains script verification

View file

@ -10,8 +10,8 @@ import (
"reflect"
"testing"
"github.com/btcsuite/btcd/txscript"
"github.com/davecgh/go-spew/spew"
"github.com/lbryio/lbcd/txscript"
)
// TestGetAddressInfoResult ensures that custom unmarshalling of

Some files were not shown because too many files have changed in this diff