Compare commits: master...hk-custom-
2 commits: 84e330df3d, f44e662dbe

187 changed files with 11635 additions and 22179 deletions

94  .github/workflows/main_ci.yml  (vendored)

@@ -1,94 +0,0 @@
name: Run Tests

on:
  push:
    branches:
      - master
  pull_request:

jobs:
  audit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: 12
          registry-url: https://registry.npmjs.org/
      - run: npm ci
      - run: npm run audit
  unit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: 12
          registry-url: https://registry.npmjs.org/
      - run: npm ci
      - run: npm run unit
  coverage:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: 12
          registry-url: https://registry.npmjs.org/
      - run: npm ci
      - run: npm run coverage
  integration:
    runs-on: ubuntu-latest
    services:
      regtest:
        image: junderw/bitcoinjs-regtest-server@sha256:5b69cf95d9edf6d5b3a00504665d6b3c382a6aa3728fe8ce897974c519061463
        ports:
          - 8080:8080
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: 12
          registry-url: https://registry.npmjs.org/
      - run: npm ci
      - run: APIURL=http://127.0.0.1:8080/1 npm run integration
  format:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: 12
          registry-url: https://registry.npmjs.org/
      - run: npm ci
      - run: npm run format:ci
  gitdiff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: 12
          registry-url: https://registry.npmjs.org/
      - run: npm ci
      - run: npm run gitdiff:ci
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: 12
          registry-url: https://registry.npmjs.org/
      - run: npm ci
      - run: npm run lint
  lint-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: 12
          registry-url: https://registry.npmjs.org/
      - run: npm ci
      - run: npm run lint:tests

3  .gitignore  (vendored)

@@ -2,6 +2,3 @@ coverage
node_modules
.nyc_output
npm-debug.log
test/*.js
test/integration/*.js
!test/ts-node-register.js

@@ -1 +0,0 @@
[]

@@ -1,4 +0,0 @@
{
  "singleQuote": true,
  "trailingComma": "all"
}

18  .travis.yml  (new file)

@@ -0,0 +1,18 @@
sudo: false
language: node_js
node_js:
  - "4"
  - "5"
  - "6"
  - "7"
  - "8"
matrix:
  include:
    - node_js: "7"
      env: TEST_SUITE=standard
    - node_js: "7"
      env: TEST_SUITE=coverage
env:
  - TEST_SUITE=integration
  - TEST_SUITE=unit
script: npm run-script $TEST_SUITE

187  CHANGELOG.md

@@ -1,190 +1,3 @@
# 6.0.0
__removed__
- bip32: Removed the re-export. Please add as dependency to your app instead.
- ECPair: Please use bip32 moving forward. ecpair package was created for those who need it.
- TransactionBuilder: Any internal files used only in TB (classify, templates, etc.) were also removed.

__added__
- taproot segwit v1 address support (bech32m) via address module (#1676)
- hashForWitnessV1 method on Transaction class (#1745)

__fixed__
- Transaction version read/write differed. (#1717)

# 5.2.0
__changed__
- Updated PSBT to allow for witnessUtxo and nonWitnessUtxo simultaneously (Re: segwit psbt bug) (#1563)

__added__
- PSBT methods `getInputType`, `inputHasPubkey`, `inputHasHDKey`, `outputHasPubkey`, `outputHasHDKey` (#1563)

# 5.1.10
__fixed__
- Fixed psbt.signInputAsync (and consequentially all Async signing methods) not handling rejection of keypair.sign properly (#1582)

# 5.1.9
__fixed__
- Fixed errors for psbt.txOutputs getter (#1578)

# 5.1.8
__fixed__
- Throw errors when p2wsh or p2wpkh contain uncompressed pubkeys (#1573)

__added__
- Add txInputs and txOutputs for Psbt (#1561)

__changed__
- (Not exposed) Added BufferWriter to help ease maintenance of certain forks of this library (#1533)

# 5.1.7
__fixed__
- Fixed Transaction class Output interface typing for TypeScript (#1506)
- Add `weight()` to Block class, add optional includeWitness arg to Transaction byteLength method (#1515)
- Match the old TransactionBuilder behavior of allowing for multiple instances of the same pubkey to be in a p2ms script for PSBT (#1519)

__added__
- Allow the API consumer to pass in the finalizer functions to allow for any type of transaction to be finalized. It places the most crucial part of transaction construction on the consumer, and should be used with caution. (#1491)

# 5.1.6
__fixed__
- `PsbtOutputExtended` did not support using the address attribute properly. It is now fixed.

# 5.1.5
__added__
- `Psbt` now has `getFee(): number` for use when all inputs are finalized. It returns the satoshi fee of the transaction. Calling getFee, getFeeRate, or extractTransaction will cache these values so if you call one after the other, the second call will return immediately.

# 5.1.4
__changed__
- `Psbt` inputs using segwit scripts can now work with nonWitnessUtxo as well as the original witnessUtxo. The reasoning for this is that nonWitnessUtxo has all the information contained in the witnessUtxo, so rejecting signing even though we have all the info we need is unnecessary. Trying to sign a non-segwit script with a witnessUtxo will still throw an Error as it should.

# 5.1.3
__changed__
- TypeScript types: Made Signer and SignerAsync use any for network since we only check for equivalence. (#1448)
- TypeScript types: Made the args for addInput and addOutput for Psbt actually accept updateInput and updateOutput parameters. (#1449)

# 5.1.2
__added__
- `ECPair` and `bip32` objects now have a lowR boolean attribute defaulted to false. You may set it to true to ensure that the sign method uses low R values (#1442) (This is to enable low R usage in Psbt, since we decided not to give the low R flag to the Psbt class, since it makes more sense to be an attribute of the Signer interface)

# 5.1.1
__changed__
- Name inconsistencies for Psbt class. (Quick fix)

# 5.1.0
__added__
- A new `Psbt` class for creating, distributing, combining, signing, and compiling Transactions (#1425)
- A `name` attribute to the Payment interface. P2SH and P2WSH are nested with `'-'` as separator, and p2ms is in the format of `'p2ms(m of n)''` all others are just hard coded. (#1433)

__changed__
- `TransactionBuilder`: Migrate to stricter type checks during sign by switching to a single object parameter (#1416)
- `tests`: Use regtest-client as separate library (#1421)

# 5.0.5
__added__
- Added `ECPairInterface` `Stack` and `StackElement` interfaces to the main index.ts export (TypeScript only affected)

# 5.0.4
__added__
- low R value support for ECPair, bip32, and TransactionBuilder (default off) via `txb.setLowR()` (#1385)

__fixed__
- Fixed Various TypeScript types that have been pushed out since v5.0.0 (#1388)

# 5.0.0
__added__
- TypeScript support (#1319)
- `Block.prototype.checkTxRoots` will check the merkleRoot and witnessCommit if it exists against the transactions array. (e52abec) (0426c66)

__changed__
- `Transaction.prototype.getHash` now has `forWitness?: boolean` which when true returns the hash for wtxid (a652d04)
- `Block.calculateMerkleRoot` now has `forWitness?: boolean` which when true returns the witness commit (a652d04)

__removed__
- `Block.prototype.checkMerkleRoot` was removed, please use `checkTxRoots` (0426c66)

# 4.0.5
__fixed__
- Fixed bug where Angular apps break due to lack of crypto at build time. Reverted #1373 and added (6bead5d).

# 4.0.4
__fixed__
- Fixed bug where Electron v4 breaks due to lack of `'rmd160'` alias for ripemd160 hash. (#1373)

# 4.0.3
__fixed__
- Fixed `TransactionBuilder` to require that the Transaction has outputs before signing (#1151)
- Fixed `payments.p2sh`, which now takes the network from the redeem attribute if one is not given in the object argument (#1232)
- Fixed `Block.calculateTarget` to allow for exponents up to 29 (#1285)
- Fixed some low priority rarely occurring bugs with multisig payments and `TransactionBuilder` multisig processing (#1307)

__added__
- Regtest network object to `networks` (#1261)

# 4.0.2
__fixed__
- Fixed `TransactionBuilder` not throwing when payment type validation should fail (#1195)

__removed__
- Removed rogue `package.json` from `src/payments` (#1216)

# 4.0.1
__fixed__
- Fixed `tiny-secp256k1` dependency version (used `ecurve`) (#1139)
- Fixed `TransactionBuilder` throwing when trying to sign `P2WSH(P2WPKH)` (#1135)

# 4.0.0
__added__
- Added [`bip32`](https://github.com/bitcoinjs/bip32) dependency as a primary export (#1073)
- Added `ECPair.fromPrivateKey` (#1070)
- Added `payments` export, with support for `p2pkh`, `p2pk`, `p2ms`, `p2sh`, `p2wpkh`, `p2wsh` and `embed` payment types (#1096, #1119)
- Added `script.signature.encode/decode` for script signatures (#459)

__changed__
- `ECPair.prototype.sign` now returns a 64-byte signature `Buffer`, not an `ECSignature` object (#1084)
- `ECPair` (and all ECDSA code) now uses [`tiny-secp256k1`](https://github.com/bitcoinjs/tiny-secp256k1), which uses the [`libsecp256k1` library](https://github.com/bitcoin-core/secp256k1) (#1070)
- `TransactionBuilder` internal variables are now `__` prefixed to discourage public usage (#1038)
- `TransactionBuilder` now defaults to version 2 transaction versions (#1036)
- `script.decompile` now returns `[Buffer]` or `null`, if decompilation failed (#1039)

__fixed__
- Fixed `TransactionBuilder` rejecting uncompressed public keys to comply with BIP143 (#987)

__removed__
- Removed Node 4/5 LTS support (#1080)
- Removed `ECPair.fromPublicKeyBuffer`, use `ECPair.fromPublicKey` (#1070)
- Removed `ECPair.prototype.getAddress`, use `payments.p2pkh` instead (#1085)
- Removed `ECPair.prototype.getPrivateKey`, use `ECPair.prototype.privateKey` property (#1070)
- Removed `ECPair.prototype.getPublicKey`, use `ECPair.prototype.publicKey` property (#1070)
- Removed `ECPair.prototype.getNetwork`, use `ECPair.prototype.network` property (#1070)
- Removed `ECSignature`, use `script.signature.encode/decode` instead (#459)
- Removed `HDNode`, use `bip32` export instead (#1073)
- Removed `bufferutils` (#1035)
- Removed `networks.litecoin`, BYO non-Bitcoin networks instead (#1095)
- Removed `script.isCanonicalSignature`, use `script.isCanonicalScriptSignature` instead (#1094)
- Removed `script.*.input/output/check` functions (`templates`), use `payments.*` instead (`templates` previously added in #681, #682) (#1119)
- Removed dependency `bigi`, uses `bn.js` internally now (via `tiny-secp256k1`) (#1070, #1112)
- Removed public access to `ECPair` constructor, use exported functions `ECPair.fromPrivateKey`, `ECPair.fromWIF`, `ECPair.makeRandom`, or `ECPair.fromPublicKey` (#1070)

# 3.3.2
__fixed__
- Fixed `decodeStack` arbitrarily supporting non-Array arguments (#942)

# 3.3.1
__changed__
- Increased the `TransactionBuilder` `maximumFeeRate` from 1000 to 2500 satoshis/byte. (#931)

# 3.3.0
__added__
- Added `ECSignature.prototype.toRSBuffer`/`ECSignature.fromRSBuffer` (#915)
- Added support to `TransactionBuilder` for 64-byte signatures via `.sign` (#915)
- Added support to `TransactionBuilder` for the `.publicKey` standard as an alternative to `.getPublicKey()` (#915)

# 3.2.1
__fixed__
- Fixed `script.scripthash.input.check` recursion (#898)
- Fixed `TransactionBuilder` sometimes ignoring witness value (#901)
- Fixed `script.witnessScriptHash.input` implementation (previously used the P2SH impl.) (#911)

# 3.2.0
__added__
- Added `address.fromBech32/toBech32` (#846)

@@ -1,83 +0,0 @@
[//]: # (This is partially derived from https://github.com/bitcoin/bitcoin/blob/6579d80572d2d33aceabbd3db45a6a9f809aa5e3/CONTRIBUTING.md)

# Contributing to bitcoinjs-lib
Firstly in terms of structure, there is no particular concept of "bitcoinjs developers" in a sense of privileged people.
Open source revolves around a meritocracy where contributors who help gain trust from the community.

For practical purpose, there are repository "maintainers" who are responsible for merging pull requests.

We are always accepting of pull requests, but we do adhere to specific standards in regards to coding style, test driven development and commit messages.


## Communication Channels
GitHub is the preferred method of communication between members.

Otherwise, in order of preference:
* bitcoinjs.slack.com
* #bitcoinjs-dev on Freenode IRC


## Workflow
The codebase is maintained using the "contributor workflow" where everyone without exception contributes patch proposals using "pull requests".
This facilitates social contribution, easy testing and peer review.

To contribute a patch, the workflow is as follows:

1. Fork repository
1. Create topic branch
1. Commit patches
1. Push changes to your fork
1. Submit a pull request to https://github.com/bitcoinjs/bitcoinjs-lib

[Commits should be atomic](https://en.wikipedia.org/wiki/Atomic_commit#Atomic_commit_convention) and diffs easy to read.

If your pull request is accepted for merging, you may be asked by a maintainer to squash and or [rebase](https://git-scm.com/docs/git-rebase) your commits before it is merged.

Please refrain from creating several pull requests for the same change.

Patchsets should be focused:

* Adding a feature, or
* Fixing a bug, or
* Refactoring code.

If you combine these, the PR may be rejected or asked to be split up.

The length of time required for peer review is unpredictable and will vary from pull request to pull request.

Refer to the [Git manual](https://git-scm.com/doc) for any information about `git`.


## Regarding TypeScript
This library is written in TypeScript with tslint, prettier, and the tsc transpiler. These tools will help during testing to notice improper logic before committing and sending a pull request.

Some rules regarding TypeScript:

* Modify the typescript source code in an IDE that will give you warnings for transpile/lint errors.
* Once you are done with the modifications, run `npm run format` then `npm test`
* Running the tests will transpile the ts files into js and d.ts files.
* Use `git diff` or other tools to verify that the ts and js are changing the same parts.
* Commit all changes to ts, js, and d.ts files.
* Add tests where necessary.
* Submit your pull request.

Using TypeScript is for preventing bugs while writing code, as well as automatically generating type definitions. However, the JS file diffs must be verified, and any unverified JS will not be published to npm.


## We adhere to Bitcoin-Core policy
Bitcoin script payment/script templates are based on community consensus, but typically adhere to bitcoin-core node policy by default.

- `bitcoinjs.script.decompile` is consensus bound only, it does not reject based on policy.
- `bitcoinjs.script.compile` will try to adhere to bitcoin-core `IsStandard` policies rules. (eg. minimalpush in https://github.com/bitcoinjs/bitcoinjs-lib/pull/638)

Any elliptic curve `sign` operations should adhere to `IsStandard` policies, like `LOW_S`, but `verify` should not reject them [by default].

If you need non-standard rejecting `decoding`, you should use an external module, not this library.

#### TLDR
Where "standards compliant" refers to the default policies of bitcoin-core, we adhere to the following:
- Any "creation" event must create standards-compliant data (standards bound)
- Any "validation" event must allow for non-standards compliant data (consensus bound)

For stricter validation, use an external module which we [may have] provided.

2  LICENSE

@@ -1,6 +1,6 @@
The MIT License (MIT)

Copyright (c) 2011-2020 bitcoinjs-lib contributors
Copyright (c) 2011-2018 bitcoinjs-lib contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

248  README.md

@@ -1,139 +1,173 @@
# BitcoinJS (bitcoinjs-lib)
[![Github CI](https://github.com/bitcoinjs/bitcoinjs-lib/actions/workflows/main_ci.yml/badge.svg)](https://github.com/bitcoinjs/bitcoinjs-lib/actions/workflows/main_ci.yml) [![NPM](https://img.shields.io/npm/v/bitcoinjs-lib.svg)](https://www.npmjs.org/package/bitcoinjs-lib) [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg?style=flat-square)](https://github.com/prettier/prettier)
[![Build Status](https://travis-ci.org/bitcoinjs/bitcoinjs-lib.png?branch=master)](https://travis-ci.org/bitcoinjs/bitcoinjs-lib)
[![NPM](https://img.shields.io/npm/v/bitcoinjs-lib.svg)](https://www.npmjs.org/package/bitcoinjs-lib)
[![tip for next commit](https://tip4commit.com/projects/735.svg)](http://tip4commit.com/projects/735)

A javascript Bitcoin library for node.js and browsers. Written in TypeScript, but committing the JS files to verify.
[![js-standard-style](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard)

The pure JavaScript Bitcoin library for node.js and browsers.
Estimated to be in use by over 15 million wallet users and is the backbone for almost all Bitcoin web wallets in production today.


## Features
- Clean: Pure JavaScript, concise code, easy to read.
- Tested: Coverage > 90%, third-party integration tests.
- Careful: Two person approval process for small, focused pull requests.
- Compatible: Works on Node.js and all modern browsers.
- Powerful: Support for advanced features, such as multi-sig, HD Wallets.
- Secure: Strong random number generation, PGP signed releases, trusted developers.
- Principled: No support for browsers with crap RNG (IE < 11)
- Standardized: Node community coding style, Browserify, Node's stdlib and Buffers.
- Fast: Optimized code, uses typed arrays instead of byte arrays for performance.
- Experiment-friendly: Bitcoin Mainnet and Testnet support.
- Altcoin-ready: Capable of working with bitcoin-derived cryptocurrencies (such as Dogecoin).

Released under the terms of the [MIT LICENSE](LICENSE).

## Should I use this in production?
If you are thinking of using the *master* branch of this library in production, **stop**.
If you are thinking of using the master branch of this library in production, **stop**.
Master is not stable; it is our development branch, and [only tagged releases may be classified as stable](https://github.com/bitcoinjs/bitcoinjs-lib/tags).


## Can I trust this code?
> Don't trust. Verify.

We recommend every user of this library and the [bitcoinjs](https://github.com/bitcoinjs) ecosystem audit and verify any underlying code for its validity and suitability, including reviewing any and all of your project's dependencies.

Mistakes and bugs happen, but with your help in resolving and reporting [issues](https://github.com/bitcoinjs/bitcoinjs-lib/issues), together we can produce open source software that is:

- Easy to audit and verify,
- Tested, with test coverage >95%,
- Advanced and feature rich,
- Standardized, using [prettier](https://github.com/prettier/prettier) and Node `Buffer`'s throughout, and
- Friendly, with a strong and helpful community, ready to answer questions.


## Documentation
Presently, we do not have any formal documentation other than our [examples](#examples), please [ask for help](https://github.com/bitcoinjs/bitcoinjs-lib/issues/new) if our examples aren't enough to guide you.

You can find a [Web UI](https://bitcoincore.tech/apps/bitcoinjs-ui/index.html) that covers most of the `psbt.ts`, `transaction.ts` and `p2*.ts` APIs [here](https://bitcoincore.tech/apps/bitcoinjs-ui/index.html).

## Installation
``` bash
npm install bitcoinjs-lib
# optionally, install a key derivation library as well
npm install ecpair bip32
# ecpair is the ECPair class for single keys
# bip32 is for generating HD keys
```

Previous versions of the library included classes for key management (ECPair, HDNode(->"bip32")) but now these have been separated into different libraries. This lowers the bundle size significantly if you don't need to perform any crypto functions (converting private to public keys and deriving HD keys).

Typically we support the [Node Maintenance LTS version](https://github.com/nodejs/Release). TypeScript target will be set
to the ECMAScript version in which all features are fully supported by current Active Node LTS.
However, depending on adoption among other environments (browsers etc.) we may keep the target back a year or two.
If in doubt, see the [main_ci.yml](.github/workflows/main_ci.yml) for what versions are used by our continuous integration tests.

**WARNING**: We presently don't provide any tooling to verify that the release on `npm` matches GitHub. As such, you should verify anything downloaded by `npm` against your own verified copy.


## Usage
Crypto is hard.

When working with private keys, the random number generator is fundamentally one of the most important parts of any software you write.
For random number generation, we *default* to the [`randombytes`](https://github.com/crypto-browserify/randombytes) module, which uses [`window.crypto.getRandomValues`](https://developer.mozilla.org/en-US/docs/Web/API/window.crypto.getRandomValues) in the browser, or Node js' [`crypto.randomBytes`](https://nodejs.org/api/crypto.html#crypto_crypto_randombytes_size_callback), depending on your build system.
Although this default is ~OK, there is no simple way to detect if the underlying RNG provided is good enough, or if it is **catastrophically bad**.
You should always verify this yourself to your own standards.

This library uses [tiny-secp256k1](https://github.com/bitcoinjs/tiny-secp256k1), which uses [RFC6979](https://tools.ietf.org/html/rfc6979) to help prevent `k` re-use and exploitation.
Unfortunately, this isn't a silver bullet.
Often, Javascript itself is working against us by bypassing these counter-measures.

Problems in [`Buffer (UInt8Array)`](https://github.com/feross/buffer), for example, can trivially result in **catastrophic fund loss** without any warning.
It can do this through undermining your random number generation, accidentally producing a [duplicate `k` value](https://www.nilsschneider.net/2013/01/28/recovering-bitcoin-private-keys.html), sending Bitcoin to a malformed output script, or any of a million different ways.
Running tests in your target environment is important and a recommended step to verify continuously.

Finally, **adhere to best practice**.
We are not an authorative source of best practice, but, at the very least:

* [Don't re-use addresses](https://en.bitcoin.it/wiki/Address_reuse).
* Don't share BIP32 extended public keys ('xpubs'). [They are a liability](https://bitcoin.stackexchange.com/questions/56916/derivation-of-parent-private-key-from-non-hardened-child), and it only takes 1 misplaced private key (or a buggy implementation!) and you are vulnerable to **catastrophic fund loss**.
* [Don't use `Math.random`](https://security.stackexchange.com/questions/181580/why-is-math-random-not-designed-to-be-cryptographically-secure) - in any way - don't.
* Enforce that users always verify (manually) a freshly-decoded human-readable version of their intended transaction before broadcast.
* [Don't *ask* users to generate mnemonics](https://en.bitcoin.it/wiki/Brainwallet#cite_note-1), or 'brain wallets', humans are terrible random number generators.
* Lastly, if you can, use [Typescript](https://www.typescriptlang.org/) or similar.

## Setup
### Node.js
``` javascript
var bitcoin = require('bitcoinjs-lib')
```

### Browser
The recommended method of using `bitcoinjs-lib` in your browser is through [Browserify](https://github.com/substack/node-browserify).
If you're familiar with how to use browserify, ignore this and carry on, otherwise, it is recommended to read the tutorial at https://browserify.org/.
If you're familiar with how to use browserify, ignore this and proceed normally.
These steps are advisory only, and may not be suitable for your application.

**NOTE**: We use Node Maintenance LTS features, if you need strict ES5, use [`--transform babelify`](https://github.com/babel/babelify) in conjunction with your `browserify` step (using an [`es2015`](https://babeljs.io/docs/plugins/preset-es2015/) preset).
[Browserify](https://github.com/substack/node-browserify) is assumed to be installed for these steps.

For your project, create an `index.js` file
``` javascript
let bitcoin = require('bitcoinjs-lib')

// your code here
function myFunction () {
  return bitcoin.ECPair.makeRandom().toWIF()
}

module.exports = {
  myFunction
}
```

Now, to compile for the browser:
``` bash
browserify index.js --standalone foo > app.js
```

You can now put `<script src="app.js" />` in your web page, using `foo.myFunction` to create a new Bitcoin private key.

**NOTE**: If you uglify the javascript, you must exclude the following variable names from being mangled: `BigInteger`, `ECPair`, `Point`.
This is because of the function-name-duck-typing used in [typeforce](https://github.com/dcousens/typeforce).

Example:
``` bash
uglifyjs ... --mangle reserved=['BigInteger','ECPair','Point']
```

**NOTE**: This library tracks Node LTS features, if you need strict ES5, use [`--transform babelify`](https://github.com/babel/babelify) in conjunction with your `browserify` step (using an [`es2015`](http://babeljs.io/docs/plugins/preset-es2015/) preset).

**NOTE**: If you expect this library to run on an iOS 10 device, ensure that you are using [buffer@5.0.5](https://github.com/feross/buffer/pull/155) or greater.

**WARNING**: iOS devices have [problems](https://github.com/feross/buffer/issues/136), use at least [buffer@5.0.5](https://github.com/feross/buffer/pull/155) or greater, and enforce the test suites (for `Buffer`, and any other dependency) pass before use.

### Typescript or VSCode users
Type declarations for Typescript are included in this library. Normal installation should include all the needed type information.
Type declarations for Typescript are available for version `^3.0.0` of the library.
``` bash
npm install @types/bitcoinjs-lib
```

You can now use `bitcoinjs-lib` as a typescript compliant library.
``` javascript
import { HDNode, Transaction } from 'bitcoinjs-lib'
```

For VSCode (and other editors), users are advised to install the type declarations, as Intellisense uses that information to help you code (autocompletion, static analysis).

Report any typescript related bugs at [@dlebrecht DefinitelyTyped fork](https://github.com/dlebrecht/DefinitelyTyped), submit PRs to [DefinitelyTyped](https://github.com/DefinitelyTyped/DefinitelyTyped)


### Flow
Definitions for [Flow typechecker](https://flowtype.org/) are available in flow-typed repository.

[You can either download them directly](https://github.com/flowtype/flow-typed/blob/master/definitions/npm/bitcoinjs-lib_v2.x.x/flow_v0.17.x-/bitcoinjs-lib_v2.x.x.js) from the repo, or with the flow-typed CLI

# npm install -g flow-typed
$ flow-typed install -f 0.27 bitcoinjs-lib@2.2.0 # 0.27 for flow version, 2.2.0 for bitcoinjs-lib version

The definitions are complete and up to date with version 2.2.0. The definitions are maintained by [@runn1ng](https://github.com/runn1ng).

## Examples
The below examples are implemented as integration tests, they should be very easy to understand.
Otherwise, pull requests are appreciated.
Some examples interact (via HTTPS) with a 3rd Party Blockchain Provider (3PBP).

- [Taproot Key Spend](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/taproot.md)

- [Generate a random address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.spec.ts)
- [Import an address via WIF](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.spec.ts)
- [Generate a 2-of-3 P2SH multisig address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.spec.ts)
- [Generate a SegWit address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.spec.ts)
- [Generate a SegWit P2SH address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.spec.ts)
- [Generate a SegWit 3-of-4 multisig address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.spec.ts)
- [Generate a SegWit 2-of-2 P2SH multisig address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.spec.ts)
- [Support the retrieval of transactions for an address (3rd party blockchain)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.spec.ts)
- [Generate a Testnet address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.spec.ts)
- [Generate a Litecoin address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.spec.ts)
- [Create a 1-to-1 Transaction](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.spec.ts)
- [Create (and broadcast via 3PBP) a typical Transaction](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction with an OP\_RETURN output](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction with a 2-of-4 P2SH(multisig) input](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction with a SegWit P2SH(P2WPKH) input](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction with a SegWit P2WPKH input](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction with a SegWit P2PK input](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction with a SegWit 3-of-4 P2SH(P2WSH(multisig)) input](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction and sign with an HDSigner interface (bip32)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.spec.ts)
- [Import a BIP32 testnet xpriv and export to WIF](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.spec.ts)
- [Export a BIP32 xpriv, then import it](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.spec.ts)
- [Export a BIP32 xpub](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.spec.ts)
- [Create a BIP32, bitcoin, account 0, external address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.spec.ts)
- [Create a BIP44, bitcoin, account 0, external address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.spec.ts)
- [Create a BIP49, bitcoin testnet, account 0, external address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.spec.ts)
- [Use BIP39 to generate BIP32 addresses](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction where Alice can redeem the output after the expiry (in the past)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/cltv.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction where Alice can redeem the output after the expiry (in the future)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/cltv.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction where Alice and Bob can redeem the output at any time](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/cltv.spec.ts)
- [Create (but fail to broadcast via 3PBP) a Transaction where Alice attempts to redeem before the expiry](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/cltv.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction where Alice can redeem the output after the expiry (in the future) (simple CHECKSEQUENCEVERIFY)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/csv.spec.ts)
- [Create (but fail to broadcast via 3PBP) a Transaction where Alice attempts to redeem before the expiry (simple CHECKSEQUENCEVERIFY)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/csv.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction where Bob and Charles can send (complex CHECKSEQUENCEVERIFY)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/csv.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction where Alice (mediator) and Bob can send after 2 blocks (complex CHECKSEQUENCEVERIFY)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/csv.spec.ts)
- [Create (and broadcast via 3PBP) a Transaction where Alice (mediator) can send after 5 blocks (complex CHECKSEQUENCEVERIFY)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/csv.spec.ts)
- [Generate a random address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L12)
- [Generate an address from a SHA256 hash](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L19)
- [Import an address via WIF](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L29)
- [Generate a 2-of-3 P2SH multisig address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L36)
- [Generate a SegWit address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L50)
- [Generate a SegWit P2SH address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L60)
- [Generate a SegWit 3-of-4 multisig address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L71)
- [Generate a SegWit 2-of-2 P2SH multisig address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L86)
- [Support the retrieval of transactions for an address (3rd party blockchain)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L100)
- [Generate a Testnet address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L121)
- [Generate a Litecoin address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/addresses.js#L131)
- [Create a 1-to-1 Transaction](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.js#L14)
- [Create a 2-to-2 Transaction](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.js#L28)
- [Create (and broadcast via 3PBP) a typical Transaction](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.js#L46)
- [Create (and broadcast via 3PBP) a Transaction with an OP\_RETURN output](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.js#L88)
- [Create (and broadcast via 3PBP) a Transaction with a 2-of-4 P2SH(multisig) input](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.js#L115)
- [Create (and broadcast via 3PBP) a Transaction with a SegWit P2SH(P2WPKH) input](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.js#L151)
- [Create (and broadcast via 3PBP) a Transaction with a SegWit 3-of-4 P2SH(P2WSH(multisig)) input](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/transactions.js#L183)
- [Import a BIP32 testnet xpriv and export to WIF](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.js#L8)
- [Export a BIP32 xpriv, then import it](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.js#L15)
- [Export a BIP32 xpub](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.js#L26)
- [Create a BIP32, bitcoin, account 0, external address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.js#L35)
- [Create a BIP44, bitcoin, account 0, external address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.js#L50)
- [Create a BIP49, bitcoin testnet, account 0, external address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.js#L66)
- [Use BIP39 to generate BIP32 addresses](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/bip32.js#L83)
- [Create (and broadcast via 3PBP) a Transaction where Alice can redeem the output after the expiry](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/cltv.js#L37)
- [Create (and broadcast via 3PBP) a Transaction where Alice and Bob can redeem the output at any time](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/cltv.js#L71)
- [Create (but fail to broadcast via 3PBP) a Transaction where Alice attempts to redeem before the expiry](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/cltv.js#L104)
- [Recover a private key from duplicate R values](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/crypto.js#L14)
- [Recover a BIP32 parent private key from the parent public key, and a derived, non-hardened child private key](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/crypto.js#L115)
- [Generate a single-key stealth address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/stealth.js#L70:)
- [Generate a single-key stealth address (randomly)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/stealth.js#L89:)
- [Recover parent recipient.d, if a derived private key is leaked (and nonce was revealed)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/stealth.js#L105)
- [Generate a dual-key stealth address](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/stealth.js#L122)
- [Generate a dual-key stealth address (randomly)](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/test/integration/stealth.js#L145)

If you have a use case that you feel could be listed here, please [ask for it](https://github.com/bitcoinjs/bitcoinjs-lib/issues/new)!


## Projects utilizing BitcoinJS
- [BitAddress](https://www.bitaddress.org)
- [Blockchain.info](https://blockchain.info/wallet)
- [Blocktrail](https://www.blocktrail.com/)
- [Dark Wallet](https://www.darkwallet.is/)
- [DecentralBank](http://decentralbank.com/)
- [Dogechain Wallet](https://dogechain.info)
- [EI8HT Wallet](http://ei8.ht/)
- [GreenAddress](https://greenaddress.it)
- [Helperbit](https://helperbit.com)
- [Melis Wallet](https://melis.io)
- [Robocoin](https://wallet.robocoin.com)
- [Skyhook ATM](http://projectskyhook.com)


## Contributing
See [CONTRIBUTING.md](CONTRIBUTING.md).
We are always accepting of pull requests, but we do adhere to specific standards in regards to coding style, test driven development and commit messages.

Please make your best effort to adhere to these when contributing to save on trivial corrections.


### Running the test suite

@@ -153,7 +187,7 @@ npm run-script coverage
- [BIP69](https://github.com/bitcoinjs/bip69) - Lexicographical Indexing of Transaction Inputs and Outputs
- [Base58](https://github.com/cryptocoinjs/bs58) - Base58 encoding/decoding
- [Base58 Check](https://github.com/bitcoinjs/bs58check) - Base58 check encoding/decoding
- [Bech32](https://github.com/bitcoinjs/bech32) - A BIP173/BIP350 compliant Bech32/Bech32m encoding library
- [Bech32](https://github.com/bitcoinjs/bech32) - A BIP173 compliant Bech32 encoding library
- [coinselect](https://github.com/bitcoinjs/coinselect) - A fee-optimizing, transaction input selection module for bitcoinjs-lib.
- [merkle-lib](https://github.com/bitcoinjs/merkle-lib) - A performance conscious library for merkle root and tree calculations.
- [minimaldata](https://github.com/bitcoinjs/minimaldata) - A module to check bitcoin policy: SCRIPT_VERIFY_MINIMALDATA

2680  package-lock.json  (generated)
File diff suppressed because it is too large

88  package.json

@@ -1,11 +1,10 @@
{
  "name": "bitcoinjs-lib",
  "version": "6.0.1",
  "version": "3.3.2",
  "description": "Client-side Bitcoin JavaScript library",
  "main": "./src/index.js",
  "types": "./src/index.d.ts",
  "engines": {
    "node": ">=8.0.0"
    "node": ">=4.0.0"
  },
  "keywords": [
    "bitcoinjs",

@@ -15,31 +14,13 @@
    "bitcoinjs"
  ],
  "scripts": {
    "audit": "NPM_AUDIT_IGNORE_DEV=1 NPM_AUDIT_IGNORE_LEVEL=low npm-audit-whitelister .npm-audit-whitelister.json",
    "build": "npm run clean && tsc -p ./tsconfig.json && npm run formatjs",
    "build:tests": "npm run clean:jstests && tsc -p ./test/tsconfig.json",
    "clean": "rimraf src",
    "clean:jstests": "rimraf 'test/**/!(ts-node-register)*.js'",
    "coverage-report": "npm run build && npm run nobuild:coverage-report",
    "coverage-html": "npm run build && npm run nobuild:coverage-html",
    "coverage": "npm run build && npm run nobuild:coverage",
    "format": "npm run prettier -- --write",
    "formatjs": "npm run prettierjs -- --write",
    "format:ci": "npm run prettier -- --check && npm run prettierjs -- --check",
    "gitdiff:ci": "npm run build && git diff --exit-code",
    "integration": "npm run build && npm run nobuild:integration",
    "lint": "tslint -p tsconfig.json -c tslint.json",
    "lint:tests": "tslint -p test/tsconfig.json -c tslint.json",
    "mocha:ts": "mocha --recursive --require test/ts-node-register",
    "nobuild:coverage-report": "nyc report --reporter=lcov",
    "nobuild:coverage-html": "nyc report --reporter=html",
    "nobuild:coverage": "npm run build:tests && nyc --check-coverage --branches 90 --functions 90 --lines 90 mocha && npm run clean:jstests",
    "nobuild:integration": "npm run mocha:ts -- --timeout 50000 'test/integration/*.ts'",
    "nobuild:unit": "npm run mocha:ts -- 'test/*.ts'",
    "prettier": "prettier \"ts_src/**/*.ts\" \"test/**/*.ts\" --ignore-path ./.prettierignore",
    "prettierjs": "prettier \"src/**/*.js\" --ignore-path ./.prettierignore",
    "test": "npm run build && npm run format:ci && npm run lint && npm run nobuild:coverage",
    "unit": "npm run build && npm run nobuild:unit"
    "coverage-report": "nyc report --reporter=lcov",
    "coverage-html": "nyc report --reporter=html",
    "coverage": "nyc --check-coverage --branches 90 --functions 90 mocha",
    "integration": "mocha test/integration/",
    "standard": "standard",
    "test": "npm run standard && npm run coverage",
    "unit": "mocha"
  },
  "repository": {
    "type": "git",

@@ -49,45 +30,34 @@
    "src"
  ],
  "dependencies": {
    "bech32": "^2.0.0",
    "bip174": "^2.0.1",
    "bs58check": "^2.1.2",
    "bech32": "^1.1.2",
    "bigi": "^1.4.0",
    "bip66": "^1.1.0",
    "bitcoin-ops": "^1.3.0",
    "bs58check": "^2.0.0",
    "create-hash": "^1.1.0",
    "create-hmac": "^1.1.3",
    "ecurve": "^1.0.0",
    "merkle-lib": "^2.0.10",
    "pushdata-bitcoin": "^1.0.1",
    "randombytes": "^2.0.1",
    "safe-buffer": "^5.0.1",
    "typeforce": "^1.11.3",
    "varuint-bitcoin": "^1.1.2",
    "varuint-bitcoin": "^1.0.4",
    "wif": "^2.0.1"
  },
  "devDependencies": {
    "@types/bs58": "^4.0.0",
    "@types/bs58check": "^2.1.0",
    "@types/create-hash": "^1.2.2",
    "@types/mocha": "^5.2.7",
    "@types/node": "^16.11.7",
    "@types/proxyquire": "^1.3.28",
    "@types/randombytes": "^2.0.0",
    "@types/wif": "^2.0.2",
    "bip32": "^3.0.1",
    "bip39": "^3.0.2",
    "bip39": "^2.3.0",
    "bip65": "^1.0.1",
    "bip68": "^1.0.3",
    "bn.js": "^4.11.8",
    "bs58": "^4.0.0",
    "dhttp": "^3.0.0",
    "ecpair": "^2.0.1",
    "hoodwink": "^2.0.0",
    "dhttp": "^2.4.2",
    "minimaldata": "^1.0.2",
    "mocha": "^7.1.1",
    "npm-audit-whitelister": "^1.0.2",
    "nyc": "^15.1.0",
    "prettier": "1.16.4",
    "proxyquire": "^2.0.1",
    "randombytes": "^2.1.0",
    "regtest-client": "0.2.0",
    "rimraf": "^2.6.3",
    "tiny-secp256k1": "^2.1.2",
    "ts-node": "^8.3.0",
    "tslint": "^6.1.3",
    "typescript": "^4.4.4"
    "mocha": "^5.0.1",
    "nyc": "^11.4.1",
    "proxyquire": "^1.4.0",
    "sinon": "^4.3.0",
    "sinon-test": "^2.1.3",
    "standard": "^9.0.2"
  },
  "license": "MIT"
}

17  src/address.d.ts  (vendored)

@@ -1,17 +0,0 @@
/// <reference types="node" />
import { Network } from './networks';
export interface Base58CheckResult {
  hash: Buffer;
  version: number;
}
export interface Bech32Result {
  version: number;
  prefix: string;
  data: Buffer;
}
export declare function fromBase58Check(address: string): Base58CheckResult;
export declare function fromBech32(address: string): Bech32Result;
export declare function toBase58Check(hash: Buffer, version: number): string;
export declare function toBech32(data: Buffer, version: number, prefix: string): string;
export declare function fromOutputScript(output: Buffer, network?: Network): string;
export declare function toOutputScript(address: string, network?: Network): Buffer;
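
The declarations above describe the address module's public API in the newer (master) code. As a rough usage sketch only, not part of the diff: it assumes a current bitcoinjs-lib install, and the address string is just a well-known example value.

``` javascript
// Hedged sketch: round-trip an address through the declared functions.
const bitcoin = require('bitcoinjs-lib')

// Decode a base58check address into its { version, hash } parts.
const { version, hash } = bitcoin.address.fromBase58Check('1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa')

// Re-encode it, then build the matching output script and convert back.
const addr = bitcoin.address.toBase58Check(hash, version)
const script = bitcoin.address.toOutputScript(addr)
console.log(bitcoin.address.fromOutputScript(script)) // same address again
```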
211
src/address.js
211
src/address.js
|
@ -1,148 +1,97 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.toOutputScript = exports.fromOutputScript = exports.toBech32 = exports.toBase58Check = exports.fromBech32 = exports.fromBase58Check = void 0;
|
||||
const networks = require('./networks');
|
||||
const payments = require('./payments');
|
||||
const bscript = require('./script');
|
||||
const types = require('./types');
|
||||
const bech32_1 = require('bech32');
|
||||
const bs58check = require('bs58check');
|
||||
const { typeforce } = types;
|
||||
const FUTURE_SEGWIT_MAX_SIZE = 40;
|
||||
const FUTURE_SEGWIT_MIN_SIZE = 2;
|
||||
const FUTURE_SEGWIT_MAX_VERSION = 16;
|
||||
const FUTURE_SEGWIT_MIN_VERSION = 1;
|
||||
const FUTURE_SEGWIT_VERSION_DIFF = 0x50;
|
||||
const FUTURE_SEGWIT_VERSION_WARNING =
|
||||
'WARNING: Sending to a future segwit version address can lead to loss of funds. ' +
|
||||
'End users MUST be warned carefully in the GUI and asked if they wish to proceed ' +
|
||||
'with caution. Wallets should verify the segwit version from the output of fromBech32, ' +
|
||||
'then decide when it is safe to use which version of segwit.';
|
||||
function _toFutureSegwitAddress(output, network) {
|
||||
const data = output.slice(2);
|
||||
if (
|
||||
data.length < FUTURE_SEGWIT_MIN_SIZE ||
|
||||
data.length > FUTURE_SEGWIT_MAX_SIZE
|
||||
)
|
||||
throw new TypeError('Invalid program length for segwit address');
|
||||
const version = output[0] - FUTURE_SEGWIT_VERSION_DIFF;
|
||||
if (
|
||||
version < FUTURE_SEGWIT_MIN_VERSION ||
|
||||
version > FUTURE_SEGWIT_MAX_VERSION
|
||||
)
|
||||
throw new TypeError('Invalid version for segwit address');
|
||||
if (output[1] !== data.length)
|
||||
throw new TypeError('Invalid script for segwit address');
|
||||
console.warn(FUTURE_SEGWIT_VERSION_WARNING);
|
||||
return toBech32(data, version, network.bech32);
|
||||
}
|
||||
function fromBase58Check(address) {
|
||||
const payload = bs58check.decode(address);
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
var bech32 = require('bech32')
|
||||
var bs58check = require('bs58check')
|
||||
var bscript = require('./script')
|
||||
var btemplates = require('./templates')
|
||||
var networks = require('./networks')
|
||||
var typeforce = require('typeforce')
|
||||
var types = require('./types')
|
||||
|
||||
function fromBase58Check (address) {
|
||||
var payload = bs58check.decode(address)
|
||||
|
||||
// TODO: 4.0.0, move to "toOutputScript"
|
||||
if (payload.length < 21) throw new TypeError(address + ' is too short');
|
||||
if (payload.length > 21) throw new TypeError(address + ' is too long');
|
||||
const version = payload.readUInt8(0);
|
||||
const hash = payload.slice(1);
|
||||
return { version, hash };
|
||||
if (payload.length < 21) throw new TypeError(address + ' is too short')
|
||||
if (payload.length > 21) throw new TypeError(address + ' is too long')
|
||||
|
||||
var version = payload.readUInt8(0)
|
||||
var hash = payload.slice(1)
|
||||
|
||||
return { version: version, hash: hash }
|
||||
}
|
||||
exports.fromBase58Check = fromBase58Check;
|
||||
function fromBech32(address) {
|
||||
let result;
|
||||
let version;
|
||||
try {
|
||||
result = bech32_1.bech32.decode(address);
|
||||
} catch (e) {}
|
||||
if (result) {
|
||||
version = result.words[0];
|
||||
if (version !== 0) throw new TypeError(address + ' uses wrong encoding');
|
||||
} else {
|
||||
result = bech32_1.bech32m.decode(address);
|
||||
version = result.words[0];
|
||||
if (version === 0) throw new TypeError(address + ' uses wrong encoding');
|
||||
}
|
||||
const data = bech32_1.bech32.fromWords(result.words.slice(1));
|
||||
|
||||
function fromBech32 (address) {
|
||||
var result = bech32.decode(address)
|
||||
var data = bech32.fromWords(result.words.slice(1))
|
||||
|
||||
return {
|
||||
version,
|
||||
version: result.words[0],
|
||||
prefix: result.prefix,
|
||||
data: Buffer.from(data),
|
||||
};
|
||||
data: Buffer.from(data)
|
||||
}
|
||||
}
|
||||
exports.fromBech32 = fromBech32;
|
||||
function toBase58Check(hash, version) {
|
||||
typeforce(types.tuple(types.Hash160bit, types.UInt8), arguments);
|
||||
const payload = Buffer.allocUnsafe(21);
|
||||
payload.writeUInt8(version, 0);
|
||||
hash.copy(payload, 1);
|
||||
return bs58check.encode(payload);
|
||||
|
||||
function toBase58Check (hash, version) {
|
||||
typeforce(types.tuple(types.Hash160bit, types.UInt8), arguments)
|
||||
|
||||
var payload = Buffer.allocUnsafe(21)
|
||||
payload.writeUInt8(version, 0)
|
||||
hash.copy(payload, 1)
|
||||
|
||||
return bs58check.encode(payload)
|
||||
}
|
||||
exports.toBase58Check = toBase58Check;
|
||||
function toBech32(data, version, prefix) {
|
||||
const words = bech32_1.bech32.toWords(data);
|
||||
words.unshift(version);
|
||||
return version === 0
|
||||
? bech32_1.bech32.encode(prefix, words)
|
||||
: bech32_1.bech32m.encode(prefix, words);
|
||||
|
||||
function toBech32 (data, version, prefix) {
|
||||
var words = bech32.toWords(data)
|
||||
words.unshift(version)
|
||||
|
||||
return bech32.encode(prefix, words)
|
||||
}
|
||||
exports.toBech32 = toBech32;
|
||||
function fromOutputScript(output, network) {
|
||||
// TODO: Network
|
||||
network = network || networks.bitcoin;
|
||||
try {
|
||||
return payments.p2pkh({ output, network }).address;
|
||||
} catch (e) {}
|
||||
try {
|
||||
return payments.p2sh({ output, network }).address;
|
||||
} catch (e) {}
|
||||
try {
|
||||
return payments.p2wpkh({ output, network }).address;
|
||||
} catch (e) {}
|
||||
try {
|
||||
return payments.p2wsh({ output, network }).address;
|
||||
} catch (e) {}
|
||||
try {
|
||||
return _toFutureSegwitAddress(output, network);
|
||||
} catch (e) {}
|
||||
throw new Error(bscript.toASM(output) + ' has no matching Address');
|
||||
|
||||
function fromOutputScript (outputScript, network) {
|
||||
network = network || networks.bitcoin
|
||||
|
||||
if (btemplates.pubKeyHash.output.check(outputScript)) return toBase58Check(bscript.compile(outputScript).slice(3, 23), network.pubKeyHash)
|
||||
if (btemplates.scriptHash.output.check(outputScript)) return toBase58Check(bscript.compile(outputScript).slice(2, 22), network.scriptHash)
|
||||
if (btemplates.witnessPubKeyHash.output.check(outputScript)) return toBech32(bscript.compile(outputScript).slice(2, 22), 0, network.bech32)
|
||||
if (btemplates.witnessScriptHash.output.check(outputScript)) return toBech32(bscript.compile(outputScript).slice(2, 34), 0, network.bech32)
|
||||
|
||||
throw new Error(bscript.toASM(outputScript) + ' has no matching Address')
|
||||
}
|
||||
exports.fromOutputScript = fromOutputScript;
|
||||
function toOutputScript(address, network) {
|
||||
network = network || networks.bitcoin;
|
||||
let decodeBase58;
|
||||
let decodeBech32;
|
||||
|
||||
function toOutputScript (address, network) {
|
||||
network = network || networks.bitcoin
|
||||
|
||||
var decode
|
||||
try {
|
||||
decodeBase58 = fromBase58Check(address);
|
||||
decode = fromBase58Check(address)
|
||||
} catch (e) {}
|
||||
if (decodeBase58) {
|
||||
if (decodeBase58.version === network.pubKeyHash)
|
||||
return payments.p2pkh({ hash: decodeBase58.hash }).output;
|
||||
if (decodeBase58.version === network.scriptHash)
|
||||
return payments.p2sh({ hash: decodeBase58.hash }).output;
|
||||
|
||||
if (decode) {
|
||||
if (decode.version === network.pubKeyHash) return btemplates.pubKeyHash.output.encode(decode.hash)
|
||||
if (decode.version === network.scriptHash) return btemplates.scriptHash.output.encode(decode.hash)
|
||||
} else {
|
||||
try {
|
||||
decodeBech32 = fromBech32(address);
|
||||
decode = fromBech32(address)
|
||||
} catch (e) {}
|
||||
if (decodeBech32) {
|
||||
if (decodeBech32.prefix !== network.bech32)
|
||||
throw new Error(address + ' has an invalid prefix');
|
||||
if (decodeBech32.version === 0) {
|
||||
if (decodeBech32.data.length === 20)
|
||||
return payments.p2wpkh({ hash: decodeBech32.data }).output;
|
||||
if (decodeBech32.data.length === 32)
|
||||
return payments.p2wsh({ hash: decodeBech32.data }).output;
|
||||
} else if (
|
||||
decodeBech32.version >= FUTURE_SEGWIT_MIN_VERSION &&
|
||||
decodeBech32.version <= FUTURE_SEGWIT_MAX_VERSION &&
|
||||
decodeBech32.data.length >= FUTURE_SEGWIT_MIN_SIZE &&
|
||||
decodeBech32.data.length <= FUTURE_SEGWIT_MAX_SIZE
|
||||
) {
|
||||
console.warn(FUTURE_SEGWIT_VERSION_WARNING);
|
||||
return bscript.compile([
|
||||
decodeBech32.version + FUTURE_SEGWIT_VERSION_DIFF,
|
||||
decodeBech32.data,
|
||||
]);
|
||||
|
||||
if (decode) {
|
||||
if (decode.prefix !== network.bech32) throw new Error(address + ' has an invalid prefix')
|
||||
if (decode.version === 0) {
|
||||
if (decode.data.length === 20) return btemplates.witnessPubKeyHash.output.encode(decode.data)
|
||||
if (decode.data.length === 32) return btemplates.witnessScriptHash.output.encode(decode.data)
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new Error(address + ' has no matching Script');
|
||||
|
||||
throw new Error(address + ' has no matching Script')
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
fromBase58Check: fromBase58Check,
|
||||
fromBech32: fromBech32,
|
||||
fromOutputScript: fromOutputScript,
|
||||
toBase58Check: toBase58Check,
|
||||
toBech32: toBech32,
|
||||
toOutputScript: toOutputScript
|
||||
}
|
||||
exports.toOutputScript = toOutputScript;
|
||||
|
|
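Putting the new address helpers together, here is a minimal bech32 round-trip sketch; the 20-byte hash is an arbitrary placeholder, not a value taken from this diff:

// Round-trip a witness v0 program through toBech32/fromBech32 (sketch only).
const address = require('./address')

const hash = Buffer.alloc(20, 1)                 // placeholder 20-byte witness program
const addr = address.toBech32(hash, 0, 'bc')     // version 0 -> bech32 encoding
const decoded = address.fromBech32(addr)         // { version, prefix, data }

console.log(decoded.version === 0)               // true
console.log(decoded.data.equals(hash))           // true
console.log(address.toOutputScript(addr))        // OP_0 <20-byte hash> as a Buffer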
7
src/bip66.d.ts
vendored
|
@@ -1,7 +0,0 @@
|
|||
/// <reference types="node" />
|
||||
export declare function check(buffer: Buffer): boolean;
|
||||
export declare function decode(buffer: Buffer): {
|
||||
r: Buffer;
|
||||
s: Buffer;
|
||||
};
|
||||
export declare function encode(r: Buffer, s: Buffer): Buffer;
|
102
src/bip66.js
|
@@ -1,102 +0,0 @@
|
|||
'use strict';
|
||||
// Reference https://github.com/bitcoin/bips/blob/master/bip-0066.mediawiki
|
||||
// Format: 0x30 [total-length] 0x02 [R-length] [R] 0x02 [S-length] [S]
|
||||
// NOTE: SIGHASH byte ignored AND restricted, truncate before use
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.encode = exports.decode = exports.check = void 0;
|
||||
function check(buffer) {
|
||||
if (buffer.length < 8) return false;
|
||||
if (buffer.length > 72) return false;
|
||||
if (buffer[0] !== 0x30) return false;
|
||||
if (buffer[1] !== buffer.length - 2) return false;
|
||||
if (buffer[2] !== 0x02) return false;
|
||||
const lenR = buffer[3];
|
||||
if (lenR === 0) return false;
|
||||
if (5 + lenR >= buffer.length) return false;
|
||||
if (buffer[4 + lenR] !== 0x02) return false;
|
||||
const lenS = buffer[5 + lenR];
|
||||
if (lenS === 0) return false;
|
||||
if (6 + lenR + lenS !== buffer.length) return false;
|
||||
if (buffer[4] & 0x80) return false;
|
||||
if (lenR > 1 && buffer[4] === 0x00 && !(buffer[5] & 0x80)) return false;
|
||||
if (buffer[lenR + 6] & 0x80) return false;
|
||||
if (lenS > 1 && buffer[lenR + 6] === 0x00 && !(buffer[lenR + 7] & 0x80))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
exports.check = check;
|
||||
function decode(buffer) {
|
||||
if (buffer.length < 8) throw new Error('DER sequence length is too short');
|
||||
if (buffer.length > 72) throw new Error('DER sequence length is too long');
|
||||
if (buffer[0] !== 0x30) throw new Error('Expected DER sequence');
|
||||
if (buffer[1] !== buffer.length - 2)
|
||||
throw new Error('DER sequence length is invalid');
|
||||
if (buffer[2] !== 0x02) throw new Error('Expected DER integer');
|
||||
const lenR = buffer[3];
|
||||
if (lenR === 0) throw new Error('R length is zero');
|
||||
if (5 + lenR >= buffer.length) throw new Error('R length is too long');
|
||||
if (buffer[4 + lenR] !== 0x02) throw new Error('Expected DER integer (2)');
|
||||
const lenS = buffer[5 + lenR];
|
||||
if (lenS === 0) throw new Error('S length is zero');
|
||||
if (6 + lenR + lenS !== buffer.length) throw new Error('S length is invalid');
|
||||
if (buffer[4] & 0x80) throw new Error('R value is negative');
|
||||
if (lenR > 1 && buffer[4] === 0x00 && !(buffer[5] & 0x80))
|
||||
throw new Error('R value excessively padded');
|
||||
if (buffer[lenR + 6] & 0x80) throw new Error('S value is negative');
|
||||
if (lenS > 1 && buffer[lenR + 6] === 0x00 && !(buffer[lenR + 7] & 0x80))
|
||||
throw new Error('S value excessively padded');
|
||||
// non-BIP66 - extract R, S values
|
||||
return {
|
||||
r: buffer.slice(4, 4 + lenR),
|
||||
s: buffer.slice(6 + lenR),
|
||||
};
|
||||
}
|
||||
exports.decode = decode;
|
||||
/*
|
||||
* Expects r and s to be positive DER integers.
|
||||
*
|
||||
* The DER format uses the most significant bit as a sign bit (& 0x80).
|
||||
* If the significant bit is set AND the integer is positive, a 0x00 is prepended.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* 0 => 0x00
|
||||
* 1 => 0x01
|
||||
* -1 => 0xff
|
||||
* 127 => 0x7f
|
||||
* -127 => 0x81
|
||||
* 128 => 0x0080
|
||||
* -128 => 0x80
|
||||
* 255 => 0x00ff
|
||||
* -255 => 0xff01
|
||||
* 16300 => 0x3fac
|
||||
* -16300 => 0xc054
|
||||
* 62300 => 0x00f35c
|
||||
* -62300 => 0xff0ca4
|
||||
*/
|
||||
function encode(r, s) {
|
||||
const lenR = r.length;
|
||||
const lenS = s.length;
|
||||
if (lenR === 0) throw new Error('R length is zero');
|
||||
if (lenS === 0) throw new Error('S length is zero');
|
||||
if (lenR > 33) throw new Error('R length is too long');
|
||||
if (lenS > 33) throw new Error('S length is too long');
|
||||
if (r[0] & 0x80) throw new Error('R value is negative');
|
||||
if (s[0] & 0x80) throw new Error('S value is negative');
|
||||
if (lenR > 1 && r[0] === 0x00 && !(r[1] & 0x80))
|
||||
throw new Error('R value excessively padded');
|
||||
if (lenS > 1 && s[0] === 0x00 && !(s[1] & 0x80))
|
||||
throw new Error('S value excessively padded');
|
||||
const signature = Buffer.allocUnsafe(6 + lenR + lenS);
|
||||
// 0x30 [total-length] 0x02 [R-length] [R] 0x02 [S-length] [S]
|
||||
signature[0] = 0x30;
|
||||
signature[1] = signature.length - 2;
|
||||
signature[2] = 0x02;
|
||||
signature[3] = r.length;
|
||||
r.copy(signature, 4);
|
||||
signature[4 + lenR] = 0x02;
|
||||
signature[5 + lenR] = s.length;
|
||||
s.copy(signature, 6 + lenR);
|
||||
return signature;
|
||||
}
|
||||
exports.encode = encode;
|
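As a quick illustration of the encode/decode pair above, a sketch with arbitrary 32-byte R and S placeholders whose high bits are clear (so no extra 0x00 padding is involved):

const bip66 = require('./bip66')

const r = Buffer.alloc(32, 0x11)                 // placeholder R
const s = Buffer.alloc(32, 0x22)                 // placeholder S
const der = bip66.encode(r, s)                   // 0x30 [len] 0x02 [lenR] R 0x02 [lenS] S

console.log(bip66.check(der))                    // true
const decoded = bip66.decode(der)
console.log(decoded.r.equals(r), decoded.s.equals(s)) // true true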
30
src/block.d.ts
vendored
|
@@ -1,30 +0,0 @@
|
|||
/// <reference types="node" />
|
||||
import { Transaction } from './transaction';
|
||||
export declare class Block {
|
||||
static fromBuffer(buffer: Buffer): Block;
|
||||
static fromHex(hex: string): Block;
|
||||
static calculateTarget(bits: number): Buffer;
|
||||
static calculateMerkleRoot(transactions: Transaction[], forWitness?: boolean): Buffer;
|
||||
version: number;
|
||||
prevHash?: Buffer;
|
||||
merkleRoot?: Buffer;
|
||||
timestamp: number;
|
||||
witnessCommit?: Buffer;
|
||||
bits: number;
|
||||
nonce: number;
|
||||
transactions?: Transaction[];
|
||||
getWitnessCommit(): Buffer | null;
|
||||
hasWitnessCommit(): boolean;
|
||||
hasWitness(): boolean;
|
||||
weight(): number;
|
||||
byteLength(headersOnly?: boolean, allowWitness?: boolean): number;
|
||||
getHash(): Buffer;
|
||||
getId(): string;
|
||||
getUTCDate(): Date;
|
||||
toBuffer(headersOnly?: boolean): Buffer;
|
||||
toHex(headersOnly?: boolean): string;
|
||||
checkTxRoots(): boolean;
|
||||
checkProofOfWork(): boolean;
|
||||
private __checkMerkleRoot;
|
||||
private __checkWitnessCommit;
|
||||
}
|
391
src/block.js
|
@@ -1,220 +1,177 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.Block = void 0;
|
||||
const bufferutils_1 = require('./bufferutils');
|
||||
const bcrypto = require('./crypto');
|
||||
const merkle_1 = require('./merkle');
|
||||
const transaction_1 = require('./transaction');
|
||||
const types = require('./types');
|
||||
const { typeforce } = types;
|
||||
const errorMerkleNoTxes = new TypeError(
|
||||
'Cannot compute merkle root for zero transactions',
|
||||
);
|
||||
const errorWitnessNotSegwit = new TypeError(
|
||||
'Cannot compute witness commit for non-segwit block',
|
||||
);
|
||||
class Block {
|
||||
constructor() {
|
||||
this.version = 1;
|
||||
this.prevHash = undefined;
|
||||
this.merkleRoot = undefined;
|
||||
this.timestamp = 0;
|
||||
this.witnessCommit = undefined;
|
||||
this.bits = 0;
|
||||
this.nonce = 0;
|
||||
this.transactions = undefined;
|
||||
}
|
||||
static fromBuffer(buffer) {
|
||||
if (buffer.length < 80) throw new Error('Buffer too small (< 80 bytes)');
|
||||
const bufferReader = new bufferutils_1.BufferReader(buffer);
|
||||
const block = new Block();
|
||||
block.version = bufferReader.readInt32();
|
||||
block.prevHash = bufferReader.readSlice(32);
|
||||
block.merkleRoot = bufferReader.readSlice(32);
|
||||
block.timestamp = bufferReader.readUInt32();
|
||||
block.bits = bufferReader.readUInt32();
|
||||
block.nonce = bufferReader.readUInt32();
|
||||
if (buffer.length === 80) return block;
|
||||
const readTransaction = () => {
|
||||
const tx = transaction_1.Transaction.fromBuffer(
|
||||
bufferReader.buffer.slice(bufferReader.offset),
|
||||
true,
|
||||
);
|
||||
bufferReader.offset += tx.byteLength();
|
||||
return tx;
|
||||
};
|
||||
const nTransactions = bufferReader.readVarInt();
|
||||
block.transactions = [];
|
||||
for (let i = 0; i < nTransactions; ++i) {
|
||||
const tx = readTransaction();
|
||||
block.transactions.push(tx);
|
||||
}
|
||||
const witnessCommit = block.getWitnessCommit();
|
||||
// This Block contains a witness commit
|
||||
if (witnessCommit) block.witnessCommit = witnessCommit;
|
||||
return block;
|
||||
}
|
||||
static fromHex(hex) {
|
||||
return Block.fromBuffer(Buffer.from(hex, 'hex'));
|
||||
}
|
||||
static calculateTarget(bits) {
|
||||
const exponent = ((bits & 0xff000000) >> 24) - 3;
|
||||
const mantissa = bits & 0x007fffff;
|
||||
const target = Buffer.alloc(32, 0);
|
||||
target.writeUIntBE(mantissa, 29 - exponent, 3);
|
||||
return target;
|
||||
}
|
||||
static calculateMerkleRoot(transactions, forWitness) {
|
||||
typeforce([{ getHash: types.Function }], transactions);
|
||||
if (transactions.length === 0) throw errorMerkleNoTxes;
|
||||
if (forWitness && !txesHaveWitnessCommit(transactions))
|
||||
throw errorWitnessNotSegwit;
|
||||
const hashes = transactions.map(transaction =>
|
||||
transaction.getHash(forWitness),
|
||||
);
|
||||
const rootHash = (0, merkle_1.fastMerkleRoot)(hashes, bcrypto.hash256);
|
||||
return forWitness
|
||||
? bcrypto.hash256(
|
||||
Buffer.concat([rootHash, transactions[0].ins[0].witness[0]]),
|
||||
)
|
||||
: rootHash;
|
||||
}
|
||||
getWitnessCommit() {
|
||||
if (!txesHaveWitnessCommit(this.transactions)) return null;
|
||||
// The merkle root for the witness data is in an OP_RETURN output.
|
||||
// There is no rule for the index of the output, so use filter to find it.
|
||||
// The root is prepended with 0xaa21a9ed so check for 0x6a24aa21a9ed
|
||||
// If multiple commits are found, the output with highest index is assumed.
|
||||
const witnessCommits = this.transactions[0].outs
|
||||
.filter(out =>
|
||||
out.script.slice(0, 6).equals(Buffer.from('6a24aa21a9ed', 'hex')),
|
||||
)
|
||||
.map(out => out.script.slice(6, 38));
|
||||
if (witnessCommits.length === 0) return null;
|
||||
// Use the commit with the highest output (should only be one though)
|
||||
const result = witnessCommits[witnessCommits.length - 1];
|
||||
if (!(result instanceof Buffer && result.length === 32)) return null;
|
||||
return result;
|
||||
}
|
||||
hasWitnessCommit() {
|
||||
if (
|
||||
this.witnessCommit instanceof Buffer &&
|
||||
this.witnessCommit.length === 32
|
||||
)
|
||||
return true;
|
||||
if (this.getWitnessCommit() !== null) return true;
|
||||
return false;
|
||||
}
|
||||
hasWitness() {
|
||||
return anyTxHasWitness(this.transactions);
|
||||
}
|
||||
weight() {
|
||||
const base = this.byteLength(false, false);
|
||||
const total = this.byteLength(false, true);
|
||||
return base * 3 + total;
|
||||
}
|
||||
byteLength(headersOnly, allowWitness = true) {
|
||||
if (headersOnly || !this.transactions) return 80;
|
||||
return (
|
||||
80 +
|
||||
bufferutils_1.varuint.encodingLength(this.transactions.length) +
|
||||
this.transactions.reduce((a, x) => a + x.byteLength(allowWitness), 0)
|
||||
);
|
||||
}
|
||||
getHash() {
|
||||
return bcrypto.hash256(this.toBuffer(true));
|
||||
}
|
||||
getId() {
|
||||
return (0, bufferutils_1.reverseBuffer)(this.getHash()).toString('hex');
|
||||
}
|
||||
getUTCDate() {
|
||||
const date = new Date(0); // epoch
|
||||
date.setUTCSeconds(this.timestamp);
|
||||
return date;
|
||||
}
|
||||
// TODO: buffer, offset compatibility
|
||||
toBuffer(headersOnly) {
|
||||
const buffer = Buffer.allocUnsafe(this.byteLength(headersOnly));
|
||||
const bufferWriter = new bufferutils_1.BufferWriter(buffer);
|
||||
bufferWriter.writeInt32(this.version);
|
||||
bufferWriter.writeSlice(this.prevHash);
|
||||
bufferWriter.writeSlice(this.merkleRoot);
|
||||
bufferWriter.writeUInt32(this.timestamp);
|
||||
bufferWriter.writeUInt32(this.bits);
|
||||
bufferWriter.writeUInt32(this.nonce);
|
||||
if (headersOnly || !this.transactions) return buffer;
|
||||
bufferutils_1.varuint.encode(
|
||||
this.transactions.length,
|
||||
buffer,
|
||||
bufferWriter.offset,
|
||||
);
|
||||
bufferWriter.offset += bufferutils_1.varuint.encode.bytes;
|
||||
this.transactions.forEach(tx => {
|
||||
const txSize = tx.byteLength(); // TODO: extract from toBuffer?
|
||||
tx.toBuffer(buffer, bufferWriter.offset);
|
||||
bufferWriter.offset += txSize;
|
||||
});
|
||||
return buffer;
|
||||
}
|
||||
toHex(headersOnly) {
|
||||
return this.toBuffer(headersOnly).toString('hex');
|
||||
}
|
||||
checkTxRoots() {
|
||||
// If the Block has segwit transactions but no witness commit,
|
||||
// there's no way it can be valid, so fail the check.
|
||||
const hasWitnessCommit = this.hasWitnessCommit();
|
||||
if (!hasWitnessCommit && this.hasWitness()) return false;
|
||||
return (
|
||||
this.__checkMerkleRoot() &&
|
||||
(hasWitnessCommit ? this.__checkWitnessCommit() : true)
|
||||
);
|
||||
}
|
||||
checkProofOfWork() {
|
||||
const hash = (0, bufferutils_1.reverseBuffer)(this.getHash());
|
||||
const target = Block.calculateTarget(this.bits);
|
||||
return hash.compare(target) <= 0;
|
||||
}
|
||||
__checkMerkleRoot() {
|
||||
if (!this.transactions) throw errorMerkleNoTxes;
|
||||
const actualMerkleRoot = Block.calculateMerkleRoot(this.transactions);
|
||||
return this.merkleRoot.compare(actualMerkleRoot) === 0;
|
||||
}
|
||||
__checkWitnessCommit() {
|
||||
if (!this.transactions) throw errorMerkleNoTxes;
|
||||
if (!this.hasWitnessCommit()) throw errorWitnessNotSegwit;
|
||||
const actualWitnessCommit = Block.calculateMerkleRoot(
|
||||
this.transactions,
|
||||
true,
|
||||
);
|
||||
return this.witnessCommit.compare(actualWitnessCommit) === 0;
|
||||
}
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
var bcrypto = require('./crypto')
|
||||
var fastMerkleRoot = require('merkle-lib/fastRoot')
|
||||
var typeforce = require('typeforce')
|
||||
var types = require('./types')
|
||||
var varuint = require('varuint-bitcoin')
|
||||
|
||||
var Transaction = require('./transaction')
|
||||
|
||||
function Block () {
|
||||
this.version = 1
|
||||
this.prevHash = null
|
||||
this.merkleRoot = null
|
||||
this.timestamp = 0
|
||||
this.bits = 0
|
||||
this.nonce = 0
|
||||
}
|
||||
exports.Block = Block;
|
||||
function txesHaveWitnessCommit(transactions) {
|
||||
return (
|
||||
transactions instanceof Array &&
|
||||
transactions[0] &&
|
||||
transactions[0].ins &&
|
||||
transactions[0].ins instanceof Array &&
|
||||
transactions[0].ins[0] &&
|
||||
transactions[0].ins[0].witness &&
|
||||
transactions[0].ins[0].witness instanceof Array &&
|
||||
transactions[0].ins[0].witness.length > 0
|
||||
);
|
||||
|
||||
Block.fromBuffer = function (buffer) {
|
||||
if (buffer.length < 80) throw new Error('Buffer too small (< 80 bytes)')
|
||||
|
||||
var offset = 0
|
||||
function readSlice (n) {
|
||||
offset += n
|
||||
return buffer.slice(offset - n, offset)
|
||||
}
|
||||
|
||||
function readUInt32 () {
|
||||
var i = buffer.readUInt32LE(offset)
|
||||
offset += 4
|
||||
return i
|
||||
}
|
||||
|
||||
function readInt32 () {
|
||||
var i = buffer.readInt32LE(offset)
|
||||
offset += 4
|
||||
return i
|
||||
}
|
||||
|
||||
var block = new Block()
|
||||
block.version = readInt32()
|
||||
block.prevHash = readSlice(32)
|
||||
block.merkleRoot = readSlice(32)
|
||||
block.timestamp = readUInt32()
|
||||
block.bits = readUInt32()
|
||||
block.nonce = readUInt32()
|
||||
|
||||
if (buffer.length === 80) return block
|
||||
|
||||
function readVarInt () {
|
||||
var vi = varuint.decode(buffer, offset)
|
||||
offset += varuint.decode.bytes
|
||||
return vi
|
||||
}
|
||||
|
||||
function readTransaction () {
|
||||
var tx = Transaction.fromBuffer(buffer.slice(offset), true)
|
||||
offset += tx.byteLength()
|
||||
return tx
|
||||
}
|
||||
|
||||
var nTransactions = readVarInt()
|
||||
block.transactions = []
|
||||
|
||||
for (var i = 0; i < nTransactions; ++i) {
|
||||
var tx = readTransaction()
|
||||
block.transactions.push(tx)
|
||||
}
|
||||
|
||||
return block
|
||||
}
|
||||
function anyTxHasWitness(transactions) {
|
||||
return (
|
||||
transactions instanceof Array &&
|
||||
transactions.some(
|
||||
tx =>
|
||||
typeof tx === 'object' &&
|
||||
tx.ins instanceof Array &&
|
||||
tx.ins.some(
|
||||
input =>
|
||||
typeof input === 'object' &&
|
||||
input.witness instanceof Array &&
|
||||
input.witness.length > 0,
|
||||
),
|
||||
)
|
||||
);
|
||||
|
||||
Block.prototype.byteLength = function (headersOnly) {
|
||||
if (headersOnly || !this.transactions) return 80
|
||||
|
||||
return 80 + varuint.encodingLength(this.transactions.length) + this.transactions.reduce(function (a, x) {
|
||||
return a + x.byteLength()
|
||||
}, 0)
|
||||
}
|
||||
|
||||
Block.fromHex = function (hex) {
|
||||
return Block.fromBuffer(Buffer.from(hex, 'hex'))
|
||||
}
|
||||
|
||||
Block.prototype.getHash = function () {
|
||||
return bcrypto.hash256(this.toBuffer(true))
|
||||
}
|
||||
|
||||
Block.prototype.getId = function () {
|
||||
return this.getHash().reverse().toString('hex')
|
||||
}
|
||||
|
||||
Block.prototype.getUTCDate = function () {
|
||||
var date = new Date(0) // epoch
|
||||
date.setUTCSeconds(this.timestamp)
|
||||
|
||||
return date
|
||||
}
|
||||
|
||||
// TODO: buffer, offset compatibility
|
||||
Block.prototype.toBuffer = function (headersOnly) {
|
||||
var buffer = Buffer.allocUnsafe(this.byteLength(headersOnly))
|
||||
|
||||
var offset = 0
|
||||
function writeSlice (slice) {
|
||||
slice.copy(buffer, offset)
|
||||
offset += slice.length
|
||||
}
|
||||
|
||||
function writeInt32 (i) {
|
||||
buffer.writeInt32LE(i, offset)
|
||||
offset += 4
|
||||
}
|
||||
function writeUInt32 (i) {
|
||||
buffer.writeUInt32LE(i, offset)
|
||||
offset += 4
|
||||
}
|
||||
|
||||
writeInt32(this.version)
|
||||
writeSlice(this.prevHash)
|
||||
writeSlice(this.merkleRoot)
|
||||
writeUInt32(this.timestamp)
|
||||
writeUInt32(this.bits)
|
||||
writeUInt32(this.nonce)
|
||||
|
||||
if (headersOnly || !this.transactions) return buffer
|
||||
|
||||
varuint.encode(this.transactions.length, buffer, offset)
|
||||
offset += varuint.encode.bytes
|
||||
|
||||
this.transactions.forEach(function (tx) {
|
||||
var txSize = tx.byteLength() // TODO: extract from toBuffer?
|
||||
tx.toBuffer(buffer, offset)
|
||||
offset += txSize
|
||||
})
|
||||
|
||||
return buffer
|
||||
}
|
||||
|
||||
Block.prototype.toHex = function (headersOnly) {
|
||||
return this.toBuffer(headersOnly).toString('hex')
|
||||
}
|
||||
|
||||
Block.calculateTarget = function (bits) {
|
||||
var exponent = ((bits & 0xff000000) >> 24) - 3
|
||||
var mantissa = bits & 0x007fffff
|
||||
var target = Buffer.alloc(32, 0)
|
||||
target.writeUInt32BE(mantissa, 28 - exponent)
|
||||
return target
|
||||
}
|
||||
|
||||
Block.calculateMerkleRoot = function (transactions) {
|
||||
typeforce([{ getHash: types.Function }], transactions)
|
||||
if (transactions.length === 0) throw TypeError('Cannot compute merkle root for zero transactions')
|
||||
|
||||
var hashes = transactions.map(function (transaction) {
|
||||
return transaction.getHash()
|
||||
})
|
||||
|
||||
return fastMerkleRoot(hashes, bcrypto.hash256)
|
||||
}
|
||||
|
||||
Block.prototype.checkMerkleRoot = function () {
|
||||
if (!this.transactions) return false
|
||||
|
||||
var actualMerkleRoot = Block.calculateMerkleRoot(this.transactions)
|
||||
return this.merkleRoot.compare(actualMerkleRoot) === 0
|
||||
}
|
||||
|
||||
Block.prototype.checkProofOfWork = function () {
|
||||
var hash = this.getHash().reverse()
|
||||
var target = Block.calculateTarget(this.bits)
|
||||
|
||||
return hash.compare(target) <= 0
|
||||
}
|
||||
|
||||
module.exports = Block
|
||||
|
|
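A short usage sketch for the Block class above, assuming the caller supplies an 80-byte serialized header as hex:

const { Block } = require('./block')

// Parse a serialized header and report its id, target and proof-of-work validity.
function inspectHeader (headerHex) {
  const block = Block.fromHex(headerHex)
  return {
    id: block.getId(),                           // big-endian block hash
    target: Block.calculateTarget(block.bits),   // 32-byte target buffer
    validPow: block.checkProofOfWork()           // hash <= target
  }
}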
41
src/bufferutils.d.ts
vendored
|
@@ -1,41 +0,0 @@
|
|||
/// <reference types="node" />
|
||||
import * as varuint from 'varuint-bitcoin';
|
||||
export { varuint };
|
||||
export declare function readUInt64LE(buffer: Buffer, offset: number): number;
|
||||
export declare function writeUInt64LE(buffer: Buffer, value: number, offset: number): number;
|
||||
export declare function reverseBuffer(buffer: Buffer): Buffer;
|
||||
export declare function cloneBuffer(buffer: Buffer): Buffer;
|
||||
/**
|
||||
* Helper class for serialization of bitcoin data types into a pre-allocated buffer.
|
||||
*/
|
||||
export declare class BufferWriter {
|
||||
buffer: Buffer;
|
||||
offset: number;
|
||||
static withCapacity(size: number): BufferWriter;
|
||||
constructor(buffer: Buffer, offset?: number);
|
||||
writeUInt8(i: number): void;
|
||||
writeInt32(i: number): void;
|
||||
writeUInt32(i: number): void;
|
||||
writeUInt64(i: number): void;
|
||||
writeVarInt(i: number): void;
|
||||
writeSlice(slice: Buffer): void;
|
||||
writeVarSlice(slice: Buffer): void;
|
||||
writeVector(vector: Buffer[]): void;
|
||||
end(): Buffer;
|
||||
}
|
||||
/**
|
||||
* Helper class for reading of bitcoin data types from a buffer.
|
||||
*/
|
||||
export declare class BufferReader {
|
||||
buffer: Buffer;
|
||||
offset: number;
|
||||
constructor(buffer: Buffer, offset?: number);
|
||||
readUInt8(): number;
|
||||
readInt32(): number;
|
||||
readUInt32(): number;
|
||||
readUInt64(): number;
|
||||
readVarInt(): number;
|
||||
readSlice(n: number): Buffer;
|
||||
readVarSlice(): Buffer;
|
||||
readVector(): Buffer[];
|
||||
}
|
|
@@ -1,154 +1,56 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.BufferReader = exports.BufferWriter = exports.cloneBuffer = exports.reverseBuffer = exports.writeUInt64LE = exports.readUInt64LE = exports.varuint = void 0;
|
||||
const types = require('./types');
|
||||
const { typeforce } = types;
|
||||
const varuint = require('varuint-bitcoin');
|
||||
exports.varuint = varuint;
|
||||
var pushdata = require('pushdata-bitcoin')
|
||||
var varuint = require('varuint-bitcoin')
|
||||
|
||||
// https://github.com/feross/buffer/blob/master/index.js#L1127
|
||||
function verifuint(value, max) {
|
||||
if (typeof value !== 'number')
|
||||
throw new Error('cannot write a non-number as a number');
|
||||
if (value < 0)
|
||||
throw new Error('specified a negative value for writing an unsigned value');
|
||||
if (value > max) throw new Error('RangeError: value out of range');
|
||||
if (Math.floor(value) !== value)
|
||||
throw new Error('value has a fractional component');
|
||||
function verifuint (value, max) {
|
||||
if (typeof value !== 'number') throw new Error('cannot write a non-number as a number')
|
||||
if (value < 0) throw new Error('specified a negative value for writing an unsigned value')
|
||||
if (value > max) throw new Error('RangeError: value out of range')
|
||||
if (Math.floor(value) !== value) throw new Error('value has a fractional component')
|
||||
}
|
||||
function readUInt64LE(buffer, offset) {
|
||||
const a = buffer.readUInt32LE(offset);
|
||||
let b = buffer.readUInt32LE(offset + 4);
|
||||
b *= 0x100000000;
|
||||
verifuint(b + a, 0x001fffffffffffff);
|
||||
return b + a;
|
||||
|
||||
function readUInt64LE (buffer, offset) {
|
||||
var a = buffer.readUInt32LE(offset)
|
||||
var b = buffer.readUInt32LE(offset + 4)
|
||||
b *= 0x100000000
|
||||
|
||||
verifuint(b + a, 0x001fffffffffffff)
|
||||
|
||||
return b + a
|
||||
}
|
||||
exports.readUInt64LE = readUInt64LE;
|
||||
function writeUInt64LE(buffer, value, offset) {
|
||||
verifuint(value, 0x001fffffffffffff);
|
||||
buffer.writeInt32LE(value & -1, offset);
|
||||
buffer.writeUInt32LE(Math.floor(value / 0x100000000), offset + 4);
|
||||
return offset + 8;
|
||||
|
||||
function writeUInt64LE (buffer, value, offset) {
|
||||
verifuint(value, 0x001fffffffffffff)
|
||||
|
||||
buffer.writeInt32LE(value & -1, offset)
|
||||
buffer.writeUInt32LE(Math.floor(value / 0x100000000), offset + 4)
|
||||
return offset + 8
|
||||
}
|
||||
exports.writeUInt64LE = writeUInt64LE;
|
||||
function reverseBuffer(buffer) {
|
||||
if (buffer.length < 1) return buffer;
|
||||
let j = buffer.length - 1;
|
||||
let tmp = 0;
|
||||
for (let i = 0; i < buffer.length / 2; i++) {
|
||||
tmp = buffer[i];
|
||||
buffer[i] = buffer[j];
|
||||
buffer[j] = tmp;
|
||||
j--;
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
exports.reverseBuffer = reverseBuffer;
|
||||
function cloneBuffer(buffer) {
|
||||
const clone = Buffer.allocUnsafe(buffer.length);
|
||||
buffer.copy(clone);
|
||||
return clone;
|
||||
}
|
||||
exports.cloneBuffer = cloneBuffer;
|
||||
/**
|
||||
* Helper class for serialization of bitcoin data types into a pre-allocated buffer.
|
||||
*/
|
||||
class BufferWriter {
|
||||
constructor(buffer, offset = 0) {
|
||||
this.buffer = buffer;
|
||||
this.offset = offset;
|
||||
typeforce(types.tuple(types.Buffer, types.UInt32), [buffer, offset]);
|
||||
}
|
||||
static withCapacity(size) {
|
||||
return new BufferWriter(Buffer.alloc(size));
|
||||
}
|
||||
writeUInt8(i) {
|
||||
this.offset = this.buffer.writeUInt8(i, this.offset);
|
||||
}
|
||||
writeInt32(i) {
|
||||
this.offset = this.buffer.writeInt32LE(i, this.offset);
|
||||
}
|
||||
writeUInt32(i) {
|
||||
this.offset = this.buffer.writeUInt32LE(i, this.offset);
|
||||
}
|
||||
writeUInt64(i) {
|
||||
this.offset = writeUInt64LE(this.buffer, i, this.offset);
|
||||
}
|
||||
writeVarInt(i) {
|
||||
varuint.encode(i, this.buffer, this.offset);
|
||||
this.offset += varuint.encode.bytes;
|
||||
}
|
||||
writeSlice(slice) {
|
||||
if (this.buffer.length < this.offset + slice.length) {
|
||||
throw new Error('Cannot write slice out of bounds');
|
||||
}
|
||||
this.offset += slice.copy(this.buffer, this.offset);
|
||||
}
|
||||
writeVarSlice(slice) {
|
||||
this.writeVarInt(slice.length);
|
||||
this.writeSlice(slice);
|
||||
}
|
||||
writeVector(vector) {
|
||||
this.writeVarInt(vector.length);
|
||||
vector.forEach(buf => this.writeVarSlice(buf));
|
||||
}
|
||||
end() {
|
||||
if (this.buffer.length === this.offset) {
|
||||
return this.buffer;
|
||||
}
|
||||
throw new Error(`buffer size ${this.buffer.length}, offset ${this.offset}`);
|
||||
|
||||
// TODO: remove in 4.0.0?
|
||||
function readVarInt (buffer, offset) {
|
||||
var result = varuint.decode(buffer, offset)
|
||||
|
||||
return {
|
||||
number: result,
|
||||
size: varuint.decode.bytes
|
||||
}
|
||||
}
|
||||
exports.BufferWriter = BufferWriter;
|
||||
/**
|
||||
* Helper class for reading of bitcoin data types from a buffer.
|
||||
*/
|
||||
class BufferReader {
|
||||
constructor(buffer, offset = 0) {
|
||||
this.buffer = buffer;
|
||||
this.offset = offset;
|
||||
typeforce(types.tuple(types.Buffer, types.UInt32), [buffer, offset]);
|
||||
}
|
||||
readUInt8() {
|
||||
const result = this.buffer.readUInt8(this.offset);
|
||||
this.offset++;
|
||||
return result;
|
||||
}
|
||||
readInt32() {
|
||||
const result = this.buffer.readInt32LE(this.offset);
|
||||
this.offset += 4;
|
||||
return result;
|
||||
}
|
||||
readUInt32() {
|
||||
const result = this.buffer.readUInt32LE(this.offset);
|
||||
this.offset += 4;
|
||||
return result;
|
||||
}
|
||||
readUInt64() {
|
||||
const result = readUInt64LE(this.buffer, this.offset);
|
||||
this.offset += 8;
|
||||
return result;
|
||||
}
|
||||
readVarInt() {
|
||||
const vi = varuint.decode(this.buffer, this.offset);
|
||||
this.offset += varuint.decode.bytes;
|
||||
return vi;
|
||||
}
|
||||
readSlice(n) {
|
||||
if (this.buffer.length < this.offset + n) {
|
||||
throw new Error('Cannot read slice out of bounds');
|
||||
}
|
||||
const result = this.buffer.slice(this.offset, this.offset + n);
|
||||
this.offset += n;
|
||||
return result;
|
||||
}
|
||||
readVarSlice() {
|
||||
return this.readSlice(this.readVarInt());
|
||||
}
|
||||
readVector() {
|
||||
const count = this.readVarInt();
|
||||
const vector = [];
|
||||
for (let i = 0; i < count; i++) vector.push(this.readVarSlice());
|
||||
return vector;
|
||||
}
|
||||
|
||||
// TODO: remove in 4.0.0?
|
||||
function writeVarInt (buffer, number, offset) {
|
||||
varuint.encode(number, buffer, offset)
|
||||
return varuint.encode.bytes
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
pushDataSize: pushdata.encodingLength,
|
||||
readPushDataInt: pushdata.decode,
|
||||
readUInt64LE: readUInt64LE,
|
||||
readVarInt: readVarInt,
|
||||
varIntBuffer: varuint.encode,
|
||||
varIntSize: varuint.encodingLength,
|
||||
writePushDataInt: pushdata.encode,
|
||||
writeUInt64LE: writeUInt64LE,
|
||||
writeVarInt: writeVarInt
|
||||
}
|
||||
exports.BufferReader = BufferReader;
|
||||
|
|
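The BufferWriter/BufferReader pair above is symmetric; a minimal round-trip sketch:

const { BufferWriter, BufferReader } = require('./bufferutils')

const writer = BufferWriter.withCapacity(16)     // 4 (uint32) + 8 (uint64) + 1 + 3 (varslice)
writer.writeUInt32(0xdeadbeef)
writer.writeUInt64(21000000)
writer.writeVarSlice(Buffer.from('abc'))
const buf = writer.end()                         // throws unless the capacity was filled exactly

const reader = new BufferReader(buf)
console.log(reader.readUInt32())                 // 3735928559
console.log(reader.readUInt64())                 // 21000000
console.log(reader.readVarSlice().toString())    // 'abc'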
10
src/crypto.d.ts
vendored
|
@@ -1,10 +0,0 @@
|
|||
/// <reference types="node" />
|
||||
export declare function ripemd160(buffer: Buffer): Buffer;
|
||||
export declare function sha1(buffer: Buffer): Buffer;
|
||||
export declare function sha256(buffer: Buffer): Buffer;
|
||||
export declare function hash160(buffer: Buffer): Buffer;
|
||||
export declare function hash256(buffer: Buffer): Buffer;
|
||||
declare const TAGS: readonly ["BIP0340/challenge", "BIP0340/aux", "BIP0340/nonce", "TapLeaf", "TapBranch", "TapSighash", "TapTweak", "KeyAgg list", "KeyAgg coefficient"];
|
||||
export declare type TaggedHashPrefix = typeof TAGS[number];
|
||||
export declare function taggedHash(prefix: TaggedHashPrefix, data: Buffer): Buffer;
|
||||
export {};
|
|
@@ -1,58 +1,29 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.taggedHash = exports.hash256 = exports.hash160 = exports.sha256 = exports.sha1 = exports.ripemd160 = void 0;
|
||||
const createHash = require('create-hash');
|
||||
function ripemd160(buffer) {
|
||||
try {
|
||||
return createHash('rmd160')
|
||||
.update(buffer)
|
||||
.digest();
|
||||
} catch (err) {
|
||||
return createHash('ripemd160')
|
||||
.update(buffer)
|
||||
.digest();
|
||||
}
|
||||
var createHash = require('create-hash')
|
||||
|
||||
function ripemd160 (buffer) {
|
||||
return createHash('rmd160').update(buffer).digest()
|
||||
}
|
||||
exports.ripemd160 = ripemd160;
|
||||
function sha1(buffer) {
|
||||
return createHash('sha1')
|
||||
.update(buffer)
|
||||
.digest();
|
||||
|
||||
function sha1 (buffer) {
|
||||
return createHash('sha1').update(buffer).digest()
|
||||
}
|
||||
exports.sha1 = sha1;
|
||||
function sha256(buffer) {
|
||||
return createHash('sha256')
|
||||
.update(buffer)
|
||||
.digest();
|
||||
|
||||
function sha256 (buffer) {
|
||||
return createHash('sha256').update(buffer).digest()
|
||||
}
|
||||
exports.sha256 = sha256;
|
||||
function hash160(buffer) {
|
||||
return ripemd160(sha256(buffer));
|
||||
|
||||
function hash160 (buffer) {
|
||||
return ripemd160(sha256(buffer))
|
||||
}
|
||||
exports.hash160 = hash160;
|
||||
function hash256(buffer) {
|
||||
return sha256(sha256(buffer));
|
||||
|
||||
function hash256 (buffer) {
|
||||
return sha256(sha256(buffer))
|
||||
}
|
||||
exports.hash256 = hash256;
|
||||
const TAGS = [
|
||||
'BIP0340/challenge',
|
||||
'BIP0340/aux',
|
||||
'BIP0340/nonce',
|
||||
'TapLeaf',
|
||||
'TapBranch',
|
||||
'TapSighash',
|
||||
'TapTweak',
|
||||
'KeyAgg list',
|
||||
'KeyAgg coefficient',
|
||||
];
|
||||
/** An object mapping tags to their tagged hash prefix of [SHA256(tag) | SHA256(tag)] */
|
||||
const TAGGED_HASH_PREFIXES = Object.fromEntries(
|
||||
TAGS.map(tag => {
|
||||
const tagHash = sha256(Buffer.from(tag));
|
||||
return [tag, Buffer.concat([tagHash, tagHash])];
|
||||
}),
|
||||
);
|
||||
function taggedHash(prefix, data) {
|
||||
return sha256(Buffer.concat([TAGGED_HASH_PREFIXES[prefix], data]));
|
||||
|
||||
module.exports = {
|
||||
hash160: hash160,
|
||||
hash256: hash256,
|
||||
ripemd160: ripemd160,
|
||||
sha1: sha1,
|
||||
sha256: sha256
|
||||
}
|
||||
exports.taggedHash = taggedHash;
|
||||
|
|
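The taggedHash helper above precomputes SHA256(tag) || SHA256(tag) per tag; a sketch showing it matches the manual BIP340-style construction (the message is a placeholder):

const bcrypto = require('./crypto')

const msg = Buffer.from('hello')                 // placeholder message
const tagHash = bcrypto.sha256(Buffer.from('TapTweak'))
const manual = bcrypto.sha256(Buffer.concat([tagHash, tagHash, msg]))

console.log(bcrypto.taggedHash('TapTweak', msg).equals(manual)) // true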
161
src/ecdsa.js
Normal file
|
@@ -0,0 +1,161 @@
|
|||
var Buffer = require('safe-buffer').Buffer
|
||||
var createHmac = require('create-hmac')
|
||||
var typeforce = require('typeforce')
|
||||
var types = require('./types')
|
||||
|
||||
var BigInteger = require('bigi')
|
||||
var ECSignature = require('./ecsignature')
|
||||
|
||||
var ZERO = Buffer.alloc(1, 0)
|
||||
var ONE = Buffer.alloc(1, 1)
|
||||
|
||||
var ecurve = require('ecurve')
|
||||
var secp256k1 = ecurve.getCurveByName('secp256k1')
|
||||
|
||||
// https://tools.ietf.org/html/rfc6979#section-3.2
|
||||
function deterministicGenerateK (hash, x, checkSig) {
|
||||
typeforce(types.tuple(
|
||||
types.Hash256bit,
|
||||
types.Buffer256bit,
|
||||
types.Function
|
||||
), arguments)
|
||||
|
||||
// Step A, ignored as hash already provided
|
||||
// Step B
|
||||
// Step C
|
||||
var k = Buffer.alloc(32, 0)
|
||||
var v = Buffer.alloc(32, 1)
|
||||
|
||||
// Step D
|
||||
k = createHmac('sha256', k)
|
||||
.update(v)
|
||||
.update(ZERO)
|
||||
.update(x)
|
||||
.update(hash)
|
||||
.digest()
|
||||
|
||||
// Step E
|
||||
v = createHmac('sha256', k).update(v).digest()
|
||||
|
||||
// Step F
|
||||
k = createHmac('sha256', k)
|
||||
.update(v)
|
||||
.update(ONE)
|
||||
.update(x)
|
||||
.update(hash)
|
||||
.digest()
|
||||
|
||||
// Step G
|
||||
v = createHmac('sha256', k).update(v).digest()
|
||||
|
||||
// Step H1/H2a, ignored as tlen === qlen (256 bit)
|
||||
// Step H2b
|
||||
v = createHmac('sha256', k).update(v).digest()
|
||||
|
||||
var T = BigInteger.fromBuffer(v)
|
||||
|
||||
// Step H3, repeat until T is within the interval [1, n - 1] and is suitable for ECDSA
|
||||
while (T.signum() <= 0 || T.compareTo(secp256k1.n) >= 0 || !checkSig(T)) {
|
||||
k = createHmac('sha256', k)
|
||||
.update(v)
|
||||
.update(ZERO)
|
||||
.digest()
|
||||
|
||||
v = createHmac('sha256', k).update(v).digest()
|
||||
|
||||
// Step H1/H2a, again, ignored as tlen === qlen (256 bit)
|
||||
// Step H2b again
|
||||
v = createHmac('sha256', k).update(v).digest()
|
||||
T = BigInteger.fromBuffer(v)
|
||||
}
|
||||
|
||||
return T
|
||||
}
|
||||
|
||||
var N_OVER_TWO = secp256k1.n.shiftRight(1)
|
||||
|
||||
function sign (hash, d) {
|
||||
typeforce(types.tuple(types.Hash256bit, types.BigInt), arguments)
|
||||
|
||||
var x = d.toBuffer(32)
|
||||
var e = BigInteger.fromBuffer(hash)
|
||||
var n = secp256k1.n
|
||||
var G = secp256k1.G
|
||||
|
||||
var r, s
|
||||
deterministicGenerateK(hash, x, function (k) {
|
||||
var Q = G.multiply(k)
|
||||
|
||||
if (secp256k1.isInfinity(Q)) return false
|
||||
|
||||
r = Q.affineX.mod(n)
|
||||
if (r.signum() === 0) return false
|
||||
|
||||
s = k.modInverse(n).multiply(e.add(d.multiply(r))).mod(n)
|
||||
if (s.signum() === 0) return false
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
// enforce low S values, see bip62: 'low s values in signatures'
|
||||
if (s.compareTo(N_OVER_TWO) > 0) {
|
||||
s = n.subtract(s)
|
||||
}
|
||||
|
||||
return new ECSignature(r, s)
|
||||
}
|
||||
|
||||
function verify (hash, signature, Q) {
|
||||
typeforce(types.tuple(
|
||||
types.Hash256bit,
|
||||
types.ECSignature,
|
||||
types.ECPoint
|
||||
), arguments)
|
||||
|
||||
var n = secp256k1.n
|
||||
var G = secp256k1.G
|
||||
|
||||
var r = signature.r
|
||||
var s = signature.s
|
||||
|
||||
// 1.4.1 Enforce r and s are both integers in the interval [1, n − 1]
|
||||
if (r.signum() <= 0 || r.compareTo(n) >= 0) return false
|
||||
if (s.signum() <= 0 || s.compareTo(n) >= 0) return false
|
||||
|
||||
// 1.4.2 H = Hash(M), already done by the user
|
||||
// 1.4.3 e = H
|
||||
var e = BigInteger.fromBuffer(hash)
|
||||
|
||||
// Compute s^-1
|
||||
var sInv = s.modInverse(n)
|
||||
|
||||
// 1.4.4 Compute u1 = es^−1 mod n
|
||||
// u2 = rs^−1 mod n
|
||||
var u1 = e.multiply(sInv).mod(n)
|
||||
var u2 = r.multiply(sInv).mod(n)
|
||||
|
||||
// 1.4.5 Compute R = (xR, yR)
|
||||
// R = u1G + u2Q
|
||||
var R = G.multiplyTwo(u1, Q, u2)
|
||||
|
||||
// 1.4.5 (cont.) Enforce R is not at infinity
|
||||
if (secp256k1.isInfinity(R)) return false
|
||||
|
||||
// 1.4.6 Convert the field element R.x to an integer
|
||||
var xR = R.affineX
|
||||
|
||||
// 1.4.7 Set v = xR mod n
|
||||
var v = xR.mod(n)
|
||||
|
||||
// 1.4.8 If v = r, output "valid", and if v != r, output "invalid"
|
||||
return v.equals(r)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
deterministicGenerateK: deterministicGenerateK,
|
||||
sign: sign,
|
||||
verify: verify,
|
||||
|
||||
// TODO: remove
|
||||
__curve: secp256k1
|
||||
}
|
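A sign/verify round trip for the ecdsa module above, using a placeholder private key (any value in [1, n-1] works for the sketch):

const BigInteger = require('bigi')
const bcrypto = require('./crypto')
const ecdsa = require('./ecdsa')

const d = BigInteger.fromBuffer(Buffer.alloc(32, 1))   // placeholder private key
const hash = bcrypto.sha256(Buffer.from('message'))

const signature = ecdsa.sign(hash, d)                  // deterministic k (RFC 6979), low-S enforced
const Q = ecdsa.__curve.G.multiply(d)                  // corresponding public point
console.log(ecdsa.verify(hash, signature, Q))          // true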
131
src/ecpair.js
Normal file
|
@@ -0,0 +1,131 @@
|
|||
var baddress = require('./address')
|
||||
var bcrypto = require('./crypto')
|
||||
var ecdsa = require('./ecdsa')
|
||||
var randomBytes = require('randombytes')
|
||||
var typeforce = require('typeforce')
|
||||
var types = require('./types')
|
||||
var wif = require('wif')
|
||||
|
||||
var NETWORKS = require('./networks')
|
||||
var BigInteger = require('bigi')
|
||||
|
||||
var ecurve = require('ecurve')
|
||||
var secp256k1 = ecdsa.__curve
|
||||
|
||||
function ECPair (d, Q, options) {
|
||||
if (options) {
|
||||
typeforce({
|
||||
compressed: types.maybe(types.Boolean),
|
||||
network: types.maybe(types.Network)
|
||||
}, options)
|
||||
}
|
||||
|
||||
options = options || {}
|
||||
|
||||
if (d) {
|
||||
if (d.signum() <= 0) throw new Error('Private key must be greater than 0')
|
||||
if (d.compareTo(secp256k1.n) >= 0) throw new Error('Private key must be less than the curve order')
|
||||
if (Q) throw new TypeError('Unexpected publicKey parameter')
|
||||
|
||||
this.d = d
|
||||
} else {
|
||||
typeforce(types.ECPoint, Q)
|
||||
|
||||
this.__Q = Q
|
||||
}
|
||||
|
||||
this.compressed = options.compressed === undefined ? true : options.compressed
|
||||
this.network = options.network || NETWORKS.bitcoin
|
||||
}
|
||||
|
||||
Object.defineProperty(ECPair.prototype, 'Q', {
|
||||
get: function () {
|
||||
if (!this.__Q && this.d) {
|
||||
this.__Q = secp256k1.G.multiply(this.d)
|
||||
}
|
||||
|
||||
return this.__Q
|
||||
}
|
||||
})
|
||||
|
||||
ECPair.fromPublicKeyBuffer = function (buffer, network) {
|
||||
var Q = ecurve.Point.decodeFrom(secp256k1, buffer)
|
||||
|
||||
return new ECPair(null, Q, {
|
||||
compressed: Q.compressed,
|
||||
network: network
|
||||
})
|
||||
}
|
||||
|
||||
ECPair.fromWIF = function (string, network) {
|
||||
var decoded = wif.decode(string)
|
||||
var version = decoded.version
|
||||
|
||||
// list of networks?
|
||||
if (types.Array(network)) {
|
||||
network = network.filter(function (x) {
|
||||
return version === x.wif
|
||||
}).pop()
|
||||
|
||||
if (!network) throw new Error('Unknown network version')
|
||||
|
||||
// otherwise, assume a network object (or default to bitcoin)
|
||||
} else {
|
||||
network = network || NETWORKS.bitcoin
|
||||
|
||||
if (version !== network.wif) throw new Error('Invalid network version')
|
||||
}
|
||||
|
||||
var d = BigInteger.fromBuffer(decoded.privateKey)
|
||||
|
||||
return new ECPair(d, null, {
|
||||
compressed: decoded.compressed,
|
||||
network: network
|
||||
})
|
||||
}
|
||||
|
||||
ECPair.makeRandom = function (options) {
|
||||
options = options || {}
|
||||
|
||||
var rng = options.rng || randomBytes
|
||||
|
||||
var d
|
||||
do {
|
||||
var buffer = rng(32)
|
||||
typeforce(types.Buffer256bit, buffer)
|
||||
|
||||
d = BigInteger.fromBuffer(buffer)
|
||||
} while (d.signum() <= 0 || d.compareTo(secp256k1.n) >= 0)
|
||||
|
||||
return new ECPair(d, null, options)
|
||||
}
|
||||
|
||||
ECPair.prototype.getAddress = function () {
|
||||
return baddress.toBase58Check(bcrypto.hash160(this.getPublicKeyBuffer()), this.getNetwork().pubKeyHash)
|
||||
}
|
||||
|
||||
ECPair.prototype.getNetwork = function () {
|
||||
return this.network
|
||||
}
|
||||
|
||||
ECPair.prototype.getPublicKeyBuffer = function () {
|
||||
return this.Q.getEncoded(this.compressed)
|
||||
}
|
||||
|
||||
ECPair.prototype.sign = function (hash) {
|
||||
if (!this.d) throw new Error('Missing private key')
|
||||
|
||||
return ecdsa.sign(hash, this.d)
|
||||
}
|
||||
|
||||
ECPair.prototype.toWIF = function () {
|
||||
if (!this.d) throw new Error('Missing private key')
|
||||
|
||||
return wif.encode(this.network.wif, this.d.toBuffer(32), this.compressed)
|
||||
}
|
||||
|
||||
ECPair.prototype.verify = function (hash, signature) {
|
||||
return ecdsa.verify(hash, signature, this.Q)
|
||||
}
|
||||
|
||||
module.exports = ECPair
|
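Typical usage of the ECPair class above (WIF round trip plus a sign/verify check); a sketch assuming the default bitcoin network:

const ECPair = require('./ecpair')
const bcrypto = require('./crypto')

const keyPair = ECPair.makeRandom()
const wif = keyPair.toWIF()
const restored = ECPair.fromWIF(wif)             // same key, default bitcoin network

const hash = bcrypto.sha256(Buffer.from('message'))
console.log(restored.verify(hash, keyPair.sign(hash)))  // true
console.log(keyPair.getAddress())                       // P2PKH base58check address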
97
src/ecsignature.js
Normal file
|
@@ -0,0 +1,97 @@
|
|||
var bip66 = require('bip66')
|
||||
var typeforce = require('typeforce')
|
||||
var types = require('./types')
|
||||
|
||||
var BigInteger = require('bigi')
|
||||
|
||||
function ECSignature (r, s) {
|
||||
typeforce(types.tuple(types.BigInt, types.BigInt), arguments)
|
||||
|
||||
this.r = r
|
||||
this.s = s
|
||||
}
|
||||
|
||||
ECSignature.parseCompact = function (buffer) {
|
||||
typeforce(types.BufferN(65), buffer)
|
||||
|
||||
var flagByte = buffer.readUInt8(0) - 27
|
||||
if (flagByte !== (flagByte & 7)) throw new Error('Invalid signature parameter')
|
||||
|
||||
var compressed = !!(flagByte & 4)
|
||||
var recoveryParam = flagByte & 3
|
||||
var signature = ECSignature.fromRSBuffer(buffer.slice(1))
|
||||
|
||||
return {
|
||||
compressed: compressed,
|
||||
i: recoveryParam,
|
||||
signature: signature
|
||||
}
|
||||
}
|
||||
|
||||
ECSignature.fromRSBuffer = function (buffer) {
|
||||
typeforce(types.BufferN(64), buffer)
|
||||
|
||||
var r = BigInteger.fromBuffer(buffer.slice(0, 32))
|
||||
var s = BigInteger.fromBuffer(buffer.slice(32, 64))
|
||||
return new ECSignature(r, s)
|
||||
}
|
||||
|
||||
ECSignature.fromDER = function (buffer) {
|
||||
var decode = bip66.decode(buffer)
|
||||
var r = BigInteger.fromDERInteger(decode.r)
|
||||
var s = BigInteger.fromDERInteger(decode.s)
|
||||
|
||||
return new ECSignature(r, s)
|
||||
}
|
||||
|
||||
// BIP62: 1 byte hashType flag (only 0x01, 0x02, 0x03, 0x81, 0x82 and 0x83 are allowed)
|
||||
ECSignature.parseScriptSignature = function (buffer) {
|
||||
var hashType = buffer.readUInt8(buffer.length - 1)
|
||||
var hashTypeMod = hashType & ~0x80
|
||||
|
||||
if (hashTypeMod <= 0x00 || hashTypeMod >= 0x04) throw new Error('Invalid hashType ' + hashType)
|
||||
|
||||
return {
|
||||
signature: ECSignature.fromDER(buffer.slice(0, -1)),
|
||||
hashType: hashType
|
||||
}
|
||||
}
|
||||
|
||||
ECSignature.prototype.toCompact = function (i, compressed) {
|
||||
if (compressed) {
|
||||
i += 4
|
||||
}
|
||||
|
||||
i += 27
|
||||
|
||||
var buffer = Buffer.alloc(65)
|
||||
buffer.writeUInt8(i, 0)
|
||||
this.toRSBuffer(buffer, 1)
|
||||
return buffer
|
||||
}
|
||||
|
||||
ECSignature.prototype.toDER = function () {
|
||||
var r = Buffer.from(this.r.toDERInteger())
|
||||
var s = Buffer.from(this.s.toDERInteger())
|
||||
|
||||
return bip66.encode(r, s)
|
||||
}
|
||||
|
||||
ECSignature.prototype.toRSBuffer = function (buffer, offset) {
|
||||
buffer = buffer || Buffer.alloc(64)
|
||||
this.r.toBuffer(32).copy(buffer, offset)
|
||||
this.s.toBuffer(32).copy(buffer, offset + 32)
|
||||
return buffer
|
||||
}
|
||||
|
||||
ECSignature.prototype.toScriptSignature = function (hashType) {
|
||||
var hashTypeMod = hashType & ~0x80
|
||||
if (hashTypeMod <= 0 || hashTypeMod >= 4) throw new Error('Invalid hashType ' + hashType)
|
||||
|
||||
var hashTypeBuffer = Buffer.alloc(1)
|
||||
hashTypeBuffer.writeUInt8(hashType, 0)
|
||||
|
||||
return Buffer.concat([this.toDER(), hashTypeBuffer])
|
||||
}
|
||||
|
||||
module.exports = ECSignature
|
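A round-trip sketch for the script-signature helpers above, with placeholder R/S values:

const BigInteger = require('bigi')
const ECSignature = require('./ecsignature')

const r = BigInteger.fromBuffer(Buffer.alloc(32, 0x11)) // placeholder values
const s = BigInteger.fromBuffer(Buffer.alloc(32, 0x22))
const sig = new ECSignature(r, s)

const scriptSig = sig.toScriptSignature(0x01)           // DER signature || SIGHASH_ALL byte
const parsed = ECSignature.parseScriptSignature(scriptSig)
console.log(parsed.hashType)                            // 1
console.log(parsed.signature.toDER().equals(sig.toDER())) // true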
346
src/hdnode.js
Normal file
|
@@ -0,0 +1,346 @@
|
|||
var Buffer = require('safe-buffer').Buffer
|
||||
var base58check = require('bs58check')
|
||||
var bcrypto = require('./crypto')
|
||||
var baddress = require('./address')
|
||||
var createHmac = require('create-hmac')
|
||||
var typeforce = require('typeforce')
|
||||
var types = require('./types')
|
||||
var NETWORKS = require('./networks')
|
||||
|
||||
var BigInteger = require('bigi')
|
||||
var ECPair = require('./ecpair')
|
||||
|
||||
// var KeyToScript = require('./keytoscript')
|
||||
var ecurve = require('ecurve')
|
||||
var curve = ecurve.getCurveByName('secp256k1')
|
||||
|
||||
function HDNode (keyPair, chainCode, prefix) {
|
||||
typeforce(types.tuple('ECPair', types.Buffer256bit), arguments)
|
||||
if (!prefix) {
|
||||
prefix = keyPair.network.bip32
|
||||
}
|
||||
if (!keyPair.compressed) throw new TypeError('BIP32 only allows compressed keyPairs')
|
||||
|
||||
this.keyPair = keyPair
|
||||
this.chainCode = chainCode
|
||||
this.depth = 0
|
||||
this.index = 0
|
||||
this.parentFingerprint = 0x00000000
|
||||
this.prefix = prefix
|
||||
}
|
||||
|
||||
HDNode.HIGHEST_BIT = 0x80000000
|
||||
HDNode.LENGTH = 78
|
||||
HDNode.MASTER_SECRET = Buffer.from('Bitcoin seed', 'utf8')
|
||||
|
||||
HDNode.fromSeedBuffer = function (seed, network, prefix) {
|
||||
typeforce(types.tuple(types.Buffer, types.maybe(types.Network)), arguments)
|
||||
|
||||
if (seed.length < 16) throw new TypeError('Seed should be at least 128 bits')
|
||||
if (seed.length > 64) throw new TypeError('Seed should be at most 512 bits')
|
||||
|
||||
var I = createHmac('sha512', HDNode.MASTER_SECRET).update(seed).digest()
|
||||
var IL = I.slice(0, 32)
|
||||
var IR = I.slice(32)
|
||||
|
||||
// In case IL is 0 or >= n, the master key is invalid
|
||||
// This is handled by the ECPair constructor
|
||||
var pIL = BigInteger.fromBuffer(IL)
|
||||
var keyPair = new ECPair(pIL, null, {
|
||||
network: network
|
||||
})
|
||||
|
||||
return new HDNode(keyPair, IR, prefix)
|
||||
}
|
||||
|
||||
HDNode.fromSeedHex = function (hex, network, prefix) {
|
||||
return HDNode.fromSeedBuffer(Buffer.from(hex, 'hex'), network, prefix)
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} string
|
||||
* @param {Array<network>|network} networks
|
||||
* @param {Array<prefix>} prefixes - only used if networks is object/undefined(so bitcoin).
|
||||
* @returns {HDNode}
|
||||
*/
|
||||
HDNode.fromBase58 = function (string, networks, prefixes) {
|
||||
var buffer = base58check.decode(string)
|
||||
if (buffer.length !== 78) throw new Error('Invalid buffer length')
|
||||
|
||||
// 4 bytes: version bytes
|
||||
var version = buffer.readUInt32BE(0)
|
||||
var network
|
||||
|
||||
// list of networks?
|
||||
var prefix
|
||||
if (Array.isArray(networks)) {
|
||||
network = networks.filter(function (x) {
|
||||
return version === x.bip32.private ||
|
||||
version === x.bip32.public
|
||||
}).pop()
|
||||
|
||||
if (!network) throw new Error('Unknown network version')
|
||||
|
||||
// we found a network by it's bip32 prefixes, use that
|
||||
prefix = network.bip32
|
||||
|
||||
// otherwise, assume a network object (or default to bitcoin)
|
||||
} else {
|
||||
network = networks || NETWORKS.bitcoin
|
||||
if (prefixes) {
|
||||
prefix = prefixes.filter(function (x) {
|
||||
return version === x.private ||
|
||||
version === x.public
|
||||
}).pop()
|
||||
} else {
|
||||
// no special prefixes to consider, use networks bip32 prefix
|
||||
prefix = network.bip32
|
||||
}
|
||||
}
|
||||
|
||||
// sanity check the version against the prefix
|
||||
if (version !== prefix.private &&
|
||||
version !== prefix.public) throw new Error('Invalid network version')
|
||||
|
||||
// 1 byte: depth: 0x00 for master nodes, 0x01 for level-1 descendants, ...
|
||||
var depth = buffer[4]
|
||||
|
||||
// 4 bytes: the fingerprint of the parent's key (0x00000000 if master key)
|
||||
var parentFingerprint = buffer.readUInt32BE(5)
|
||||
if (depth === 0) {
|
||||
if (parentFingerprint !== 0x00000000) throw new Error('Invalid parent fingerprint')
|
||||
}
|
||||
|
||||
// 4 bytes: child number. This is the number i in xi = xpar/i, with xi the key being serialized.
|
||||
// This is encoded in MSB order. (0x00000000 if master key)
|
||||
var index = buffer.readUInt32BE(9)
|
||||
if (depth === 0 && index !== 0) throw new Error('Invalid index')
|
||||
|
||||
// 32 bytes: the chain code
|
||||
var chainCode = buffer.slice(13, 45)
|
||||
var keyPair
|
||||
|
||||
// 33 bytes: private key data (0x00 + k)
|
||||
if (version === network.bip32.private) {
|
||||
if (buffer.readUInt8(45) !== 0x00) throw new Error('Invalid private key')
|
||||
|
||||
var d = BigInteger.fromBuffer(buffer.slice(46, 78))
|
||||
keyPair = new ECPair(d, null, { network: network })
|
||||
|
||||
// 33 bytes: public key data (0x02 + X or 0x03 + X)
|
||||
} else {
|
||||
var Q = ecurve.Point.decodeFrom(curve, buffer.slice(45, 78))
|
||||
// Q.compressed is assumed, if somehow this assumption is broken, `new HDNode` will throw
|
||||
|
||||
// Verify that the X coordinate in the public point corresponds to a point on the curve.
|
||||
// If not, the extended public key is invalid.
|
||||
curve.validate(Q)
|
||||
|
||||
keyPair = new ECPair(null, Q, { network: network })
|
||||
}
|
||||
|
||||
var hd = new HDNode(keyPair, chainCode, prefix)
|
||||
hd.depth = depth
|
||||
hd.index = index
|
||||
hd.parentFingerprint = parentFingerprint
|
||||
|
||||
return hd
|
||||
}
|
||||
|
||||
HDNode.prototype.getScriptData = function () {
|
||||
return this.prefix.scriptFactory.convert(this.keyPair)
|
||||
}
|
||||
|
||||
HDNode.prototype.getAddress = function () {
|
||||
var scriptData = this.getScriptData()
|
||||
return baddress.fromOutputScript(scriptData.scriptPubKey, this.keyPair.network)
|
||||
}
|
||||
|
||||
HDNode.prototype.getIdentifier = function () {
|
||||
return bcrypto.hash160(this.keyPair.getPublicKeyBuffer())
|
||||
}
|
||||
|
||||
HDNode.prototype.getFingerprint = function () {
|
||||
return this.getIdentifier().slice(0, 4)
|
||||
}
|
||||
|
||||
HDNode.prototype.getNetwork = function () {
|
||||
return this.keyPair.getNetwork()
|
||||
}
|
||||
|
||||
HDNode.prototype.getPublicKeyBuffer = function () {
|
||||
return this.keyPair.getPublicKeyBuffer()
|
||||
}
|
||||
|
||||
HDNode.prototype.neutered = function () {
|
||||
var neuteredKeyPair = new ECPair(null, this.keyPair.Q, {
|
||||
network: this.keyPair.network
|
||||
})
|
||||
|
||||
var neutered = new HDNode(neuteredKeyPair, this.chainCode, this.prefix)
|
||||
neutered.depth = this.depth
|
||||
neutered.index = this.index
|
||||
neutered.parentFingerprint = this.parentFingerprint
|
||||
|
||||
return neutered
|
||||
}
|
||||
|
||||
HDNode.prototype.sign = function (hash) {
|
||||
return this.keyPair.sign(hash)
|
||||
}
|
||||
|
||||
HDNode.prototype.verify = function (hash, signature) {
|
||||
return this.keyPair.verify(hash, signature)
|
||||
}
|
||||
|
||||
HDNode.prototype.toBase58 = function (__isPrivate) {
|
||||
if (__isPrivate !== undefined) throw new TypeError('Unsupported argument in 2.0.0')
|
||||
|
||||
// Version
|
||||
var version = (!this.isNeutered()) ? this.prefix.private : this.prefix.public
|
||||
var buffer = Buffer.allocUnsafe(78)
|
||||
|
||||
// 4 bytes: version bytes
|
||||
buffer.writeUInt32BE(version, 0)
|
||||
|
||||
// 1 byte: depth: 0x00 for master nodes, 0x01 for level-1 descendants, ....
|
||||
buffer.writeUInt8(this.depth, 4)
|
||||
|
||||
// 4 bytes: the fingerprint of the parent's key (0x00000000 if master key)
|
||||
buffer.writeUInt32BE(this.parentFingerprint, 5)
|
||||
|
||||
// 4 bytes: child number. This is the number i in xi = xpar/i, with xi the key being serialized.
|
||||
// This is encoded in big endian. (0x00000000 if master key)
|
||||
buffer.writeUInt32BE(this.index, 9)
|
||||
|
||||
// 32 bytes: the chain code
|
||||
this.chainCode.copy(buffer, 13)
|
||||
|
||||
// 33 bytes: the public key or private key data
|
||||
if (!this.isNeutered()) {
|
||||
// 0x00 + k for private keys
|
||||
buffer.writeUInt8(0, 45)
|
||||
this.keyPair.d.toBuffer(32).copy(buffer, 46)
|
||||
|
||||
// 33 bytes: the public key
|
||||
} else {
|
||||
// X9.62 encoding for public keys
|
||||
this.keyPair.getPublicKeyBuffer().copy(buffer, 45)
|
||||
}
|
||||
|
||||
return base58check.encode(buffer)
|
||||
}
|
||||
|
||||
// https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#child-key-derivation-ckd-functions
|
||||
HDNode.prototype.derive = function (index) {
|
||||
typeforce(types.UInt32, index)
|
||||
|
||||
var isHardened = index >= HDNode.HIGHEST_BIT
|
||||
var data = Buffer.allocUnsafe(37)
|
||||
|
||||
// Hardened child
|
||||
if (isHardened) {
|
||||
if (this.isNeutered()) throw new TypeError('Could not derive hardened child key')
|
||||
|
||||
// data = 0x00 || ser256(kpar) || ser32(index)
|
||||
data[0] = 0x00
|
||||
this.keyPair.d.toBuffer(32).copy(data, 1)
|
||||
data.writeUInt32BE(index, 33)
|
||||
|
||||
// Normal child
|
||||
} else {
|
||||
// data = serP(point(kpar)) || ser32(index)
|
||||
// = serP(Kpar) || ser32(index)
|
||||
this.keyPair.getPublicKeyBuffer().copy(data, 0)
|
||||
data.writeUInt32BE(index, 33)
|
||||
}
|
||||
|
||||
var I = createHmac('sha512', this.chainCode).update(data).digest()
|
||||
var IL = I.slice(0, 32)
|
||||
var IR = I.slice(32)
|
||||
|
||||
var pIL = BigInteger.fromBuffer(IL)
|
||||
|
||||
// In case parse256(IL) >= n, proceed with the next value for i
|
||||
if (pIL.compareTo(curve.n) >= 0) {
|
||||
return this.derive(index + 1)
|
||||
}
|
||||
|
||||
// Private parent key -> private child key
|
||||
var derivedKeyPair
|
||||
if (!this.isNeutered()) {
|
||||
// ki = parse256(IL) + kpar (mod n)
|
||||
var ki = pIL.add(this.keyPair.d).mod(curve.n)
|
||||
|
||||
// In case ki == 0, proceed with the next value for i
|
||||
if (ki.signum() === 0) {
|
||||
return this.derive(index + 1)
|
||||
}
|
||||
|
||||
derivedKeyPair = new ECPair(ki, null, {
|
||||
network: this.keyPair.network
|
||||
})
|
||||
|
||||
// Public parent key -> public child key
|
||||
} else {
|
||||
// Ki = point(parse256(IL)) + Kpar
|
||||
// = G*IL + Kpar
|
||||
var Ki = curve.G.multiply(pIL).add(this.keyPair.Q)
|
||||
|
||||
// In case Ki is the point at infinity, proceed with the next value for i
|
||||
if (curve.isInfinity(Ki)) {
|
||||
return this.derive(index + 1)
|
||||
}
|
||||
|
||||
derivedKeyPair = new ECPair(null, Ki, {
|
||||
network: this.keyPair.network
|
||||
})
|
||||
}
|
||||
|
||||
var hd = new HDNode(derivedKeyPair, IR, this.prefix)
|
||||
hd.depth = this.depth + 1
|
||||
hd.index = index
|
||||
hd.parentFingerprint = this.getFingerprint().readUInt32BE(0)
|
||||
|
||||
return hd
|
||||
}
|
||||
|
||||
HDNode.prototype.deriveHardened = function (index) {
|
||||
typeforce(types.UInt31, index)
|
||||
|
||||
// Only derives hardened private keys by default
|
||||
return this.derive(index + HDNode.HIGHEST_BIT)
|
||||
}
|
||||
|
||||
// Private === not neutered
|
||||
// Public === neutered
|
||||
HDNode.prototype.isNeutered = function () {
|
||||
return !(this.keyPair.d)
|
||||
}
|
||||
|
||||
HDNode.prototype.derivePath = function (path) {
|
||||
typeforce(types.BIP32Path, path)
|
||||
|
||||
var splitPath = path.split('/')
|
||||
if (splitPath[0] === 'm') {
|
||||
if (this.parentFingerprint) {
|
||||
throw new Error('Not a master node')
|
||||
}
|
||||
|
||||
splitPath = splitPath.slice(1)
|
||||
}
|
||||
|
||||
return splitPath.reduce(function (prevHd, indexStr) {
|
||||
var index
|
||||
if (indexStr.slice(-1) === "'") {
|
||||
index = parseInt(indexStr.slice(0, -1), 10)
|
||||
return prevHd.deriveHardened(index)
|
||||
} else {
|
||||
index = parseInt(indexStr, 10)
|
||||
return prevHd.derive(index)
|
||||
}
|
||||
}, this)
|
||||
}
|
||||
|
||||
module.exports = HDNode
|
14
src/index.d.ts
vendored
14
src/index.d.ts
vendored
|
@ -1,14 +0,0 @@
|
|||
import * as address from './address';
|
||||
import * as crypto from './crypto';
|
||||
import * as networks from './networks';
|
||||
import * as payments from './payments';
|
||||
import * as script from './script';
|
||||
export { address, crypto, networks, payments, script };
|
||||
export { Block } from './block';
|
||||
export { TaggedHashPrefix } from './crypto';
|
||||
export { Psbt, PsbtTxInput, PsbtTxOutput, Signer, SignerAsync, HDSigner, HDSignerAsync, } from './psbt';
|
||||
export { OPS as opcodes } from './ops';
|
||||
export { Transaction } from './transaction';
|
||||
export { Network } from './networks';
|
||||
export { Payment, PaymentCreator, PaymentOpts, Stack, StackElement, } from './payments';
|
||||
export { Input as TxInput, Output as TxOutput } from './transaction';
|
64
src/index.js
64
src/index.js
|
@ -1,41 +1,23 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.Transaction = exports.opcodes = exports.Psbt = exports.Block = exports.script = exports.payments = exports.networks = exports.crypto = exports.address = void 0;
|
||||
const address = require('./address');
|
||||
exports.address = address;
|
||||
const crypto = require('./crypto');
|
||||
exports.crypto = crypto;
|
||||
const networks = require('./networks');
|
||||
exports.networks = networks;
|
||||
const payments = require('./payments');
|
||||
exports.payments = payments;
|
||||
const script = require('./script');
|
||||
exports.script = script;
|
||||
var block_1 = require('./block');
|
||||
Object.defineProperty(exports, 'Block', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return block_1.Block;
|
||||
},
|
||||
});
|
||||
var psbt_1 = require('./psbt');
|
||||
Object.defineProperty(exports, 'Psbt', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return psbt_1.Psbt;
|
||||
},
|
||||
});
|
||||
var ops_1 = require('./ops');
|
||||
Object.defineProperty(exports, 'opcodes', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return ops_1.OPS;
|
||||
},
|
||||
});
|
||||
var transaction_1 = require('./transaction');
|
||||
Object.defineProperty(exports, 'Transaction', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return transaction_1.Transaction;
|
||||
},
|
||||
});
|
||||
var script = require('./script')
|
||||
|
||||
var templates = require('./templates')
|
||||
for (var key in templates) {
|
||||
script[key] = templates[key]
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
bufferutils: require('./bufferutils'), // TODO: remove in 4.0.0
|
||||
|
||||
Block: require('./block'),
|
||||
ECPair: require('./ecpair'),
|
||||
ECSignature: require('./ecsignature'),
|
||||
HDNode: require('./hdnode'),
|
||||
Transaction: require('./transaction'),
|
||||
TransactionBuilder: require('./transaction_builder'),
|
||||
|
||||
address: require('./address'),
|
||||
crypto: require('./crypto'),
|
||||
networks: require('./networks'),
|
||||
opcodes: require('bitcoin-ops'),
|
||||
script: script
|
||||
}
|
||||
|
|
124
src/keytoscript.js
Normal file
124
src/keytoscript.js
Normal file
|
@ -0,0 +1,124 @@
|
|||
var bcrypto = require('./crypto')
|
||||
var btemplates = require('./templates')
|
||||
|
||||
function checkAllowedP2sh (keyFactory) {
|
||||
if (!(keyFactory instanceof P2pkhFactory ||
|
||||
keyFactory instanceof P2wpkhFactory ||
|
||||
keyFactory instanceof P2pkFactory
|
||||
)) {
|
||||
throw new Error('Unsupported script factory for P2SH')
|
||||
}
|
||||
}
|
||||
|
||||
function checkAllowedP2wsh (keyFactory) {
|
||||
if (!(keyFactory instanceof P2pkhFactory ||
|
||||
keyFactory instanceof P2pkFactory
|
||||
)) {
|
||||
throw new Error('Unsupported script factory for P2SH')
|
||||
}
|
||||
}
|
||||
|
||||
var P2pkFactory = function () {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {bitcoin.ECPair} key
|
||||
*/
|
||||
P2pkFactory.prototype.convert = function (key) {
|
||||
return {
|
||||
scriptPubKey: btemplates.pubKey.output.encode(key.getPublicKeyBuffer()),
|
||||
signData: {}
|
||||
}
|
||||
}
|
||||
|
||||
var P2pkhFactory = function () {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {bitcoin.ECPair} key
|
||||
*/
|
||||
P2pkhFactory.prototype.convert = function (key) {
|
||||
var hash160 = bcrypto.hash160(key.getPublicKeyBuffer())
|
||||
return {
|
||||
scriptPubKey: btemplates.pubKeyHash.output.encode(hash160),
|
||||
signData: {}
|
||||
}
|
||||
}
|
||||
|
||||
var P2wpkhFactory = function () {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {bitcoin.ECPair} key
|
||||
*/
|
||||
P2wpkhFactory.prototype.convert = function (key) {
|
||||
var hash160 = bcrypto.hash160(key.getPublicKeyBuffer())
|
||||
return {
|
||||
scriptPubKey: btemplates.witnessPubKeyHash.output.encode(hash160),
|
||||
signData: {}
|
||||
}
|
||||
}
|
||||
|
||||
var P2shFactory = function (keyFactory) {
|
||||
checkAllowedP2sh(keyFactory)
|
||||
this.factory = keyFactory
|
||||
}
|
||||
|
||||
P2shFactory.prototype.convert = function (key) {
|
||||
var detail = this.factory.convert(key)
|
||||
var hash160 = bcrypto.hash160(detail.scriptPubKey)
|
||||
return {
|
||||
scriptPubKey: btemplates.scriptHash.output.encode(hash160),
|
||||
signData: {
|
||||
redeemScript: detail.scriptPubKey
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var P2wshFactory = function (keyFactory) {
|
||||
checkAllowedP2wsh(keyFactory)
|
||||
this.factory = keyFactory
|
||||
}
|
||||
|
||||
P2wshFactory.prototype.convert = function (key) {
|
||||
var detail = this.factory.convert(key)
|
||||
var hash160 = bcrypto.hash160(detail.scriptPubKey)
|
||||
return {
|
||||
scriptPubKey: btemplates.scriptHash.output.encode(hash160),
|
||||
signData: {
|
||||
redeemScript: detail.scriptPubKey
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var P2shP2wshFactory = function (keyFactory) {
|
||||
checkAllowedP2wsh(keyFactory)
|
||||
this.factory = keyFactory
|
||||
}
|
||||
|
||||
P2shP2wshFactory.prototype.convert = function (key) {
|
||||
var detail = this.factory.convert(key)
|
||||
var sha256 = bcrypto.sha256(detail.scriptPubKey)
|
||||
var wp = btemplates.witnessScriptHash.output.encode(sha256)
|
||||
var hash160 = bcrypto.hash160(wp)
|
||||
var spk = btemplates.scriptHash.output.encode(hash160)
|
||||
return {
|
||||
scriptPubKey: spk,
|
||||
signData: {
|
||||
redeemScript: wp,
|
||||
witnessScript: detail.scriptPubKey
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
P2pkhFactory: P2pkhFactory,
|
||||
P2wpkhFactory: P2wpkhFactory,
|
||||
P2pkFactory: P2pkFactory,
|
||||
P2shFactory: P2shFactory,
|
||||
P2wshFactory: P2wshFactory,
|
||||
P2shP2wshFactory: P2shP2wshFactory
|
||||
}
|
2
src/merkle.d.ts
vendored
2
src/merkle.d.ts
vendored
|
@ -1,2 +0,0 @@
|
|||
/// <reference types="node" />
|
||||
export declare function fastMerkleRoot(values: Buffer[], digestFn: (b: Buffer) => Buffer): Buffer;
|
|
@ -1,22 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.fastMerkleRoot = void 0;
|
||||
function fastMerkleRoot(values, digestFn) {
|
||||
if (!Array.isArray(values)) throw TypeError('Expected values Array');
|
||||
if (typeof digestFn !== 'function')
|
||||
throw TypeError('Expected digest Function');
|
||||
let length = values.length;
|
||||
const results = values.concat();
|
||||
while (length > 1) {
|
||||
let j = 0;
|
||||
for (let i = 0; i < length; i += 2, ++j) {
|
||||
const left = results[i];
|
||||
const right = i + 1 === length ? left : results[i + 1];
|
||||
const data = Buffer.concat([left, right]);
|
||||
results[j] = digestFn(data);
|
||||
}
|
||||
length = j;
|
||||
}
|
||||
return results[0];
|
||||
}
|
||||
exports.fastMerkleRoot = fastMerkleRoot;
|
16
src/networks.d.ts
vendored
16
src/networks.d.ts
vendored
|
@ -1,16 +0,0 @@
|
|||
export interface Network {
|
||||
messagePrefix: string;
|
||||
bech32: string;
|
||||
bip32: Bip32;
|
||||
pubKeyHash: number;
|
||||
scriptHash: number;
|
||||
wif: number;
|
||||
}
|
||||
interface Bip32 {
|
||||
public: number;
|
||||
private: number;
|
||||
}
|
||||
export declare const bitcoin: Network;
|
||||
export declare const regtest: Network;
|
||||
export declare const testnet: Network;
|
||||
export {};
|
|
@ -1,36 +1,54 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.testnet = exports.regtest = exports.bitcoin = void 0;
|
||||
exports.bitcoin = {
|
||||
messagePrefix: '\x18Bitcoin Signed Message:\n',
|
||||
bech32: 'bc',
|
||||
bip32: {
|
||||
public: 0x0488b21e,
|
||||
private: 0x0488ade4,
|
||||
var KeyToScript = require('./keytoscript')
|
||||
// https://en.bitcoin.it/wiki/List_of_address_prefixes
|
||||
// Dogecoin BIP32 is a proposed standard: https://bitcointalk.org/index.php?topic=409731
|
||||
|
||||
var p2pkh = new KeyToScript.P2pkhFactory()
|
||||
var p2wpkh = new KeyToScript.P2wpkhFactory()
|
||||
|
||||
module.exports = {
|
||||
bitcoin: {
|
||||
messagePrefix: '\x18Bitcoin Signed Message:\n',
|
||||
bech32: 'bc',
|
||||
bip32: {
|
||||
public: 0x0488b21e,
|
||||
private: 0x0488ade4,
|
||||
scriptFactory: p2pkh
|
||||
},
|
||||
bip49: {
|
||||
private: 0x049d7878,
|
||||
public: 0x049d7cb2,
|
||||
scriptFactory: new KeyToScript.P2shFactory(p2wpkh)
|
||||
},
|
||||
bip84: {
|
||||
private: 0x04b2430c,
|
||||
public: 0x04b24746,
|
||||
scriptFactory: p2wpkh
|
||||
},
|
||||
pubKeyHash: 0x00,
|
||||
scriptHash: 0x05,
|
||||
wif: 0x80
|
||||
},
|
||||
pubKeyHash: 0x00,
|
||||
scriptHash: 0x05,
|
||||
wif: 0x80,
|
||||
};
|
||||
exports.regtest = {
|
||||
messagePrefix: '\x18Bitcoin Signed Message:\n',
|
||||
bech32: 'bcrt',
|
||||
bip32: {
|
||||
public: 0x043587cf,
|
||||
private: 0x04358394,
|
||||
testnet: {
|
||||
messagePrefix: '\x18Bitcoin Signed Message:\n',
|
||||
bech32: 'tb',
|
||||
bip32: {
|
||||
public: 0x043587cf,
|
||||
private: 0x04358394,
|
||||
scriptFactory: p2pkh
|
||||
},
|
||||
pubKeyHash: 0x6f,
|
||||
scriptHash: 0xc4,
|
||||
wif: 0xef
|
||||
},
|
||||
pubKeyHash: 0x6f,
|
||||
scriptHash: 0xc4,
|
||||
wif: 0xef,
|
||||
};
|
||||
exports.testnet = {
|
||||
messagePrefix: '\x18Bitcoin Signed Message:\n',
|
||||
bech32: 'tb',
|
||||
bip32: {
|
||||
public: 0x043587cf,
|
||||
private: 0x04358394,
|
||||
},
|
||||
pubKeyHash: 0x6f,
|
||||
scriptHash: 0xc4,
|
||||
wif: 0xef,
|
||||
};
|
||||
litecoin: {
|
||||
messagePrefix: '\x19Litecoin Signed Message:\n',
|
||||
bip32: {
|
||||
public: 0x019da462,
|
||||
private: 0x019d9cfe,
|
||||
scriptFactory: p2pkh
|
||||
},
|
||||
pubKeyHash: 0x30,
|
||||
scriptHash: 0x32,
|
||||
wif: 0xb0
|
||||
}
|
||||
}
|
||||
|
|
7
src/ops.d.ts
vendored
7
src/ops.d.ts
vendored
|
@ -1,7 +0,0 @@
|
|||
declare const OPS: {
|
||||
[key: string]: number;
|
||||
};
|
||||
declare const REVERSE_OPS: {
|
||||
[key: number]: string;
|
||||
};
|
||||
export { OPS, REVERSE_OPS };
|
130
src/ops.js
130
src/ops.js
|
@ -1,130 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.REVERSE_OPS = exports.OPS = void 0;
|
||||
const OPS = {
|
||||
OP_FALSE: 0,
|
||||
OP_0: 0,
|
||||
OP_PUSHDATA1: 76,
|
||||
OP_PUSHDATA2: 77,
|
||||
OP_PUSHDATA4: 78,
|
||||
OP_1NEGATE: 79,
|
||||
OP_RESERVED: 80,
|
||||
OP_TRUE: 81,
|
||||
OP_1: 81,
|
||||
OP_2: 82,
|
||||
OP_3: 83,
|
||||
OP_4: 84,
|
||||
OP_5: 85,
|
||||
OP_6: 86,
|
||||
OP_7: 87,
|
||||
OP_8: 88,
|
||||
OP_9: 89,
|
||||
OP_10: 90,
|
||||
OP_11: 91,
|
||||
OP_12: 92,
|
||||
OP_13: 93,
|
||||
OP_14: 94,
|
||||
OP_15: 95,
|
||||
OP_16: 96,
|
||||
OP_NOP: 97,
|
||||
OP_VER: 98,
|
||||
OP_IF: 99,
|
||||
OP_NOTIF: 100,
|
||||
OP_VERIF: 101,
|
||||
OP_VERNOTIF: 102,
|
||||
OP_ELSE: 103,
|
||||
OP_ENDIF: 104,
|
||||
OP_VERIFY: 105,
|
||||
OP_RETURN: 106,
|
||||
OP_TOALTSTACK: 107,
|
||||
OP_FROMALTSTACK: 108,
|
||||
OP_2DROP: 109,
|
||||
OP_2DUP: 110,
|
||||
OP_3DUP: 111,
|
||||
OP_2OVER: 112,
|
||||
OP_2ROT: 113,
|
||||
OP_2SWAP: 114,
|
||||
OP_IFDUP: 115,
|
||||
OP_DEPTH: 116,
|
||||
OP_DROP: 117,
|
||||
OP_DUP: 118,
|
||||
OP_NIP: 119,
|
||||
OP_OVER: 120,
|
||||
OP_PICK: 121,
|
||||
OP_ROLL: 122,
|
||||
OP_ROT: 123,
|
||||
OP_SWAP: 124,
|
||||
OP_TUCK: 125,
|
||||
OP_CAT: 126,
|
||||
OP_SUBSTR: 127,
|
||||
OP_LEFT: 128,
|
||||
OP_RIGHT: 129,
|
||||
OP_SIZE: 130,
|
||||
OP_INVERT: 131,
|
||||
OP_AND: 132,
|
||||
OP_OR: 133,
|
||||
OP_XOR: 134,
|
||||
OP_EQUAL: 135,
|
||||
OP_EQUALVERIFY: 136,
|
||||
OP_RESERVED1: 137,
|
||||
OP_RESERVED2: 138,
|
||||
OP_1ADD: 139,
|
||||
OP_1SUB: 140,
|
||||
OP_2MUL: 141,
|
||||
OP_2DIV: 142,
|
||||
OP_NEGATE: 143,
|
||||
OP_ABS: 144,
|
||||
OP_NOT: 145,
|
||||
OP_0NOTEQUAL: 146,
|
||||
OP_ADD: 147,
|
||||
OP_SUB: 148,
|
||||
OP_MUL: 149,
|
||||
OP_DIV: 150,
|
||||
OP_MOD: 151,
|
||||
OP_LSHIFT: 152,
|
||||
OP_RSHIFT: 153,
|
||||
OP_BOOLAND: 154,
|
||||
OP_BOOLOR: 155,
|
||||
OP_NUMEQUAL: 156,
|
||||
OP_NUMEQUALVERIFY: 157,
|
||||
OP_NUMNOTEQUAL: 158,
|
||||
OP_LESSTHAN: 159,
|
||||
OP_GREATERTHAN: 160,
|
||||
OP_LESSTHANOREQUAL: 161,
|
||||
OP_GREATERTHANOREQUAL: 162,
|
||||
OP_MIN: 163,
|
||||
OP_MAX: 164,
|
||||
OP_WITHIN: 165,
|
||||
OP_RIPEMD160: 166,
|
||||
OP_SHA1: 167,
|
||||
OP_SHA256: 168,
|
||||
OP_HASH160: 169,
|
||||
OP_HASH256: 170,
|
||||
OP_CODESEPARATOR: 171,
|
||||
OP_CHECKSIG: 172,
|
||||
OP_CHECKSIGVERIFY: 173,
|
||||
OP_CHECKMULTISIG: 174,
|
||||
OP_CHECKMULTISIGVERIFY: 175,
|
||||
OP_NOP1: 176,
|
||||
OP_NOP2: 177,
|
||||
OP_CHECKLOCKTIMEVERIFY: 177,
|
||||
OP_NOP3: 178,
|
||||
OP_CHECKSEQUENCEVERIFY: 178,
|
||||
OP_NOP4: 179,
|
||||
OP_NOP5: 180,
|
||||
OP_NOP6: 181,
|
||||
OP_NOP7: 182,
|
||||
OP_NOP8: 183,
|
||||
OP_NOP9: 184,
|
||||
OP_NOP10: 185,
|
||||
OP_PUBKEYHASH: 253,
|
||||
OP_PUBKEY: 254,
|
||||
OP_INVALIDOPCODE: 255,
|
||||
};
|
||||
exports.OPS = OPS;
|
||||
const REVERSE_OPS = {};
|
||||
exports.REVERSE_OPS = REVERSE_OPS;
|
||||
for (const op of Object.keys(OPS)) {
|
||||
const code = OPS[op];
|
||||
REVERSE_OPS[code] = op;
|
||||
}
|
2
src/payments/embed.d.ts
vendored
2
src/payments/embed.d.ts
vendored
|
@ -1,2 +0,0 @@
|
|||
import { Payment, PaymentOpts } from './index';
|
||||
export declare function p2data(a: Payment, opts?: PaymentOpts): Payment;
|
|
@ -1,52 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.p2data = void 0;
|
||||
const networks_1 = require('../networks');
|
||||
const bscript = require('../script');
|
||||
const types_1 = require('../types');
|
||||
const lazy = require('./lazy');
|
||||
const OPS = bscript.OPS;
|
||||
function stacksEqual(a, b) {
|
||||
if (a.length !== b.length) return false;
|
||||
return a.every((x, i) => {
|
||||
return x.equals(b[i]);
|
||||
});
|
||||
}
|
||||
// output: OP_RETURN ...
|
||||
function p2data(a, opts) {
|
||||
if (!a.data && !a.output) throw new TypeError('Not enough data');
|
||||
opts = Object.assign({ validate: true }, opts || {});
|
||||
(0, types_1.typeforce)(
|
||||
{
|
||||
network: types_1.typeforce.maybe(types_1.typeforce.Object),
|
||||
output: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
data: types_1.typeforce.maybe(
|
||||
types_1.typeforce.arrayOf(types_1.typeforce.Buffer),
|
||||
),
|
||||
},
|
||||
a,
|
||||
);
|
||||
const network = a.network || networks_1.bitcoin;
|
||||
const o = { name: 'embed', network };
|
||||
lazy.prop(o, 'output', () => {
|
||||
if (!a.data) return;
|
||||
return bscript.compile([OPS.OP_RETURN].concat(a.data));
|
||||
});
|
||||
lazy.prop(o, 'data', () => {
|
||||
if (!a.output) return;
|
||||
return bscript.decompile(a.output).slice(1);
|
||||
});
|
||||
// extended validation
|
||||
if (opts.validate) {
|
||||
if (a.output) {
|
||||
const chunks = bscript.decompile(a.output);
|
||||
if (chunks[0] !== OPS.OP_RETURN) throw new TypeError('Output is invalid');
|
||||
if (!chunks.slice(1).every(types_1.typeforce.Buffer))
|
||||
throw new TypeError('Output is invalid');
|
||||
if (a.data && !stacksEqual(a.data, o.data))
|
||||
throw new TypeError('Data mismatch');
|
||||
}
|
||||
}
|
||||
return Object.assign(o, a);
|
||||
}
|
||||
exports.p2data = p2data;
|
36
src/payments/index.d.ts
vendored
36
src/payments/index.d.ts
vendored
|
@ -1,36 +0,0 @@
|
|||
/// <reference types="node" />
|
||||
import { Network } from '../networks';
|
||||
import { p2data as embed } from './embed';
|
||||
import { p2ms } from './p2ms';
|
||||
import { p2pk } from './p2pk';
|
||||
import { p2pkh } from './p2pkh';
|
||||
import { p2sh } from './p2sh';
|
||||
import { p2wpkh } from './p2wpkh';
|
||||
import { p2wsh } from './p2wsh';
|
||||
export interface Payment {
|
||||
name?: string;
|
||||
network?: Network;
|
||||
output?: Buffer;
|
||||
data?: Buffer[];
|
||||
m?: number;
|
||||
n?: number;
|
||||
pubkeys?: Buffer[];
|
||||
input?: Buffer;
|
||||
signatures?: Buffer[];
|
||||
pubkey?: Buffer;
|
||||
signature?: Buffer;
|
||||
address?: string;
|
||||
hash?: Buffer;
|
||||
redeem?: Payment;
|
||||
witness?: Buffer[];
|
||||
}
|
||||
export declare type PaymentCreator = (a: Payment, opts?: PaymentOpts) => Payment;
|
||||
export declare type PaymentFunction = () => Payment;
|
||||
export interface PaymentOpts {
|
||||
validate?: boolean;
|
||||
allowIncomplete?: boolean;
|
||||
}
|
||||
export declare type StackElement = Buffer | number;
|
||||
export declare type Stack = StackElement[];
|
||||
export declare type StackFunction = () => Stack;
|
||||
export { embed, p2ms, p2pk, p2pkh, p2sh, p2wpkh, p2wsh };
|
|
@ -1,54 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.p2wsh = exports.p2wpkh = exports.p2sh = exports.p2pkh = exports.p2pk = exports.p2ms = exports.embed = void 0;
|
||||
const embed_1 = require('./embed');
|
||||
Object.defineProperty(exports, 'embed', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return embed_1.p2data;
|
||||
},
|
||||
});
|
||||
const p2ms_1 = require('./p2ms');
|
||||
Object.defineProperty(exports, 'p2ms', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return p2ms_1.p2ms;
|
||||
},
|
||||
});
|
||||
const p2pk_1 = require('./p2pk');
|
||||
Object.defineProperty(exports, 'p2pk', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return p2pk_1.p2pk;
|
||||
},
|
||||
});
|
||||
const p2pkh_1 = require('./p2pkh');
|
||||
Object.defineProperty(exports, 'p2pkh', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return p2pkh_1.p2pkh;
|
||||
},
|
||||
});
|
||||
const p2sh_1 = require('./p2sh');
|
||||
Object.defineProperty(exports, 'p2sh', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return p2sh_1.p2sh;
|
||||
},
|
||||
});
|
||||
const p2wpkh_1 = require('./p2wpkh');
|
||||
Object.defineProperty(exports, 'p2wpkh', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return p2wpkh_1.p2wpkh;
|
||||
},
|
||||
});
|
||||
const p2wsh_1 = require('./p2wsh');
|
||||
Object.defineProperty(exports, 'p2wsh', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return p2wsh_1.p2wsh;
|
||||
},
|
||||
});
|
||||
// TODO
|
||||
// witness commitment
|
2
src/payments/lazy.d.ts
vendored
2
src/payments/lazy.d.ts
vendored
|
@ -1,2 +0,0 @@
|
|||
export declare function prop(object: {}, name: string, f: () => any): void;
|
||||
export declare function value<T>(f: () => T): () => T;
|
|
@ -1,32 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.value = exports.prop = void 0;
|
||||
function prop(object, name, f) {
|
||||
Object.defineProperty(object, name, {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get() {
|
||||
const _value = f.call(this);
|
||||
this[name] = _value;
|
||||
return _value;
|
||||
},
|
||||
set(_value) {
|
||||
Object.defineProperty(this, name, {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: _value,
|
||||
writable: true,
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
exports.prop = prop;
|
||||
function value(f) {
|
||||
let _value;
|
||||
return () => {
|
||||
if (_value !== undefined) return _value;
|
||||
_value = f();
|
||||
return _value;
|
||||
};
|
||||
}
|
||||
exports.value = value;
|
2
src/payments/p2ms.d.ts
vendored
2
src/payments/p2ms.d.ts
vendored
|
@ -1,2 +0,0 @@
|
|||
import { Payment, PaymentOpts } from './index';
|
||||
export declare function p2ms(a: Payment, opts?: PaymentOpts): Payment;
|
|
@ -1,150 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.p2ms = void 0;
|
||||
const networks_1 = require('../networks');
|
||||
const bscript = require('../script');
|
||||
const types_1 = require('../types');
|
||||
const lazy = require('./lazy');
|
||||
const OPS = bscript.OPS;
|
||||
const OP_INT_BASE = OPS.OP_RESERVED; // OP_1 - 1
|
||||
function stacksEqual(a, b) {
|
||||
if (a.length !== b.length) return false;
|
||||
return a.every((x, i) => {
|
||||
return x.equals(b[i]);
|
||||
});
|
||||
}
|
||||
// input: OP_0 [signatures ...]
|
||||
// output: m [pubKeys ...] n OP_CHECKMULTISIG
|
||||
function p2ms(a, opts) {
|
||||
if (
|
||||
!a.input &&
|
||||
!a.output &&
|
||||
!(a.pubkeys && a.m !== undefined) &&
|
||||
!a.signatures
|
||||
)
|
||||
throw new TypeError('Not enough data');
|
||||
opts = Object.assign({ validate: true }, opts || {});
|
||||
function isAcceptableSignature(x) {
|
||||
return (
|
||||
bscript.isCanonicalScriptSignature(x) ||
|
||||
(opts.allowIncomplete && x === OPS.OP_0) !== undefined
|
||||
);
|
||||
}
|
||||
(0, types_1.typeforce)(
|
||||
{
|
||||
network: types_1.typeforce.maybe(types_1.typeforce.Object),
|
||||
m: types_1.typeforce.maybe(types_1.typeforce.Number),
|
||||
n: types_1.typeforce.maybe(types_1.typeforce.Number),
|
||||
output: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
pubkeys: types_1.typeforce.maybe(
|
||||
types_1.typeforce.arrayOf(types_1.isPoint),
|
||||
),
|
||||
signatures: types_1.typeforce.maybe(
|
||||
types_1.typeforce.arrayOf(isAcceptableSignature),
|
||||
),
|
||||
input: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
},
|
||||
a,
|
||||
);
|
||||
const network = a.network || networks_1.bitcoin;
|
||||
const o = { network };
|
||||
let chunks = [];
|
||||
let decoded = false;
|
||||
function decode(output) {
|
||||
if (decoded) return;
|
||||
decoded = true;
|
||||
chunks = bscript.decompile(output);
|
||||
o.m = chunks[0] - OP_INT_BASE;
|
||||
o.n = chunks[chunks.length - 2] - OP_INT_BASE;
|
||||
o.pubkeys = chunks.slice(1, -2);
|
||||
}
|
||||
lazy.prop(o, 'output', () => {
|
||||
if (!a.m) return;
|
||||
if (!o.n) return;
|
||||
if (!a.pubkeys) return;
|
||||
return bscript.compile(
|
||||
[].concat(
|
||||
OP_INT_BASE + a.m,
|
||||
a.pubkeys,
|
||||
OP_INT_BASE + o.n,
|
||||
OPS.OP_CHECKMULTISIG,
|
||||
),
|
||||
);
|
||||
});
|
||||
lazy.prop(o, 'm', () => {
|
||||
if (!o.output) return;
|
||||
decode(o.output);
|
||||
return o.m;
|
||||
});
|
||||
lazy.prop(o, 'n', () => {
|
||||
if (!o.pubkeys) return;
|
||||
return o.pubkeys.length;
|
||||
});
|
||||
lazy.prop(o, 'pubkeys', () => {
|
||||
if (!a.output) return;
|
||||
decode(a.output);
|
||||
return o.pubkeys;
|
||||
});
|
||||
lazy.prop(o, 'signatures', () => {
|
||||
if (!a.input) return;
|
||||
return bscript.decompile(a.input).slice(1);
|
||||
});
|
||||
lazy.prop(o, 'input', () => {
|
||||
if (!a.signatures) return;
|
||||
return bscript.compile([OPS.OP_0].concat(a.signatures));
|
||||
});
|
||||
lazy.prop(o, 'witness', () => {
|
||||
if (!o.input) return;
|
||||
return [];
|
||||
});
|
||||
lazy.prop(o, 'name', () => {
|
||||
if (!o.m || !o.n) return;
|
||||
return `p2ms(${o.m} of ${o.n})`;
|
||||
});
|
||||
// extended validation
|
||||
if (opts.validate) {
|
||||
if (a.output) {
|
||||
decode(a.output);
|
||||
if (!types_1.typeforce.Number(chunks[0]))
|
||||
throw new TypeError('Output is invalid');
|
||||
if (!types_1.typeforce.Number(chunks[chunks.length - 2]))
|
||||
throw new TypeError('Output is invalid');
|
||||
if (chunks[chunks.length - 1] !== OPS.OP_CHECKMULTISIG)
|
||||
throw new TypeError('Output is invalid');
|
||||
if (o.m <= 0 || o.n > 16 || o.m > o.n || o.n !== chunks.length - 3)
|
||||
throw new TypeError('Output is invalid');
|
||||
if (!o.pubkeys.every(x => (0, types_1.isPoint)(x)))
|
||||
throw new TypeError('Output is invalid');
|
||||
if (a.m !== undefined && a.m !== o.m) throw new TypeError('m mismatch');
|
||||
if (a.n !== undefined && a.n !== o.n) throw new TypeError('n mismatch');
|
||||
if (a.pubkeys && !stacksEqual(a.pubkeys, o.pubkeys))
|
||||
throw new TypeError('Pubkeys mismatch');
|
||||
}
|
||||
if (a.pubkeys) {
|
||||
if (a.n !== undefined && a.n !== a.pubkeys.length)
|
||||
throw new TypeError('Pubkey count mismatch');
|
||||
o.n = a.pubkeys.length;
|
||||
if (o.n < o.m) throw new TypeError('Pubkey count cannot be less than m');
|
||||
}
|
||||
if (a.signatures) {
|
||||
if (a.signatures.length < o.m)
|
||||
throw new TypeError('Not enough signatures provided');
|
||||
if (a.signatures.length > o.m)
|
||||
throw new TypeError('Too many signatures provided');
|
||||
}
|
||||
if (a.input) {
|
||||
if (a.input[0] !== OPS.OP_0) throw new TypeError('Input is invalid');
|
||||
if (
|
||||
o.signatures.length === 0 ||
|
||||
!o.signatures.every(isAcceptableSignature)
|
||||
)
|
||||
throw new TypeError('Input has invalid signature(s)');
|
||||
if (a.signatures && !stacksEqual(a.signatures, o.signatures))
|
||||
throw new TypeError('Signature mismatch');
|
||||
if (a.m !== undefined && a.m !== a.signatures.length)
|
||||
throw new TypeError('Signature count mismatch');
|
||||
}
|
||||
}
|
||||
return Object.assign(o, a);
|
||||
}
|
||||
exports.p2ms = p2ms;
|
2
src/payments/p2pk.d.ts
vendored
2
src/payments/p2pk.d.ts
vendored
|
@ -1,2 +0,0 @@
|
|||
import { Payment, PaymentOpts } from './index';
|
||||
export declare function p2pk(a: Payment, opts?: PaymentOpts): Payment;
|
|
@ -1,72 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.p2pk = void 0;
|
||||
const networks_1 = require('../networks');
|
||||
const bscript = require('../script');
|
||||
const types_1 = require('../types');
|
||||
const lazy = require('./lazy');
|
||||
const OPS = bscript.OPS;
|
||||
// input: {signature}
|
||||
// output: {pubKey} OP_CHECKSIG
|
||||
function p2pk(a, opts) {
|
||||
if (!a.input && !a.output && !a.pubkey && !a.input && !a.signature)
|
||||
throw new TypeError('Not enough data');
|
||||
opts = Object.assign({ validate: true }, opts || {});
|
||||
(0, types_1.typeforce)(
|
||||
{
|
||||
network: types_1.typeforce.maybe(types_1.typeforce.Object),
|
||||
output: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
pubkey: types_1.typeforce.maybe(types_1.isPoint),
|
||||
signature: types_1.typeforce.maybe(bscript.isCanonicalScriptSignature),
|
||||
input: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
},
|
||||
a,
|
||||
);
|
||||
const _chunks = lazy.value(() => {
|
||||
return bscript.decompile(a.input);
|
||||
});
|
||||
const network = a.network || networks_1.bitcoin;
|
||||
const o = { name: 'p2pk', network };
|
||||
lazy.prop(o, 'output', () => {
|
||||
if (!a.pubkey) return;
|
||||
return bscript.compile([a.pubkey, OPS.OP_CHECKSIG]);
|
||||
});
|
||||
lazy.prop(o, 'pubkey', () => {
|
||||
if (!a.output) return;
|
||||
return a.output.slice(1, -1);
|
||||
});
|
||||
lazy.prop(o, 'signature', () => {
|
||||
if (!a.input) return;
|
||||
return _chunks()[0];
|
||||
});
|
||||
lazy.prop(o, 'input', () => {
|
||||
if (!a.signature) return;
|
||||
return bscript.compile([a.signature]);
|
||||
});
|
||||
lazy.prop(o, 'witness', () => {
|
||||
if (!o.input) return;
|
||||
return [];
|
||||
});
|
||||
// extended validation
|
||||
if (opts.validate) {
|
||||
if (a.output) {
|
||||
if (a.output[a.output.length - 1] !== OPS.OP_CHECKSIG)
|
||||
throw new TypeError('Output is invalid');
|
||||
if (!(0, types_1.isPoint)(o.pubkey))
|
||||
throw new TypeError('Output pubkey is invalid');
|
||||
if (a.pubkey && !a.pubkey.equals(o.pubkey))
|
||||
throw new TypeError('Pubkey mismatch');
|
||||
}
|
||||
if (a.signature) {
|
||||
if (a.input && !a.input.equals(o.input))
|
||||
throw new TypeError('Signature mismatch');
|
||||
}
|
||||
if (a.input) {
|
||||
if (_chunks().length !== 1) throw new TypeError('Input is invalid');
|
||||
if (!bscript.isCanonicalScriptSignature(o.signature))
|
||||
throw new TypeError('Input has invalid signature');
|
||||
}
|
||||
}
|
||||
return Object.assign(o, a);
|
||||
}
|
||||
exports.p2pk = p2pk;
|
2
src/payments/p2pkh.d.ts
vendored
2
src/payments/p2pkh.d.ts
vendored
|
@ -1,2 +0,0 @@
|
|||
import { Payment, PaymentOpts } from './index';
|
||||
export declare function p2pkh(a: Payment, opts?: PaymentOpts): Payment;
|
|
@ -1,132 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.p2pkh = void 0;
|
||||
const bcrypto = require('../crypto');
|
||||
const networks_1 = require('../networks');
|
||||
const bscript = require('../script');
|
||||
const types_1 = require('../types');
|
||||
const lazy = require('./lazy');
|
||||
const bs58check = require('bs58check');
|
||||
const OPS = bscript.OPS;
|
||||
// input: {signature} {pubkey}
|
||||
// output: OP_DUP OP_HASH160 {hash160(pubkey)} OP_EQUALVERIFY OP_CHECKSIG
|
||||
function p2pkh(a, opts) {
|
||||
if (!a.address && !a.hash && !a.output && !a.pubkey && !a.input)
|
||||
throw new TypeError('Not enough data');
|
||||
opts = Object.assign({ validate: true }, opts || {});
|
||||
(0, types_1.typeforce)(
|
||||
{
|
||||
network: types_1.typeforce.maybe(types_1.typeforce.Object),
|
||||
address: types_1.typeforce.maybe(types_1.typeforce.String),
|
||||
hash: types_1.typeforce.maybe(types_1.typeforce.BufferN(20)),
|
||||
output: types_1.typeforce.maybe(types_1.typeforce.BufferN(25)),
|
||||
pubkey: types_1.typeforce.maybe(types_1.isPoint),
|
||||
signature: types_1.typeforce.maybe(bscript.isCanonicalScriptSignature),
|
||||
input: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
},
|
||||
a,
|
||||
);
|
||||
const _address = lazy.value(() => {
|
||||
const payload = bs58check.decode(a.address);
|
||||
const version = payload.readUInt8(0);
|
||||
const hash = payload.slice(1);
|
||||
return { version, hash };
|
||||
});
|
||||
const _chunks = lazy.value(() => {
|
||||
return bscript.decompile(a.input);
|
||||
});
|
||||
const network = a.network || networks_1.bitcoin;
|
||||
const o = { name: 'p2pkh', network };
|
||||
lazy.prop(o, 'address', () => {
|
||||
if (!o.hash) return;
|
||||
const payload = Buffer.allocUnsafe(21);
|
||||
payload.writeUInt8(network.pubKeyHash, 0);
|
||||
o.hash.copy(payload, 1);
|
||||
return bs58check.encode(payload);
|
||||
});
|
||||
lazy.prop(o, 'hash', () => {
|
||||
if (a.output) return a.output.slice(3, 23);
|
||||
if (a.address) return _address().hash;
|
||||
if (a.pubkey || o.pubkey) return bcrypto.hash160(a.pubkey || o.pubkey);
|
||||
});
|
||||
lazy.prop(o, 'output', () => {
|
||||
if (!o.hash) return;
|
||||
return bscript.compile([
|
||||
OPS.OP_DUP,
|
||||
OPS.OP_HASH160,
|
||||
o.hash,
|
||||
OPS.OP_EQUALVERIFY,
|
||||
OPS.OP_CHECKSIG,
|
||||
]);
|
||||
});
|
||||
lazy.prop(o, 'pubkey', () => {
|
||||
if (!a.input) return;
|
||||
return _chunks()[1];
|
||||
});
|
||||
lazy.prop(o, 'signature', () => {
|
||||
if (!a.input) return;
|
||||
return _chunks()[0];
|
||||
});
|
||||
lazy.prop(o, 'input', () => {
|
||||
if (!a.pubkey) return;
|
||||
if (!a.signature) return;
|
||||
return bscript.compile([a.signature, a.pubkey]);
|
||||
});
|
||||
lazy.prop(o, 'witness', () => {
|
||||
if (!o.input) return;
|
||||
return [];
|
||||
});
|
||||
// extended validation
|
||||
if (opts.validate) {
|
||||
let hash = Buffer.from([]);
|
||||
if (a.address) {
|
||||
if (_address().version !== network.pubKeyHash)
|
||||
throw new TypeError('Invalid version or Network mismatch');
|
||||
if (_address().hash.length !== 20) throw new TypeError('Invalid address');
|
||||
hash = _address().hash;
|
||||
}
|
||||
if (a.hash) {
|
||||
if (hash.length > 0 && !hash.equals(a.hash))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = a.hash;
|
||||
}
|
||||
if (a.output) {
|
||||
if (
|
||||
a.output.length !== 25 ||
|
||||
a.output[0] !== OPS.OP_DUP ||
|
||||
a.output[1] !== OPS.OP_HASH160 ||
|
||||
a.output[2] !== 0x14 ||
|
||||
a.output[23] !== OPS.OP_EQUALVERIFY ||
|
||||
a.output[24] !== OPS.OP_CHECKSIG
|
||||
)
|
||||
throw new TypeError('Output is invalid');
|
||||
const hash2 = a.output.slice(3, 23);
|
||||
if (hash.length > 0 && !hash.equals(hash2))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = hash2;
|
||||
}
|
||||
if (a.pubkey) {
|
||||
const pkh = bcrypto.hash160(a.pubkey);
|
||||
if (hash.length > 0 && !hash.equals(pkh))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = pkh;
|
||||
}
|
||||
if (a.input) {
|
||||
const chunks = _chunks();
|
||||
if (chunks.length !== 2) throw new TypeError('Input is invalid');
|
||||
if (!bscript.isCanonicalScriptSignature(chunks[0]))
|
||||
throw new TypeError('Input has invalid signature');
|
||||
if (!(0, types_1.isPoint)(chunks[1]))
|
||||
throw new TypeError('Input has invalid pubkey');
|
||||
if (a.signature && !a.signature.equals(chunks[0]))
|
||||
throw new TypeError('Signature mismatch');
|
||||
if (a.pubkey && !a.pubkey.equals(chunks[1]))
|
||||
throw new TypeError('Pubkey mismatch');
|
||||
const pkh = bcrypto.hash160(chunks[1]);
|
||||
if (hash.length > 0 && !hash.equals(pkh))
|
||||
throw new TypeError('Hash mismatch');
|
||||
}
|
||||
}
|
||||
return Object.assign(o, a);
|
||||
}
|
||||
exports.p2pkh = p2pkh;
|
2
src/payments/p2sh.d.ts
vendored
2
src/payments/p2sh.d.ts
vendored
|
@ -1,2 +0,0 @@
|
|||
import { Payment, PaymentOpts } from './index';
|
||||
export declare function p2sh(a: Payment, opts?: PaymentOpts): Payment;
|
|
@ -1,189 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.p2sh = void 0;
|
||||
const bcrypto = require('../crypto');
|
||||
const networks_1 = require('../networks');
|
||||
const bscript = require('../script');
|
||||
const types_1 = require('../types');
|
||||
const lazy = require('./lazy');
|
||||
const bs58check = require('bs58check');
|
||||
const OPS = bscript.OPS;
|
||||
function stacksEqual(a, b) {
|
||||
if (a.length !== b.length) return false;
|
||||
return a.every((x, i) => {
|
||||
return x.equals(b[i]);
|
||||
});
|
||||
}
|
||||
// input: [redeemScriptSig ...] {redeemScript}
|
||||
// witness: <?>
|
||||
// output: OP_HASH160 {hash160(redeemScript)} OP_EQUAL
|
||||
function p2sh(a, opts) {
|
||||
if (!a.address && !a.hash && !a.output && !a.redeem && !a.input)
|
||||
throw new TypeError('Not enough data');
|
||||
opts = Object.assign({ validate: true }, opts || {});
|
||||
(0, types_1.typeforce)(
|
||||
{
|
||||
network: types_1.typeforce.maybe(types_1.typeforce.Object),
|
||||
address: types_1.typeforce.maybe(types_1.typeforce.String),
|
||||
hash: types_1.typeforce.maybe(types_1.typeforce.BufferN(20)),
|
||||
output: types_1.typeforce.maybe(types_1.typeforce.BufferN(23)),
|
||||
redeem: types_1.typeforce.maybe({
|
||||
network: types_1.typeforce.maybe(types_1.typeforce.Object),
|
||||
output: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
input: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
witness: types_1.typeforce.maybe(
|
||||
types_1.typeforce.arrayOf(types_1.typeforce.Buffer),
|
||||
),
|
||||
}),
|
||||
input: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
witness: types_1.typeforce.maybe(
|
||||
types_1.typeforce.arrayOf(types_1.typeforce.Buffer),
|
||||
),
|
||||
},
|
||||
a,
|
||||
);
|
||||
let network = a.network;
|
||||
if (!network) {
|
||||
network = (a.redeem && a.redeem.network) || networks_1.bitcoin;
|
||||
}
|
||||
const o = { network };
|
||||
const _address = lazy.value(() => {
|
||||
const payload = bs58check.decode(a.address);
|
||||
const version = payload.readUInt8(0);
|
||||
const hash = payload.slice(1);
|
||||
return { version, hash };
|
||||
});
|
||||
const _chunks = lazy.value(() => {
|
||||
return bscript.decompile(a.input);
|
||||
});
|
||||
const _redeem = lazy.value(() => {
|
||||
const chunks = _chunks();
|
||||
return {
|
||||
network,
|
||||
output: chunks[chunks.length - 1],
|
||||
input: bscript.compile(chunks.slice(0, -1)),
|
||||
witness: a.witness || [],
|
||||
};
|
||||
});
|
||||
// output dependents
|
||||
lazy.prop(o, 'address', () => {
|
||||
if (!o.hash) return;
|
||||
const payload = Buffer.allocUnsafe(21);
|
||||
payload.writeUInt8(o.network.scriptHash, 0);
|
||||
o.hash.copy(payload, 1);
|
||||
return bs58check.encode(payload);
|
||||
});
|
||||
lazy.prop(o, 'hash', () => {
|
||||
// in order of least effort
|
||||
if (a.output) return a.output.slice(2, 22);
|
||||
if (a.address) return _address().hash;
|
||||
if (o.redeem && o.redeem.output) return bcrypto.hash160(o.redeem.output);
|
||||
});
|
||||
lazy.prop(o, 'output', () => {
|
||||
if (!o.hash) return;
|
||||
return bscript.compile([OPS.OP_HASH160, o.hash, OPS.OP_EQUAL]);
|
||||
});
|
||||
// input dependents
|
||||
lazy.prop(o, 'redeem', () => {
|
||||
if (!a.input) return;
|
||||
return _redeem();
|
||||
});
|
||||
lazy.prop(o, 'input', () => {
|
||||
if (!a.redeem || !a.redeem.input || !a.redeem.output) return;
|
||||
return bscript.compile(
|
||||
[].concat(bscript.decompile(a.redeem.input), a.redeem.output),
|
||||
);
|
||||
});
|
||||
lazy.prop(o, 'witness', () => {
|
||||
if (o.redeem && o.redeem.witness) return o.redeem.witness;
|
||||
if (o.input) return [];
|
||||
});
|
||||
lazy.prop(o, 'name', () => {
|
||||
const nameParts = ['p2sh'];
|
||||
if (o.redeem !== undefined && o.redeem.name !== undefined)
|
||||
nameParts.push(o.redeem.name);
|
||||
return nameParts.join('-');
|
||||
});
|
||||
if (opts.validate) {
|
||||
let hash = Buffer.from([]);
|
||||
if (a.address) {
|
||||
if (_address().version !== network.scriptHash)
|
||||
throw new TypeError('Invalid version or Network mismatch');
|
||||
if (_address().hash.length !== 20) throw new TypeError('Invalid address');
|
||||
hash = _address().hash;
|
||||
}
|
||||
if (a.hash) {
|
||||
if (hash.length > 0 && !hash.equals(a.hash))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = a.hash;
|
||||
}
|
||||
if (a.output) {
|
||||
if (
|
||||
a.output.length !== 23 ||
|
||||
a.output[0] !== OPS.OP_HASH160 ||
|
||||
a.output[1] !== 0x14 ||
|
||||
a.output[22] !== OPS.OP_EQUAL
|
||||
)
|
||||
throw new TypeError('Output is invalid');
|
||||
const hash2 = a.output.slice(2, 22);
|
||||
if (hash.length > 0 && !hash.equals(hash2))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = hash2;
|
||||
}
|
||||
// inlined to prevent 'no-inner-declarations' failing
|
||||
const checkRedeem = redeem => {
|
||||
// is the redeem output empty/invalid?
|
||||
if (redeem.output) {
|
||||
const decompile = bscript.decompile(redeem.output);
|
||||
if (!decompile || decompile.length < 1)
|
||||
throw new TypeError('Redeem.output too short');
|
||||
// match hash against other sources
|
||||
const hash2 = bcrypto.hash160(redeem.output);
|
||||
if (hash.length > 0 && !hash.equals(hash2))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = hash2;
|
||||
}
|
||||
if (redeem.input) {
|
||||
const hasInput = redeem.input.length > 0;
|
||||
const hasWitness = redeem.witness && redeem.witness.length > 0;
|
||||
if (!hasInput && !hasWitness) throw new TypeError('Empty input');
|
||||
if (hasInput && hasWitness)
|
||||
throw new TypeError('Input and witness provided');
|
||||
if (hasInput) {
|
||||
const richunks = bscript.decompile(redeem.input);
|
||||
if (!bscript.isPushOnly(richunks))
|
||||
throw new TypeError('Non push-only scriptSig');
|
||||
}
|
||||
}
|
||||
};
|
||||
if (a.input) {
|
||||
const chunks = _chunks();
|
||||
if (!chunks || chunks.length < 1) throw new TypeError('Input too short');
|
||||
if (!Buffer.isBuffer(_redeem().output))
|
||||
throw new TypeError('Input is invalid');
|
||||
checkRedeem(_redeem());
|
||||
}
|
||||
if (a.redeem) {
|
||||
if (a.redeem.network && a.redeem.network !== network)
|
||||
throw new TypeError('Network mismatch');
|
||||
if (a.input) {
|
||||
const redeem = _redeem();
|
||||
if (a.redeem.output && !a.redeem.output.equals(redeem.output))
|
||||
throw new TypeError('Redeem.output mismatch');
|
||||
if (a.redeem.input && !a.redeem.input.equals(redeem.input))
|
||||
throw new TypeError('Redeem.input mismatch');
|
||||
}
|
||||
checkRedeem(a.redeem);
|
||||
}
|
||||
if (a.witness) {
|
||||
if (
|
||||
a.redeem &&
|
||||
a.redeem.witness &&
|
||||
!stacksEqual(a.redeem.witness, a.witness)
|
||||
)
|
||||
throw new TypeError('Witness and redeem.witness mismatch');
|
||||
}
|
||||
}
|
||||
return Object.assign(o, a);
|
||||
}
|
||||
exports.p2sh = p2sh;
|
2
src/payments/p2wpkh.d.ts
vendored
2
src/payments/p2wpkh.d.ts
vendored
|
@ -1,2 +0,0 @@
|
|||
import { Payment, PaymentOpts } from './index';
|
||||
export declare function p2wpkh(a: Payment, opts?: PaymentOpts): Payment;
|
|
@ -1,132 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.p2wpkh = void 0;
|
||||
const bcrypto = require('../crypto');
|
||||
const networks_1 = require('../networks');
|
||||
const bscript = require('../script');
|
||||
const types_1 = require('../types');
|
||||
const lazy = require('./lazy');
|
||||
const bech32_1 = require('bech32');
|
||||
const OPS = bscript.OPS;
|
||||
const EMPTY_BUFFER = Buffer.alloc(0);
|
||||
// witness: {signature} {pubKey}
|
||||
// input: <>
|
||||
// output: OP_0 {pubKeyHash}
|
||||
function p2wpkh(a, opts) {
|
||||
if (!a.address && !a.hash && !a.output && !a.pubkey && !a.witness)
|
||||
throw new TypeError('Not enough data');
|
||||
opts = Object.assign({ validate: true }, opts || {});
|
||||
(0, types_1.typeforce)(
|
||||
{
|
||||
address: types_1.typeforce.maybe(types_1.typeforce.String),
|
||||
hash: types_1.typeforce.maybe(types_1.typeforce.BufferN(20)),
|
||||
input: types_1.typeforce.maybe(types_1.typeforce.BufferN(0)),
|
||||
network: types_1.typeforce.maybe(types_1.typeforce.Object),
|
||||
output: types_1.typeforce.maybe(types_1.typeforce.BufferN(22)),
|
||||
pubkey: types_1.typeforce.maybe(types_1.isPoint),
|
||||
signature: types_1.typeforce.maybe(bscript.isCanonicalScriptSignature),
|
||||
witness: types_1.typeforce.maybe(
|
||||
types_1.typeforce.arrayOf(types_1.typeforce.Buffer),
|
||||
),
|
||||
},
|
||||
a,
|
||||
);
|
||||
const _address = lazy.value(() => {
|
||||
const result = bech32_1.bech32.decode(a.address);
|
||||
const version = result.words.shift();
|
||||
const data = bech32_1.bech32.fromWords(result.words);
|
||||
return {
|
||||
version,
|
||||
prefix: result.prefix,
|
||||
data: Buffer.from(data),
|
||||
};
|
||||
});
|
||||
const network = a.network || networks_1.bitcoin;
|
||||
const o = { name: 'p2wpkh', network };
|
||||
lazy.prop(o, 'address', () => {
|
||||
if (!o.hash) return;
|
||||
const words = bech32_1.bech32.toWords(o.hash);
|
||||
words.unshift(0x00);
|
||||
return bech32_1.bech32.encode(network.bech32, words);
|
||||
});
|
||||
lazy.prop(o, 'hash', () => {
|
||||
if (a.output) return a.output.slice(2, 22);
|
||||
if (a.address) return _address().data;
|
||||
if (a.pubkey || o.pubkey) return bcrypto.hash160(a.pubkey || o.pubkey);
|
||||
});
|
||||
lazy.prop(o, 'output', () => {
|
||||
if (!o.hash) return;
|
||||
return bscript.compile([OPS.OP_0, o.hash]);
|
||||
});
|
||||
lazy.prop(o, 'pubkey', () => {
|
||||
if (a.pubkey) return a.pubkey;
|
||||
if (!a.witness) return;
|
||||
return a.witness[1];
|
||||
});
|
||||
lazy.prop(o, 'signature', () => {
|
||||
if (!a.witness) return;
|
||||
return a.witness[0];
|
||||
});
|
||||
lazy.prop(o, 'input', () => {
|
||||
if (!o.witness) return;
|
||||
return EMPTY_BUFFER;
|
||||
});
|
||||
lazy.prop(o, 'witness', () => {
|
||||
if (!a.pubkey) return;
|
||||
if (!a.signature) return;
|
||||
return [a.signature, a.pubkey];
|
||||
});
|
||||
// extended validation
|
||||
if (opts.validate) {
|
||||
let hash = Buffer.from([]);
|
||||
if (a.address) {
|
||||
if (network && network.bech32 !== _address().prefix)
|
||||
throw new TypeError('Invalid prefix or Network mismatch');
|
||||
if (_address().version !== 0x00)
|
||||
throw new TypeError('Invalid address version');
|
||||
if (_address().data.length !== 20)
|
||||
throw new TypeError('Invalid address data');
|
||||
hash = _address().data;
|
||||
}
|
||||
if (a.hash) {
|
||||
if (hash.length > 0 && !hash.equals(a.hash))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = a.hash;
|
||||
}
|
||||
if (a.output) {
|
||||
if (
|
||||
a.output.length !== 22 ||
|
||||
a.output[0] !== OPS.OP_0 ||
|
||||
a.output[1] !== 0x14
|
||||
)
|
||||
throw new TypeError('Output is invalid');
|
||||
if (hash.length > 0 && !hash.equals(a.output.slice(2)))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = a.output.slice(2);
|
||||
}
|
||||
if (a.pubkey) {
|
||||
const pkh = bcrypto.hash160(a.pubkey);
|
||||
if (hash.length > 0 && !hash.equals(pkh))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = pkh;
|
||||
if (!(0, types_1.isPoint)(a.pubkey) || a.pubkey.length !== 33)
|
||||
throw new TypeError('Invalid pubkey for p2wpkh');
|
||||
}
|
||||
if (a.witness) {
|
||||
if (a.witness.length !== 2) throw new TypeError('Witness is invalid');
|
||||
if (!bscript.isCanonicalScriptSignature(a.witness[0]))
|
||||
throw new TypeError('Witness has invalid signature');
|
||||
if (!(0, types_1.isPoint)(a.witness[1]) || a.witness[1].length !== 33)
|
||||
throw new TypeError('Witness has invalid pubkey');
|
||||
if (a.signature && !a.signature.equals(a.witness[0]))
|
||||
throw new TypeError('Signature mismatch');
|
||||
if (a.pubkey && !a.pubkey.equals(a.witness[1]))
|
||||
throw new TypeError('Pubkey mismatch');
|
||||
const pkh = bcrypto.hash160(a.witness[1]);
|
||||
if (hash.length > 0 && !hash.equals(pkh))
|
||||
throw new TypeError('Hash mismatch');
|
||||
}
|
||||
}
|
||||
return Object.assign(o, a);
|
||||
}
|
||||
exports.p2wpkh = p2wpkh;
|
2
src/payments/p2wsh.d.ts
vendored
2
src/payments/p2wsh.d.ts
vendored
|
@ -1,2 +0,0 @@
|
|||
import { Payment, PaymentOpts } from './index';
|
||||
export declare function p2wsh(a: Payment, opts?: PaymentOpts): Payment;
|
|
@ -1,212 +0,0 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.p2wsh = void 0;
|
||||
const bcrypto = require('../crypto');
|
||||
const networks_1 = require('../networks');
|
||||
const bscript = require('../script');
|
||||
const types_1 = require('../types');
|
||||
const lazy = require('./lazy');
|
||||
const bech32_1 = require('bech32');
|
||||
const OPS = bscript.OPS;
|
||||
const EMPTY_BUFFER = Buffer.alloc(0);
|
||||
function stacksEqual(a, b) {
|
||||
if (a.length !== b.length) return false;
|
||||
return a.every((x, i) => {
|
||||
return x.equals(b[i]);
|
||||
});
|
||||
}
|
||||
function chunkHasUncompressedPubkey(chunk) {
|
||||
if (
|
||||
Buffer.isBuffer(chunk) &&
|
||||
chunk.length === 65 &&
|
||||
chunk[0] === 0x04 &&
|
||||
(0, types_1.isPoint)(chunk)
|
||||
) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// input: <>
|
||||
// witness: [redeemScriptSig ...] {redeemScript}
|
||||
// output: OP_0 {sha256(redeemScript)}
|
||||
function p2wsh(a, opts) {
|
||||
if (!a.address && !a.hash && !a.output && !a.redeem && !a.witness)
|
||||
throw new TypeError('Not enough data');
|
||||
opts = Object.assign({ validate: true }, opts || {});
|
||||
(0, types_1.typeforce)(
|
||||
{
|
||||
network: types_1.typeforce.maybe(types_1.typeforce.Object),
|
||||
address: types_1.typeforce.maybe(types_1.typeforce.String),
|
||||
hash: types_1.typeforce.maybe(types_1.typeforce.BufferN(32)),
|
||||
output: types_1.typeforce.maybe(types_1.typeforce.BufferN(34)),
|
||||
redeem: types_1.typeforce.maybe({
|
||||
input: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
network: types_1.typeforce.maybe(types_1.typeforce.Object),
|
||||
output: types_1.typeforce.maybe(types_1.typeforce.Buffer),
|
||||
witness: types_1.typeforce.maybe(
|
||||
types_1.typeforce.arrayOf(types_1.typeforce.Buffer),
|
||||
),
|
||||
}),
|
||||
input: types_1.typeforce.maybe(types_1.typeforce.BufferN(0)),
|
||||
witness: types_1.typeforce.maybe(
|
||||
types_1.typeforce.arrayOf(types_1.typeforce.Buffer),
|
||||
),
|
||||
},
|
||||
a,
|
||||
);
|
||||
const _address = lazy.value(() => {
|
||||
const result = bech32_1.bech32.decode(a.address);
|
||||
const version = result.words.shift();
|
||||
const data = bech32_1.bech32.fromWords(result.words);
|
||||
return {
|
||||
version,
|
||||
prefix: result.prefix,
|
||||
data: Buffer.from(data),
|
||||
};
|
||||
});
|
||||
const _rchunks = lazy.value(() => {
|
||||
return bscript.decompile(a.redeem.input);
|
||||
});
|
||||
let network = a.network;
|
||||
if (!network) {
|
||||
network = (a.redeem && a.redeem.network) || networks_1.bitcoin;
|
||||
}
|
||||
const o = { network };
|
||||
lazy.prop(o, 'address', () => {
|
||||
if (!o.hash) return;
|
||||
const words = bech32_1.bech32.toWords(o.hash);
|
||||
words.unshift(0x00);
|
||||
return bech32_1.bech32.encode(network.bech32, words);
|
||||
});
|
||||
lazy.prop(o, 'hash', () => {
|
||||
if (a.output) return a.output.slice(2);
|
||||
if (a.address) return _address().data;
|
||||
if (o.redeem && o.redeem.output) return bcrypto.sha256(o.redeem.output);
|
||||
});
|
||||
lazy.prop(o, 'output', () => {
|
||||
if (!o.hash) return;
|
||||
return bscript.compile([OPS.OP_0, o.hash]);
|
||||
});
|
||||
lazy.prop(o, 'redeem', () => {
|
||||
if (!a.witness) return;
|
||||
return {
|
||||
output: a.witness[a.witness.length - 1],
|
||||
input: EMPTY_BUFFER,
|
||||
witness: a.witness.slice(0, -1),
|
||||
};
|
||||
});
|
||||
lazy.prop(o, 'input', () => {
|
||||
if (!o.witness) return;
|
||||
return EMPTY_BUFFER;
|
||||
});
|
||||
lazy.prop(o, 'witness', () => {
|
||||
// transform redeem input to witness stack?
|
||||
if (
|
||||
a.redeem &&
|
||||
a.redeem.input &&
|
||||
a.redeem.input.length > 0 &&
|
||||
a.redeem.output &&
|
||||
a.redeem.output.length > 0
|
||||
) {
|
||||
const stack = bscript.toStack(_rchunks());
|
||||
// assign, and blank the existing input
|
||||
o.redeem = Object.assign({ witness: stack }, a.redeem);
|
||||
o.redeem.input = EMPTY_BUFFER;
|
||||
return [].concat(stack, a.redeem.output);
|
||||
}
|
||||
if (!a.redeem) return;
|
||||
if (!a.redeem.output) return;
|
||||
if (!a.redeem.witness) return;
|
||||
return [].concat(a.redeem.witness, a.redeem.output);
|
||||
});
|
||||
lazy.prop(o, 'name', () => {
|
||||
const nameParts = ['p2wsh'];
|
||||
if (o.redeem !== undefined && o.redeem.name !== undefined)
|
||||
nameParts.push(o.redeem.name);
|
||||
return nameParts.join('-');
|
||||
});
|
||||
// extended validation
|
||||
if (opts.validate) {
|
||||
let hash = Buffer.from([]);
|
||||
if (a.address) {
|
||||
if (_address().prefix !== network.bech32)
|
||||
throw new TypeError('Invalid prefix or Network mismatch');
|
||||
if (_address().version !== 0x00)
|
||||
throw new TypeError('Invalid address version');
|
||||
if (_address().data.length !== 32)
|
||||
throw new TypeError('Invalid address data');
|
||||
hash = _address().data;
|
||||
}
|
||||
if (a.hash) {
|
||||
if (hash.length > 0 && !hash.equals(a.hash))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = a.hash;
|
||||
}
|
||||
if (a.output) {
|
||||
if (
|
||||
a.output.length !== 34 ||
|
||||
a.output[0] !== OPS.OP_0 ||
|
||||
a.output[1] !== 0x20
|
||||
)
|
||||
throw new TypeError('Output is invalid');
|
||||
const hash2 = a.output.slice(2);
|
||||
if (hash.length > 0 && !hash.equals(hash2))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = hash2;
|
||||
}
|
||||
if (a.redeem) {
|
||||
if (a.redeem.network && a.redeem.network !== network)
|
||||
throw new TypeError('Network mismatch');
|
||||
// is there two redeem sources?
|
||||
if (
|
||||
a.redeem.input &&
|
||||
a.redeem.input.length > 0 &&
|
||||
a.redeem.witness &&
|
||||
a.redeem.witness.length > 0
|
||||
)
|
||||
throw new TypeError('Ambiguous witness source');
|
||||
// is the redeem output non-empty?
|
||||
if (a.redeem.output) {
|
||||
if (bscript.decompile(a.redeem.output).length === 0)
|
||||
throw new TypeError('Redeem.output is invalid');
|
||||
// match hash against other sources
|
||||
const hash2 = bcrypto.sha256(a.redeem.output);
|
||||
if (hash.length > 0 && !hash.equals(hash2))
|
||||
throw new TypeError('Hash mismatch');
|
||||
else hash = hash2;
|
||||
}
|
||||
if (a.redeem.input && !bscript.isPushOnly(_rchunks()))
|
||||
throw new TypeError('Non push-only scriptSig');
|
||||
if (
|
||||
a.witness &&
|
||||
a.redeem.witness &&
|
||||
!stacksEqual(a.witness, a.redeem.witness)
|
||||
)
|
||||
throw new TypeError('Witness and redeem.witness mismatch');
|
||||
if (
|
||||
(a.redeem.input && _rchunks().some(chunkHasUncompressedPubkey)) ||
|
||||
(a.redeem.output &&
|
||||
(bscript.decompile(a.redeem.output) || []).some(
|
||||
chunkHasUncompressedPubkey,
|
||||
))
|
||||
) {
|
||||
throw new TypeError(
|
||||
'redeem.input or redeem.output contains uncompressed pubkey',
|
||||
);
|
||||
}
|
||||
}
|
||||
if (a.witness && a.witness.length > 0) {
|
||||
const wScript = a.witness[a.witness.length - 1];
|
||||
if (a.redeem && a.redeem.output && !a.redeem.output.equals(wScript))
|
||||
throw new TypeError('Witness and redeem.output mismatch');
|
||||
if (
|
||||
a.witness.some(chunkHasUncompressedPubkey) ||
|
||||
(bscript.decompile(wScript) || []).some(chunkHasUncompressedPubkey)
|
||||
)
|
||||
throw new TypeError('Witness contains uncompressed pubkey');
|
||||
}
|
||||
}
|
||||
return Object.assign(o, a);
|
||||
}
|
||||
exports.p2wsh = p2wsh;
|
182
src/psbt.d.ts
vendored
182
src/psbt.d.ts
vendored
|
@ -1,182 +0,0 @@
|
|||
/// <reference types="node" />
|
||||
import { Psbt as PsbtBase } from 'bip174';
|
||||
import { KeyValue, PsbtGlobalUpdate, PsbtInput, PsbtInputUpdate, PsbtOutput, PsbtOutputUpdate } from 'bip174/src/lib/interfaces';
|
||||
import { Network } from './networks';
|
||||
import { Transaction } from './transaction';
|
||||
export interface TransactionInput {
|
||||
hash: string | Buffer;
|
||||
index: number;
|
||||
sequence?: number;
|
||||
}
|
||||
export interface PsbtTxInput extends TransactionInput {
|
||||
hash: Buffer;
|
||||
}
|
||||
export interface TransactionOutput {
|
||||
script: Buffer;
|
||||
value: number;
|
||||
}
|
||||
export interface PsbtTxOutput extends TransactionOutput {
|
||||
address: string | undefined;
|
||||
}
|
||||
export declare type ValidateSigFunction = (pubkey: Buffer, msghash: Buffer, signature: Buffer) => boolean;
|
||||
/**
|
||||
* Psbt class can parse and generate a PSBT binary based off of the BIP174.
|
||||
* There are 6 roles that this class fulfills. (Explained in BIP174)
|
||||
*
|
||||
* Creator: This can be done with `new Psbt()`
|
||||
* Updater: This can be done with `psbt.addInput(input)`, `psbt.addInputs(inputs)`,
|
||||
* `psbt.addOutput(output)`, `psbt.addOutputs(outputs)` when you are looking to
|
||||
* add new inputs and outputs to the PSBT, and `psbt.updateGlobal(itemObject)`,
|
||||
* `psbt.updateInput(itemObject)`, `psbt.updateOutput(itemObject)`
|
||||
* addInput requires hash: Buffer | string; and index: number; as attributes
|
||||
* and can also include any attributes that are used in updateInput method.
|
||||
* addOutput requires script: Buffer; and value: number; and likewise can include
|
||||
* data for updateOutput.
|
||||
* For a list of what attributes should be what types. Check the bip174 library.
|
||||
* Also, check the integration tests for some examples of usage.
|
||||
* Signer: There are a few methods. signAllInputs and signAllInputsAsync, which will search all input
|
||||
* information for your pubkey or pubkeyhash, and only sign inputs where it finds
|
||||
* your info. Or you can explicitly sign a specific input with signInput and
|
||||
* signInputAsync. For the async methods you can create a SignerAsync object
|
||||
* and use something like a hardware wallet to sign with. (You must implement this)
|
||||
* Combiner: psbts can be combined easily with `psbt.combine(psbt2, psbt3, psbt4 ...)`
|
||||
* the psbt calling combine will always have precedence when a conflict occurs.
|
||||
* Combine checks if the internal bitcoin transaction is the same, so be sure that
|
||||
* all sequences, version, locktime, etc. are the same before combining.
|
||||
* Input Finalizer: This role is fairly important. Not only does it need to construct
|
||||
* the input scriptSigs and witnesses, but it SHOULD verify the signatures etc.
|
||||
* Before running `psbt.finalizeAllInputs()` please run `psbt.validateSignaturesOfAllInputs()`
|
||||
* Running any finalize method will delete any data in the input(s) that are no longer
|
||||
* needed due to the finalized scripts containing the information.
|
||||
* Transaction Extractor: This role will perform some checks before returning a
|
||||
* Transaction object. Such as fee rate not being larger than maximumFeeRate etc.
|
||||
*/
|
||||
export declare class Psbt {
|
||||
readonly data: PsbtBase;
|
||||
static fromBase64(data: string, opts?: PsbtOptsOptional): Psbt;
|
||||
static fromHex(data: string, opts?: PsbtOptsOptional): Psbt;
|
||||
static fromBuffer(buffer: Buffer, opts?: PsbtOptsOptional): Psbt;
|
||||
private __CACHE;
|
||||
private opts;
|
||||
constructor(opts?: PsbtOptsOptional, data?: PsbtBase);
|
||||
get inputCount(): number;
|
||||
get version(): number;
|
||||
set version(version: number);
|
||||
get locktime(): number;
|
||||
set locktime(locktime: number);
|
||||
get txInputs(): PsbtTxInput[];
|
||||
get txOutputs(): PsbtTxOutput[];
|
||||
combine(...those: Psbt[]): this;
|
||||
clone(): Psbt;
|
||||
setMaximumFeeRate(satoshiPerByte: number): void;
|
||||
setVersion(version: number): this;
|
||||
setLocktime(locktime: number): this;
|
||||
setInputSequence(inputIndex: number, sequence: number): this;
|
||||
addInputs(inputDatas: PsbtInputExtended[]): this;
|
||||
addInput(inputData: PsbtInputExtended): this;
|
||||
addOutputs(outputDatas: PsbtOutputExtended[]): this;
|
||||
addOutput(outputData: PsbtOutputExtended): this;
|
||||
extractTransaction(disableFeeCheck?: boolean): Transaction;
|
||||
getFeeRate(): number;
|
||||
getFee(): number;
|
||||
finalizeAllInputs(): this;
|
||||
finalizeInput(inputIndex: number, finalScriptsFunc?: FinalScriptsFunc): this;
|
||||
getInputType(inputIndex: number): AllScriptType;
|
||||
inputHasPubkey(inputIndex: number, pubkey: Buffer): boolean;
|
||||
inputHasHDKey(inputIndex: number, root: HDSigner): boolean;
|
||||
outputHasPubkey(outputIndex: number, pubkey: Buffer): boolean;
|
||||
outputHasHDKey(outputIndex: number, root: HDSigner): boolean;
|
||||
validateSignaturesOfAllInputs(validator: ValidateSigFunction): boolean;
|
||||
validateSignaturesOfInput(inputIndex: number, validator: ValidateSigFunction, pubkey?: Buffer): boolean;
|
||||
signAllInputsHD(hdKeyPair: HDSigner, sighashTypes?: number[]): this;
|
||||
signAllInputsHDAsync(hdKeyPair: HDSigner | HDSignerAsync, sighashTypes?: number[]): Promise<void>;
|
||||
signInputHD(inputIndex: number, hdKeyPair: HDSigner, sighashTypes?: number[]): this;
|
||||
signInputHDAsync(inputIndex: number, hdKeyPair: HDSigner | HDSignerAsync, sighashTypes?: number[]): Promise<void>;
|
||||
signAllInputs(keyPair: Signer, sighashTypes?: number[]): this;
|
||||
signAllInputsAsync(keyPair: Signer | SignerAsync, sighashTypes?: number[]): Promise<void>;
|
||||
signInput(inputIndex: number, keyPair: Signer, sighashTypes?: number[]): this;
|
||||
signInputAsync(inputIndex: number, keyPair: Signer | SignerAsync, sighashTypes?: number[]): Promise<void>;
|
||||
toBuffer(): Buffer;
|
||||
toHex(): string;
|
||||
toBase64(): string;
|
||||
updateGlobal(updateData: PsbtGlobalUpdate): this;
|
||||
updateInput(inputIndex: number, updateData: PsbtInputUpdate): this;
|
||||
updateOutput(outputIndex: number, updateData: PsbtOutputUpdate): this;
|
||||
addUnknownKeyValToGlobal(keyVal: KeyValue): this;
|
||||
addUnknownKeyValToInput(inputIndex: number, keyVal: KeyValue): this;
|
||||
addUnknownKeyValToOutput(outputIndex: number, keyVal: KeyValue): this;
|
||||
clearFinalizedInput(inputIndex: number): this;
|
||||
}
|
||||
interface PsbtOptsOptional {
|
||||
network?: Network;
|
||||
maximumFeeRate?: number;
|
||||
}
|
||||
interface PsbtInputExtended extends PsbtInput, TransactionInput {
|
||||
}
|
||||
declare type PsbtOutputExtended = PsbtOutputExtendedAddress | PsbtOutputExtendedScript;
|
||||
interface PsbtOutputExtendedAddress extends PsbtOutput {
|
||||
address: string;
|
||||
value: number;
|
||||
}
|
||||
interface PsbtOutputExtendedScript extends PsbtOutput {
|
||||
script: Buffer;
|
||||
value: number;
|
||||
}
|
||||
interface HDSignerBase {
|
||||
/**
|
||||
* DER format compressed publicKey buffer
|
||||
*/
|
||||
publicKey: Buffer;
|
||||
/**
|
||||
* The first 4 bytes of the sha256-ripemd160 of the publicKey
|
||||
*/
|
||||
fingerprint: Buffer;
|
||||
}
|
||||
export interface HDSigner extends HDSignerBase {
|
||||
/**
|
||||
* The path string must match /^m(\/\d+'?)+$/
|
||||
* ex. m/44'/0'/0'/1/23 levels with ' must be hard derivations
|
||||
*/
|
||||
derivePath(path: string): HDSigner;
|
||||
/**
|
||||
* Input hash (the "message digest") for the signature algorithm
|
||||
* Return a 64 byte signature (32 byte r and 32 byte s in that order)
|
||||
*/
|
||||
sign(hash: Buffer): Buffer;
|
||||
}
|
||||
/**
|
||||
* Same as above but with async sign method
|
||||
*/
|
||||
export interface HDSignerAsync extends HDSignerBase {
|
||||
derivePath(path: string): HDSignerAsync;
|
||||
sign(hash: Buffer): Promise<Buffer>;
|
||||
}
|
||||
export interface Signer {
|
||||
publicKey: Buffer;
|
||||
network?: any;
|
||||
sign(hash: Buffer, lowR?: boolean): Buffer;
|
||||
getPublicKey?(): Buffer;
|
||||
}
|
||||
export interface SignerAsync {
|
||||
publicKey: Buffer;
|
||||
network?: any;
|
||||
sign(hash: Buffer, lowR?: boolean): Promise<Buffer>;
|
||||
getPublicKey?(): Buffer;
|
||||
}
|
||||
/**
|
||||
* This function must do two things:
|
||||
* 1. Check if the `input` can be finalized. If it can not be finalized, throw.
|
||||
* ie. `Can not finalize input #${inputIndex}`
|
||||
* 2. Create the finalScriptSig and finalScriptWitness Buffers.
|
||||
*/
|
||||
declare type FinalScriptsFunc = (inputIndex: number, // Which input is it?
|
||||
input: PsbtInput, // The PSBT input contents
|
||||
script: Buffer, // The "meaningful" locking script Buffer (redeemScript for P2SH etc.)
|
||||
isSegwit: boolean, // Is it segwit?
|
||||
isP2SH: boolean, // Is it P2SH?
|
||||
isP2WSH: boolean) => {
|
||||
finalScriptSig: Buffer | undefined;
|
||||
finalScriptWitness: Buffer | undefined;
|
||||
};
|
||||
declare type AllScriptType = 'witnesspubkeyhash' | 'pubkeyhash' | 'multisig' | 'pubkey' | 'nonstandard' | 'p2sh-witnesspubkeyhash' | 'p2sh-pubkeyhash' | 'p2sh-multisig' | 'p2sh-pubkey' | 'p2sh-nonstandard' | 'p2wsh-pubkeyhash' | 'p2wsh-multisig' | 'p2wsh-pubkey' | 'p2wsh-nonstandard' | 'p2sh-p2wsh-pubkeyhash' | 'p2sh-p2wsh-multisig' | 'p2sh-p2wsh-pubkey' | 'p2sh-p2wsh-nonstandard';
|
||||
export {};
|
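A condensed sketch of the role flow described in the class comment above (assumed inputs: a `network`, a previous `txid`, a destination `address`, a `keyPair` satisfying Signer, and a `validator` of type ValidateSigFunction; the UTXO fields that addInput also needs come from bip174 and are elided here):

const psbt = new Psbt({ network });                                                // Creator
psbt.addInput({ hash: txid, index: 0 /* plus witnessUtxo or nonWitnessUtxo */ });  // Updater
psbt.addOutput({ address, value: 90000 });
psbt.signInput(0, keyPair);                                                        // Signer
psbt.validateSignaturesOfAllInputs(validator);                                     // check before finalizing
psbt.finalizeAllInputs();                                                          // Input Finalizer
const tx = psbt.extractTransaction();                                              // Transaction Extractor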
1411  src/psbt.js
File diff suppressed because it is too large.
8  src/push_data.d.ts (vendored)
@@ -1,8 +0,0 @@
/// <reference types="node" />
export declare function encodingLength(i: number): number;
export declare function encode(buffer: Buffer, num: number, offset: number): number;
export declare function decode(buffer: Buffer, offset: number): {
    opcode: number;
    number: number;
    size: number;
} | null;
src/push_data.js
@@ -1,61 +0,0 @@
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.decode = exports.encode = exports.encodingLength = void 0;
|
||||
const ops_1 = require('./ops');
|
||||
function encodingLength(i) {
|
||||
return i < ops_1.OPS.OP_PUSHDATA1 ? 1 : i <= 0xff ? 2 : i <= 0xffff ? 3 : 5;
|
||||
}
|
||||
exports.encodingLength = encodingLength;
|
||||
function encode(buffer, num, offset) {
|
||||
const size = encodingLength(num);
|
||||
// ~6 bit
|
||||
if (size === 1) {
|
||||
buffer.writeUInt8(num, offset);
|
||||
// 8 bit
|
||||
} else if (size === 2) {
|
||||
buffer.writeUInt8(ops_1.OPS.OP_PUSHDATA1, offset);
|
||||
buffer.writeUInt8(num, offset + 1);
|
||||
// 16 bit
|
||||
} else if (size === 3) {
|
||||
buffer.writeUInt8(ops_1.OPS.OP_PUSHDATA2, offset);
|
||||
buffer.writeUInt16LE(num, offset + 1);
|
||||
// 32 bit
|
||||
} else {
|
||||
buffer.writeUInt8(ops_1.OPS.OP_PUSHDATA4, offset);
|
||||
buffer.writeUInt32LE(num, offset + 1);
|
||||
}
|
||||
return size;
|
||||
}
|
||||
exports.encode = encode;
|
||||
function decode(buffer, offset) {
|
||||
const opcode = buffer.readUInt8(offset);
|
||||
let num;
|
||||
let size;
|
||||
// ~6 bit
|
||||
if (opcode < ops_1.OPS.OP_PUSHDATA1) {
|
||||
num = opcode;
|
||||
size = 1;
|
||||
// 8 bit
|
||||
} else if (opcode === ops_1.OPS.OP_PUSHDATA1) {
|
||||
if (offset + 2 > buffer.length) return null;
|
||||
num = buffer.readUInt8(offset + 1);
|
||||
size = 2;
|
||||
// 16 bit
|
||||
} else if (opcode === ops_1.OPS.OP_PUSHDATA2) {
|
||||
if (offset + 3 > buffer.length) return null;
|
||||
num = buffer.readUInt16LE(offset + 1);
|
||||
size = 3;
|
||||
// 32 bit
|
||||
} else {
|
||||
if (offset + 5 > buffer.length) return null;
|
||||
if (opcode !== ops_1.OPS.OP_PUSHDATA4) throw new Error('Unexpected opcode');
|
||||
num = buffer.readUInt32LE(offset + 1);
|
||||
size = 5;
|
||||
}
|
||||
return {
|
||||
opcode,
|
||||
number: num,
|
||||
size,
|
||||
};
|
||||
}
|
||||
exports.decode = decode;
|
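A worked example of the length encoding declared above (a sketch; it assumes the compiled module is loadable as ./push_data): a 20-byte push fits in a single length byte, while a 200-byte push needs an OP_PUSHDATA1 prefix.

const pushdata = require('./push_data');   // path assumed
pushdata.encodingLength(20);               // 1 (bare push opcode)
pushdata.encodingLength(200);              // 2 (OP_PUSHDATA1 + one length byte)
const buf = Buffer.allocUnsafe(2);
pushdata.encode(buf, 200, 0);              // buf = <4c c8>, returns 2
pushdata.decode(buf, 0);                   // { opcode: 0x4c, number: 200, size: 2 }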
17  src/script.d.ts (vendored)
@@ -1,17 +0,0 @@
/// <reference types="node" />
import { OPS } from './ops';
import { Stack } from './payments';
import * as scriptNumber from './script_number';
import * as scriptSignature from './script_signature';
export { OPS };
export declare function isPushOnly(value: Stack): boolean;
export declare function compile(chunks: Buffer | Stack): Buffer;
export declare function decompile(buffer: Buffer | Array<number | Buffer>): Array<number | Buffer> | null;
export declare function toASM(chunks: Buffer | Array<number | Buffer>): string;
export declare function fromASM(asm: string): Buffer;
export declare function toStack(chunks: Buffer | Array<number | Buffer>): Buffer[];
export declare function isCanonicalPubKey(buffer: Buffer): boolean;
export declare function isDefinedHashType(hashType: number): boolean;
export declare function isCanonicalScriptSignature(buffer: Buffer): boolean;
export declare const number: typeof scriptNumber;
export declare const signature: typeof scriptSignature;
326  src/script.js
@@ -1,182 +1,214 @@
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.signature = exports.number = exports.isCanonicalScriptSignature = exports.isDefinedHashType = exports.isCanonicalPubKey = exports.toStack = exports.fromASM = exports.toASM = exports.decompile = exports.compile = exports.isPushOnly = exports.OPS = void 0;
|
||||
const bip66 = require('./bip66');
|
||||
const ops_1 = require('./ops');
|
||||
Object.defineProperty(exports, 'OPS', {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return ops_1.OPS;
|
||||
},
|
||||
});
|
||||
const pushdata = require('./push_data');
|
||||
const scriptNumber = require('./script_number');
|
||||
const scriptSignature = require('./script_signature');
|
||||
const types = require('./types');
|
||||
const { typeforce } = types;
|
||||
const OP_INT_BASE = ops_1.OPS.OP_RESERVED; // OP_1 - 1
|
||||
function isOPInt(value) {
|
||||
return (
|
||||
types.Number(value) &&
|
||||
(value === ops_1.OPS.OP_0 ||
|
||||
(value >= ops_1.OPS.OP_1 && value <= ops_1.OPS.OP_16) ||
|
||||
value === ops_1.OPS.OP_1NEGATE)
|
||||
);
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
var bip66 = require('bip66')
|
||||
var pushdata = require('pushdata-bitcoin')
|
||||
var typeforce = require('typeforce')
|
||||
var types = require('./types')
|
||||
var scriptNumber = require('./script_number')
|
||||
|
||||
var OPS = require('bitcoin-ops')
|
||||
var REVERSE_OPS = require('bitcoin-ops/map')
|
||||
var OP_INT_BASE = OPS.OP_RESERVED // OP_1 - 1
|
||||
|
||||
function isOPInt (value) {
|
||||
return types.Number(value) &&
|
||||
((value === OPS.OP_0) ||
|
||||
(value >= OPS.OP_1 && value <= OPS.OP_16) ||
|
||||
(value === OPS.OP_1NEGATE))
|
||||
}
|
||||
function isPushOnlyChunk(value) {
|
||||
return types.Buffer(value) || isOPInt(value);
|
||||
|
||||
function isPushOnlyChunk (value) {
|
||||
return types.Buffer(value) || isOPInt(value)
|
||||
}
|
||||
function isPushOnly(value) {
|
||||
return types.Array(value) && value.every(isPushOnlyChunk);
|
||||
|
||||
function isPushOnly (value) {
|
||||
return types.Array(value) && value.every(isPushOnlyChunk)
|
||||
}
|
||||
exports.isPushOnly = isPushOnly;
|
||||
function asMinimalOP(buffer) {
|
||||
if (buffer.length === 0) return ops_1.OPS.OP_0;
|
||||
if (buffer.length !== 1) return;
|
||||
if (buffer[0] >= 1 && buffer[0] <= 16) return OP_INT_BASE + buffer[0];
|
||||
if (buffer[0] === 0x81) return ops_1.OPS.OP_1NEGATE;
|
||||
|
||||
function asMinimalOP (buffer) {
|
||||
if (buffer.length === 0) return OPS.OP_0
|
||||
if (buffer.length !== 1) return
|
||||
if (buffer[0] >= 1 && buffer[0] <= 16) return OP_INT_BASE + buffer[0]
|
||||
if (buffer[0] === 0x81) return OPS.OP_1NEGATE
|
||||
}
|
||||
function chunksIsBuffer(buf) {
|
||||
return Buffer.isBuffer(buf);
|
||||
}
|
||||
function chunksIsArray(buf) {
|
||||
return types.Array(buf);
|
||||
}
|
||||
function singleChunkIsBuffer(buf) {
|
||||
return Buffer.isBuffer(buf);
|
||||
}
|
||||
function compile(chunks) {
|
||||
|
||||
function compile (chunks) {
|
||||
// TODO: remove me
|
||||
if (chunksIsBuffer(chunks)) return chunks;
|
||||
typeforce(types.Array, chunks);
|
||||
const bufferSize = chunks.reduce((accum, chunk) => {
|
||||
if (Buffer.isBuffer(chunks)) return chunks
|
||||
|
||||
typeforce(types.Array, chunks)
|
||||
|
||||
var bufferSize = chunks.reduce(function (accum, chunk) {
|
||||
// data chunk
|
||||
if (singleChunkIsBuffer(chunk)) {
|
||||
if (Buffer.isBuffer(chunk)) {
|
||||
// adhere to BIP62.3, minimal push policy
|
||||
if (chunk.length === 1 && asMinimalOP(chunk) !== undefined) {
|
||||
return accum + 1;
|
||||
return accum + 1
|
||||
}
|
||||
return accum + pushdata.encodingLength(chunk.length) + chunk.length;
|
||||
|
||||
return accum + pushdata.encodingLength(chunk.length) + chunk.length
|
||||
}
|
||||
|
||||
// opcode
|
||||
return accum + 1;
|
||||
}, 0.0);
|
||||
const buffer = Buffer.allocUnsafe(bufferSize);
|
||||
let offset = 0;
|
||||
chunks.forEach(chunk => {
|
||||
return accum + 1
|
||||
}, 0.0)
|
||||
|
||||
var buffer = Buffer.allocUnsafe(bufferSize)
|
||||
var offset = 0
|
||||
|
||||
chunks.forEach(function (chunk) {
|
||||
// data chunk
|
||||
if (singleChunkIsBuffer(chunk)) {
|
||||
if (Buffer.isBuffer(chunk)) {
|
||||
// adhere to BIP62.3, minimal push policy
|
||||
const opcode = asMinimalOP(chunk);
|
||||
var opcode = asMinimalOP(chunk)
|
||||
if (opcode !== undefined) {
|
||||
buffer.writeUInt8(opcode, offset);
|
||||
offset += 1;
|
||||
return;
|
||||
buffer.writeUInt8(opcode, offset)
|
||||
offset += 1
|
||||
return
|
||||
}
|
||||
offset += pushdata.encode(buffer, chunk.length, offset);
|
||||
chunk.copy(buffer, offset);
|
||||
offset += chunk.length;
|
||||
// opcode
|
||||
|
||||
offset += pushdata.encode(buffer, chunk.length, offset)
|
||||
chunk.copy(buffer, offset)
|
||||
offset += chunk.length
|
||||
|
||||
// opcode
|
||||
} else {
|
||||
buffer.writeUInt8(chunk, offset);
|
||||
offset += 1;
|
||||
buffer.writeUInt8(chunk, offset)
|
||||
offset += 1
|
||||
}
|
||||
});
|
||||
if (offset !== buffer.length) throw new Error('Could not decode chunks');
|
||||
return buffer;
|
||||
})
|
||||
|
||||
if (offset !== buffer.length) throw new Error('Could not decode chunks')
|
||||
return buffer
|
||||
}
|
||||
exports.compile = compile;
|
||||
function decompile(buffer) {
|
||||
|
||||
function decompile (buffer) {
|
||||
// TODO: remove me
|
||||
if (chunksIsArray(buffer)) return buffer;
|
||||
typeforce(types.Buffer, buffer);
|
||||
const chunks = [];
|
||||
let i = 0;
|
||||
if (types.Array(buffer)) return buffer
|
||||
|
||||
typeforce(types.Buffer, buffer)
|
||||
|
||||
var chunks = []
|
||||
var i = 0
|
||||
|
||||
while (i < buffer.length) {
|
||||
const opcode = buffer[i];
|
||||
var opcode = buffer[i]
|
||||
|
||||
// data chunk
|
||||
if (opcode > ops_1.OPS.OP_0 && opcode <= ops_1.OPS.OP_PUSHDATA4) {
|
||||
const d = pushdata.decode(buffer, i);
|
||||
// did reading a pushDataInt fail?
|
||||
if (d === null) return null;
|
||||
i += d.size;
|
||||
// attempt to read too much data?
|
||||
if (i + d.number > buffer.length) return null;
|
||||
const data = buffer.slice(i, i + d.number);
|
||||
i += d.number;
|
||||
if ((opcode > OPS.OP_0) && (opcode <= OPS.OP_PUSHDATA4)) {
|
||||
var d = pushdata.decode(buffer, i)
|
||||
|
||||
// did reading a pushDataInt fail? empty script
|
||||
if (d === null) return []
|
||||
i += d.size
|
||||
|
||||
// attempt to read too much data? empty script
|
||||
if (i + d.number > buffer.length) return []
|
||||
|
||||
var data = buffer.slice(i, i + d.number)
|
||||
i += d.number
|
||||
|
||||
// decompile minimally
|
||||
const op = asMinimalOP(data);
|
||||
var op = asMinimalOP(data)
|
||||
if (op !== undefined) {
|
||||
chunks.push(op);
|
||||
chunks.push(op)
|
||||
} else {
|
||||
chunks.push(data);
|
||||
chunks.push(data)
|
||||
}
|
||||
// opcode
|
||||
|
||||
// opcode
|
||||
} else {
|
||||
chunks.push(opcode);
|
||||
i += 1;
|
||||
chunks.push(opcode)
|
||||
|
||||
i += 1
|
||||
}
|
||||
}
|
||||
return chunks;
|
||||
}
|
||||
exports.decompile = decompile;
|
||||
function toASM(chunks) {
|
||||
if (chunksIsBuffer(chunks)) {
|
||||
chunks = decompile(chunks);
|
||||
}
|
||||
|
||||
return chunks
|
||||
.map(chunk => {
|
||||
// data?
|
||||
if (singleChunkIsBuffer(chunk)) {
|
||||
const op = asMinimalOP(chunk);
|
||||
if (op === undefined) return chunk.toString('hex');
|
||||
chunk = op;
|
||||
}
|
||||
// opcode!
|
||||
return ops_1.REVERSE_OPS[chunk];
|
||||
})
|
||||
.join(' ');
|
||||
}
|
||||
exports.toASM = toASM;
|
||||
function fromASM(asm) {
|
||||
typeforce(types.String, asm);
|
||||
return compile(
|
||||
asm.split(' ').map(chunkStr => {
|
||||
// opcode?
|
||||
if (ops_1.OPS[chunkStr] !== undefined) return ops_1.OPS[chunkStr];
|
||||
typeforce(types.Hex, chunkStr);
|
||||
// data!
|
||||
return Buffer.from(chunkStr, 'hex');
|
||||
}),
|
||||
);
|
||||
|
||||
function toASM (chunks) {
|
||||
if (Buffer.isBuffer(chunks)) {
|
||||
chunks = decompile(chunks)
|
||||
}
|
||||
|
||||
return chunks.map(function (chunk) {
|
||||
// data?
|
||||
if (Buffer.isBuffer(chunk)) {
|
||||
var op = asMinimalOP(chunk)
|
||||
if (op === undefined) return chunk.toString('hex')
|
||||
chunk = op
|
||||
}
|
||||
|
||||
// opcode!
|
||||
return REVERSE_OPS[chunk]
|
||||
}).join(' ')
|
||||
}
|
||||
exports.fromASM = fromASM;
|
||||
function toStack(chunks) {
|
||||
chunks = decompile(chunks);
|
||||
typeforce(isPushOnly, chunks);
|
||||
return chunks.map(op => {
|
||||
if (singleChunkIsBuffer(op)) return op;
|
||||
if (op === ops_1.OPS.OP_0) return Buffer.allocUnsafe(0);
|
||||
return scriptNumber.encode(op - OP_INT_BASE);
|
||||
});
|
||||
|
||||
function fromASM (asm) {
|
||||
typeforce(types.String, asm)
|
||||
|
||||
return compile(asm.split(' ').map(function (chunkStr) {
|
||||
// opcode?
|
||||
if (OPS[chunkStr] !== undefined) return OPS[chunkStr]
|
||||
typeforce(types.Hex, chunkStr)
|
||||
|
||||
// data!
|
||||
return Buffer.from(chunkStr, 'hex')
|
||||
}))
|
||||
}
|
||||
exports.toStack = toStack;
|
||||
function isCanonicalPubKey(buffer) {
|
||||
return types.isPoint(buffer);
|
||||
|
||||
function toStack (chunks) {
|
||||
chunks = decompile(chunks)
|
||||
typeforce(isPushOnly, chunks)
|
||||
|
||||
return chunks.map(function (op) {
|
||||
if (Buffer.isBuffer(op)) return op
|
||||
if (op === OPS.OP_0) return Buffer.allocUnsafe(0)
|
||||
|
||||
return scriptNumber.encode(op - OP_INT_BASE)
|
||||
})
|
||||
}
|
||||
exports.isCanonicalPubKey = isCanonicalPubKey;
|
||||
function isDefinedHashType(hashType) {
|
||||
const hashTypeMod = hashType & ~0x80;
|
||||
// return hashTypeMod > SIGHASH_ALL && hashTypeMod < SIGHASH_SINGLE
|
||||
return hashTypeMod > 0x00 && hashTypeMod < 0x04;
|
||||
|
||||
function isCanonicalPubKey (buffer) {
|
||||
if (!Buffer.isBuffer(buffer)) return false
|
||||
if (buffer.length < 33) return false
|
||||
|
||||
switch (buffer[0]) {
|
||||
case 0x02:
|
||||
case 0x03:
|
||||
return buffer.length === 33
|
||||
case 0x04:
|
||||
return buffer.length === 65
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
exports.isDefinedHashType = isDefinedHashType;
|
||||
function isCanonicalScriptSignature(buffer) {
|
||||
if (!Buffer.isBuffer(buffer)) return false;
|
||||
if (!isDefinedHashType(buffer[buffer.length - 1])) return false;
|
||||
return bip66.check(buffer.slice(0, -1));
|
||||
|
||||
function isDefinedHashType (hashType) {
|
||||
var hashTypeMod = hashType & ~0x80
|
||||
|
||||
// return hashTypeMod > SIGHASH_ALL && hashTypeMod < SIGHASH_SINGLE
|
||||
return hashTypeMod > 0x00 && hashTypeMod < 0x04
|
||||
}
|
||||
|
||||
function isCanonicalSignature (buffer) {
|
||||
if (!Buffer.isBuffer(buffer)) return false
|
||||
if (!isDefinedHashType(buffer[buffer.length - 1])) return false
|
||||
|
||||
return bip66.check(buffer.slice(0, -1))
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
compile: compile,
|
||||
decompile: decompile,
|
||||
fromASM: fromASM,
|
||||
toASM: toASM,
|
||||
toStack: toStack,
|
||||
|
||||
number: require('./script_number'),
|
||||
|
||||
isCanonicalPubKey: isCanonicalPubKey,
|
||||
isCanonicalSignature: isCanonicalSignature,
|
||||
isPushOnly: isPushOnly,
|
||||
isDefinedHashType: isDefinedHashType
|
||||
}
|
||||
exports.isCanonicalScriptSignature = isCanonicalScriptSignature;
|
||||
// tslint:disable-next-line variable-name
|
||||
exports.number = scriptNumber;
|
||||
exports.signature = scriptSignature;
|
||||
|
|
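A short sketch of the ASM helpers above (assuming the module is loadable as ./script); because of the minimal-push rule, a single byte 0x01–0x10 compiles to OP_1–OP_16 rather than a data push.

const bscript = require('./script');   // path assumed
const asm =
  'OP_DUP OP_HASH160 ffffffffffffffffffffffffffffffffffffffff OP_EQUALVERIFY OP_CHECKSIG';
const script = bscript.fromASM(asm);   // 25-byte P2PKH scriptPubKey
bscript.toASM(script) === asm;         // true
bscript.decompile(script);             // [OP_DUP, OP_HASH160, <20-byte Buffer>, OP_EQUALVERIFY, OP_CHECKSIG]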
3  src/script_number.d.ts (vendored)
@@ -1,3 +0,0 @@
/// <reference types="node" />
export declare function decode(buffer: Buffer, maxLength?: number, minimal?: boolean): number;
export declare function encode(_number: number): Buffer;
src/script_number.js
@@ -1,62 +1,68 @@
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.encode = exports.decode = void 0;
|
||||
function decode(buffer, maxLength, minimal) {
|
||||
maxLength = maxLength || 4;
|
||||
minimal = minimal === undefined ? true : minimal;
|
||||
const length = buffer.length;
|
||||
if (length === 0) return 0;
|
||||
if (length > maxLength) throw new TypeError('Script number overflow');
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
|
||||
function decode (buffer, maxLength, minimal) {
|
||||
maxLength = maxLength || 4
|
||||
minimal = minimal === undefined ? true : minimal
|
||||
|
||||
var length = buffer.length
|
||||
if (length === 0) return 0
|
||||
if (length > maxLength) throw new TypeError('Script number overflow')
|
||||
if (minimal) {
|
||||
if ((buffer[length - 1] & 0x7f) === 0) {
|
||||
if (length <= 1 || (buffer[length - 2] & 0x80) === 0)
|
||||
throw new Error('Non-minimally encoded script number');
|
||||
if (length <= 1 || (buffer[length - 2] & 0x80) === 0) throw new Error('Non-minimally encoded script number')
|
||||
}
|
||||
}
|
||||
|
||||
// 40-bit
|
||||
if (length === 5) {
|
||||
const a = buffer.readUInt32LE(0);
|
||||
const b = buffer.readUInt8(4);
|
||||
if (b & 0x80) return -((b & ~0x80) * 0x100000000 + a);
|
||||
return b * 0x100000000 + a;
|
||||
var a = buffer.readUInt32LE(0)
|
||||
var b = buffer.readUInt8(4)
|
||||
|
||||
if (b & 0x80) return -(((b & ~0x80) * 0x100000000) + a)
|
||||
return (b * 0x100000000) + a
|
||||
}
|
||||
|
||||
var result = 0
|
||||
|
||||
// 32-bit / 24-bit / 16-bit / 8-bit
|
||||
let result = 0;
|
||||
for (let i = 0; i < length; ++i) {
|
||||
result |= buffer[i] << (8 * i);
|
||||
for (var i = 0; i < length; ++i) {
|
||||
result |= buffer[i] << (8 * i)
|
||||
}
|
||||
if (buffer[length - 1] & 0x80)
|
||||
return -(result & ~(0x80 << (8 * (length - 1))));
|
||||
return result;
|
||||
|
||||
if (buffer[length - 1] & 0x80) return -(result & ~(0x80 << (8 * (length - 1))))
|
||||
return result
|
||||
}
|
||||
exports.decode = decode;
|
||||
function scriptNumSize(i) {
|
||||
return i > 0x7fffffff
|
||||
? 5
|
||||
: i > 0x7fffff
|
||||
? 4
|
||||
: i > 0x7fff
|
||||
? 3
|
||||
: i > 0x7f
|
||||
? 2
|
||||
: i > 0x00
|
||||
? 1
|
||||
: 0;
|
||||
|
||||
function scriptNumSize (i) {
|
||||
return i > 0x7fffffff ? 5
|
||||
: i > 0x7fffff ? 4
|
||||
: i > 0x7fff ? 3
|
||||
: i > 0x7f ? 2
|
||||
: i > 0x00 ? 1
|
||||
: 0
|
||||
}
|
||||
function encode(_number) {
|
||||
let value = Math.abs(_number);
|
||||
const size = scriptNumSize(value);
|
||||
const buffer = Buffer.allocUnsafe(size);
|
||||
const negative = _number < 0;
|
||||
for (let i = 0; i < size; ++i) {
|
||||
buffer.writeUInt8(value & 0xff, i);
|
||||
value >>= 8;
|
||||
|
||||
function encode (number) {
|
||||
var value = Math.abs(number)
|
||||
var size = scriptNumSize(value)
|
||||
var buffer = Buffer.allocUnsafe(size)
|
||||
var negative = number < 0
|
||||
|
||||
for (var i = 0; i < size; ++i) {
|
||||
buffer.writeUInt8(value & 0xff, i)
|
||||
value >>= 8
|
||||
}
|
||||
|
||||
if (buffer[size - 1] & 0x80) {
|
||||
buffer.writeUInt8(negative ? 0x80 : 0x00, size - 1);
|
||||
buffer.writeUInt8(negative ? 0x80 : 0x00, size - 1)
|
||||
} else if (negative) {
|
||||
buffer[size - 1] |= 0x80;
|
||||
buffer[size - 1] |= 0x80
|
||||
}
|
||||
return buffer;
|
||||
|
||||
return buffer
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
decode: decode,
|
||||
encode: encode
|
||||
}
|
||||
exports.encode = encode;
|
||||
|
|
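Worked values for the encoder above (sketch, path assumed): script numbers are little-endian with the sign carried in the top bit of the last byte, so 255 gains a 0x00 pad byte and -1 becomes 0x81.

const scriptNumber = require('./script_number');
scriptNumber.encode(255);                          // <Buffer ff 00>
scriptNumber.encode(-1);                           // <Buffer 81>
scriptNumber.decode(Buffer.from('ff00', 'hex'));   // 255
scriptNumber.decode(Buffer.from('81', 'hex'));     // -1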
8  src/script_signature.d.ts (vendored)
@@ -1,8 +0,0 @@
/// <reference types="node" />
interface ScriptSignature {
    signature: Buffer;
    hashType: number;
}
export declare function decode(buffer: Buffer): ScriptSignature;
export declare function encode(signature: Buffer, hashType: number): Buffer;
export {};
src/script_signature.js
@@ -1,53 +0,0 @@
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.encode = exports.decode = void 0;
|
||||
const bip66 = require('./bip66');
|
||||
const types = require('./types');
|
||||
const { typeforce } = types;
|
||||
const ZERO = Buffer.alloc(1, 0);
|
||||
function toDER(x) {
|
||||
let i = 0;
|
||||
while (x[i] === 0) ++i;
|
||||
if (i === x.length) return ZERO;
|
||||
x = x.slice(i);
|
||||
if (x[0] & 0x80) return Buffer.concat([ZERO, x], 1 + x.length);
|
||||
return x;
|
||||
}
|
||||
function fromDER(x) {
|
||||
if (x[0] === 0x00) x = x.slice(1);
|
||||
const buffer = Buffer.alloc(32, 0);
|
||||
const bstart = Math.max(0, 32 - x.length);
|
||||
x.copy(buffer, bstart);
|
||||
return buffer;
|
||||
}
|
||||
// BIP62: 1 byte hashType flag (only 0x01, 0x02, 0x03, 0x81, 0x82 and 0x83 are allowed)
|
||||
function decode(buffer) {
|
||||
const hashType = buffer.readUInt8(buffer.length - 1);
|
||||
const hashTypeMod = hashType & ~0x80;
|
||||
if (hashTypeMod <= 0 || hashTypeMod >= 4)
|
||||
throw new Error('Invalid hashType ' + hashType);
|
||||
const decoded = bip66.decode(buffer.slice(0, -1));
|
||||
const r = fromDER(decoded.r);
|
||||
const s = fromDER(decoded.s);
|
||||
const signature = Buffer.concat([r, s], 64);
|
||||
return { signature, hashType };
|
||||
}
|
||||
exports.decode = decode;
|
||||
function encode(signature, hashType) {
|
||||
typeforce(
|
||||
{
|
||||
signature: types.BufferN(64),
|
||||
hashType: types.UInt8,
|
||||
},
|
||||
{ signature, hashType },
|
||||
);
|
||||
const hashTypeMod = hashType & ~0x80;
|
||||
if (hashTypeMod <= 0 || hashTypeMod >= 4)
|
||||
throw new Error('Invalid hashType ' + hashType);
|
||||
const hashTypeBuffer = Buffer.allocUnsafe(1);
|
||||
hashTypeBuffer.writeUInt8(hashType, 0);
|
||||
const r = toDER(signature.slice(0, 32));
|
||||
const s = toDER(signature.slice(32, 64));
|
||||
return Buffer.concat([bip66.encode(r, s), hashTypeBuffer]);
|
||||
}
|
||||
exports.encode = encode;
|
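A sketch of the DER helpers above (assumptions: `rawSig` is a 64-byte r||s signature and 0x01 is SIGHASH_ALL):

const scriptSignature = require('./script_signature');        // path assumed
const der = scriptSignature.encode(rawSig, 0x01);              // BIP66 DER signature + 1-byte hashType
const { signature, hashType } = scriptSignature.decode(der);   // 64-byte signature, hashType === 1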
74  src/templates/index.js (Normal file)
@@ -0,0 +1,74 @@
var decompile = require('../script').decompile
|
||||
var multisig = require('./multisig')
|
||||
var nullData = require('./nulldata')
|
||||
var pubKey = require('./pubkey')
|
||||
var pubKeyHash = require('./pubkeyhash')
|
||||
var scriptHash = require('./scripthash')
|
||||
var witnessPubKeyHash = require('./witnesspubkeyhash')
|
||||
var witnessScriptHash = require('./witnessscripthash')
|
||||
var witnessCommitment = require('./witnesscommitment')
|
||||
|
||||
var types = {
|
||||
MULTISIG: 'multisig',
|
||||
NONSTANDARD: 'nonstandard',
|
||||
NULLDATA: 'nulldata',
|
||||
P2PK: 'pubkey',
|
||||
P2PKH: 'pubkeyhash',
|
||||
P2SH: 'scripthash',
|
||||
P2WPKH: 'witnesspubkeyhash',
|
||||
P2WSH: 'witnessscripthash',
|
||||
WITNESS_COMMITMENT: 'witnesscommitment'
|
||||
}
|
||||
|
||||
function classifyOutput (script) {
|
||||
if (witnessPubKeyHash.output.check(script)) return types.P2WPKH
|
||||
if (witnessScriptHash.output.check(script)) return types.P2WSH
|
||||
if (pubKeyHash.output.check(script)) return types.P2PKH
|
||||
if (scriptHash.output.check(script)) return types.P2SH
|
||||
|
||||
// XXX: optimization, below functions .decompile before use
|
||||
var chunks = decompile(script)
|
||||
if (multisig.output.check(chunks)) return types.MULTISIG
|
||||
if (pubKey.output.check(chunks)) return types.P2PK
|
||||
if (witnessCommitment.output.check(chunks)) return types.WITNESS_COMMITMENT
|
||||
if (nullData.output.check(chunks)) return types.NULLDATA
|
||||
|
||||
return types.NONSTANDARD
|
||||
}
|
||||
|
||||
function classifyInput (script, allowIncomplete) {
|
||||
// XXX: optimization, below functions .decompile before use
|
||||
var chunks = decompile(script)
|
||||
|
||||
if (pubKeyHash.input.check(chunks)) return types.P2PKH
|
||||
if (scriptHash.input.check(chunks, allowIncomplete)) return types.P2SH
|
||||
if (multisig.input.check(chunks, allowIncomplete)) return types.MULTISIG
|
||||
if (pubKey.input.check(chunks)) return types.P2PK
|
||||
|
||||
return types.NONSTANDARD
|
||||
}
|
||||
|
||||
function classifyWitness (script, allowIncomplete) {
|
||||
// XXX: optimization, below functions .decompile before use
|
||||
var chunks = decompile(script)
|
||||
|
||||
if (witnessPubKeyHash.input.check(chunks)) return types.P2WPKH
|
||||
if (witnessScriptHash.input.check(chunks, allowIncomplete)) return types.P2WSH
|
||||
|
||||
return types.NONSTANDARD
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
classifyInput: classifyInput,
|
||||
classifyOutput: classifyOutput,
|
||||
classifyWitness: classifyWitness,
|
||||
multisig: multisig,
|
||||
nullData: nullData,
|
||||
pubKey: pubKey,
|
||||
pubKeyHash: pubKeyHash,
|
||||
scriptHash: scriptHash,
|
||||
witnessPubKeyHash: witnessPubKeyHash,
|
||||
witnessScriptHash: witnessScriptHash,
|
||||
witnessCommitment: witnessCommitment,
|
||||
types: types
|
||||
}
|
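A quick sketch of the classifiers above (paths are assumed to resolve from src/):

var Buffer = require('safe-buffer').Buffer
var bscript = require('./script')
var btemplates = require('./templates')
var OPS = require('bitcoin-ops')

var nullDataScript = bscript.compile([OPS.OP_RETURN, Buffer.from('deadbeef', 'hex')])
btemplates.classifyOutput(nullDataScript) // 'nulldata'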
4  src/templates/multisig/index.js (Normal file)
@@ -0,0 +1,4 @@
|
|||
module.exports = {
|
||||
input: require('./input'),
|
||||
output: require('./output')
|
||||
}
|
72  src/templates/multisig/input.js (Normal file)
@@ -0,0 +1,72 @@
|
|||
// OP_0 [signatures ...]
|
||||
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
var bscript = require('../../script')
|
||||
var p2mso = require('./output')
|
||||
var typeforce = require('typeforce')
|
||||
var OPS = require('bitcoin-ops')
|
||||
|
||||
function partialSignature (value) {
|
||||
return value === OPS.OP_0 || bscript.isCanonicalSignature(value)
|
||||
}
|
||||
|
||||
function check (script, allowIncomplete) {
|
||||
var chunks = bscript.decompile(script)
|
||||
if (chunks.length < 2) return false
|
||||
if (chunks[0] !== OPS.OP_0) return false
|
||||
|
||||
if (allowIncomplete) {
|
||||
return chunks.slice(1).every(partialSignature)
|
||||
}
|
||||
|
||||
return chunks.slice(1).every(bscript.isCanonicalSignature)
|
||||
}
|
||||
check.toJSON = function () { return 'multisig input' }
|
||||
|
||||
var EMPTY_BUFFER = Buffer.allocUnsafe(0)
|
||||
|
||||
function encodeStack (signatures, scriptPubKey) {
|
||||
typeforce([partialSignature], signatures)
|
||||
|
||||
if (scriptPubKey) {
|
||||
var scriptData = p2mso.decode(scriptPubKey)
|
||||
|
||||
if (signatures.length < scriptData.m) {
|
||||
throw new TypeError('Not enough signatures provided')
|
||||
}
|
||||
|
||||
if (signatures.length > scriptData.pubKeys.length) {
|
||||
throw new TypeError('Too many signatures provided')
|
||||
}
|
||||
}
|
||||
|
||||
return [].concat(EMPTY_BUFFER, signatures.map(function (sig) {
|
||||
if (sig === OPS.OP_0) {
|
||||
return EMPTY_BUFFER
|
||||
}
|
||||
return sig
|
||||
}))
|
||||
}
|
||||
|
||||
function encode (signatures, scriptPubKey) {
|
||||
return bscript.compile(encodeStack(signatures, scriptPubKey))
|
||||
}
|
||||
|
||||
function decodeStack (stack, allowIncomplete) {
|
||||
typeforce(typeforce.Array, stack)
|
||||
typeforce(check, stack, allowIncomplete)
|
||||
return stack.slice(1)
|
||||
}
|
||||
|
||||
function decode (buffer, allowIncomplete) {
|
||||
var stack = bscript.decompile(buffer)
|
||||
return decodeStack(stack, allowIncomplete)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
decodeStack: decodeStack,
|
||||
encode: encode,
|
||||
encodeStack: encodeStack
|
||||
}
|
64  src/templates/multisig/output.js (Normal file)
@@ -0,0 +1,64 @@
|
|||
// m [pubKeys ...] n OP_CHECKMULTISIG
|
||||
|
||||
var bscript = require('../../script')
|
||||
var types = require('../../types')
|
||||
var typeforce = require('typeforce')
|
||||
var OPS = require('bitcoin-ops')
|
||||
var OP_INT_BASE = OPS.OP_RESERVED // OP_1 - 1
|
||||
|
||||
function check (script, allowIncomplete) {
|
||||
var chunks = bscript.decompile(script)
|
||||
|
||||
if (chunks.length < 4) return false
|
||||
if (chunks[chunks.length - 1] !== OPS.OP_CHECKMULTISIG) return false
|
||||
if (!types.Number(chunks[0])) return false
|
||||
if (!types.Number(chunks[chunks.length - 2])) return false
|
||||
var m = chunks[0] - OP_INT_BASE
|
||||
var n = chunks[chunks.length - 2] - OP_INT_BASE
|
||||
|
||||
if (m <= 0) return false
|
||||
if (n > 16) return false
|
||||
if (m > n) return false
|
||||
if (n !== chunks.length - 3) return false
|
||||
if (allowIncomplete) return true
|
||||
|
||||
var keys = chunks.slice(1, -2)
|
||||
return keys.every(bscript.isCanonicalPubKey)
|
||||
}
|
||||
check.toJSON = function () { return 'multi-sig output' }
|
||||
|
||||
function encode (m, pubKeys) {
|
||||
typeforce({
|
||||
m: types.Number,
|
||||
pubKeys: [bscript.isCanonicalPubKey]
|
||||
}, {
|
||||
m: m,
|
||||
pubKeys: pubKeys
|
||||
})
|
||||
|
||||
var n = pubKeys.length
|
||||
if (n < m) throw new TypeError('Not enough pubKeys provided')
|
||||
|
||||
return bscript.compile([].concat(
|
||||
OP_INT_BASE + m,
|
||||
pubKeys,
|
||||
OP_INT_BASE + n,
|
||||
OPS.OP_CHECKMULTISIG
|
||||
))
|
||||
}
|
||||
|
||||
function decode (buffer, allowIncomplete) {
|
||||
var chunks = bscript.decompile(buffer)
|
||||
typeforce(check, chunks, allowIncomplete)
|
||||
|
||||
return {
|
||||
m: chunks[0] - OP_INT_BASE,
|
||||
pubKeys: chunks.slice(1, -2)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
encode: encode
|
||||
}
|
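Sketch of the encoder above (assumption: `pubKeys` is an array of three canonical 33-byte compressed public keys):

var p2mso = require('./templates/multisig/output')   // path assumed, relative to src/
var script = p2mso.encode(2, pubKeys)                // OP_2 <pk1> <pk2> <pk3> OP_3 OP_CHECKMULTISIG
p2mso.decode(script)                                 // { m: 2, pubKeys: [...] }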
34  src/templates/nulldata.js (Normal file)
@@ -0,0 +1,34 @@
|
|||
// OP_RETURN {data}
|
||||
|
||||
var bscript = require('../script')
|
||||
var types = require('../types')
|
||||
var typeforce = require('typeforce')
|
||||
var OPS = require('bitcoin-ops')
|
||||
|
||||
function check (script) {
|
||||
var buffer = bscript.compile(script)
|
||||
|
||||
return buffer.length > 1 &&
|
||||
buffer[0] === OPS.OP_RETURN
|
||||
}
|
||||
check.toJSON = function () { return 'null data output' }
|
||||
|
||||
function encode (data) {
|
||||
typeforce(types.Buffer, data)
|
||||
|
||||
return bscript.compile([OPS.OP_RETURN, data])
|
||||
}
|
||||
|
||||
function decode (buffer) {
|
||||
typeforce(check, buffer)
|
||||
|
||||
return buffer.slice(2)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
output: {
|
||||
check: check,
|
||||
decode: decode,
|
||||
encode: encode
|
||||
}
|
||||
}
|
4  src/templates/pubkey/index.js (Normal file)
@@ -0,0 +1,4 @@
|
|||
module.exports = {
|
||||
input: require('./input'),
|
||||
output: require('./output')
|
||||
}
|
40  src/templates/pubkey/input.js (Normal file)
@@ -0,0 +1,40 @@
|
|||
// {signature}
|
||||
|
||||
var bscript = require('../../script')
|
||||
var typeforce = require('typeforce')
|
||||
|
||||
function check (script) {
|
||||
var chunks = bscript.decompile(script)
|
||||
|
||||
return chunks.length === 1 &&
|
||||
bscript.isCanonicalSignature(chunks[0])
|
||||
}
|
||||
check.toJSON = function () { return 'pubKey input' }
|
||||
|
||||
function encodeStack (signature) {
|
||||
typeforce(bscript.isCanonicalSignature, signature)
|
||||
return [signature]
|
||||
}
|
||||
|
||||
function encode (signature) {
|
||||
return bscript.compile(encodeStack(signature))
|
||||
}
|
||||
|
||||
function decodeStack (stack) {
|
||||
typeforce(typeforce.Array, stack)
|
||||
typeforce(check, stack)
|
||||
return stack[0]
|
||||
}
|
||||
|
||||
function decode (buffer) {
|
||||
var stack = bscript.decompile(buffer)
|
||||
return decodeStack(stack)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
decodeStack: decodeStack,
|
||||
encode: encode,
|
||||
encodeStack: encodeStack
|
||||
}
|
33  src/templates/pubkey/output.js (Normal file)
@@ -0,0 +1,33 @@
|
|||
// {pubKey} OP_CHECKSIG
|
||||
|
||||
var bscript = require('../../script')
|
||||
var typeforce = require('typeforce')
|
||||
var OPS = require('bitcoin-ops')
|
||||
|
||||
function check (script) {
|
||||
var chunks = bscript.decompile(script)
|
||||
|
||||
return chunks.length === 2 &&
|
||||
bscript.isCanonicalPubKey(chunks[0]) &&
|
||||
chunks[1] === OPS.OP_CHECKSIG
|
||||
}
|
||||
check.toJSON = function () { return 'pubKey output' }
|
||||
|
||||
function encode (pubKey) {
|
||||
typeforce(bscript.isCanonicalPubKey, pubKey)
|
||||
|
||||
return bscript.compile([pubKey, OPS.OP_CHECKSIG])
|
||||
}
|
||||
|
||||
function decode (buffer) {
|
||||
var chunks = bscript.decompile(buffer)
|
||||
typeforce(check, chunks)
|
||||
|
||||
return chunks[0]
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
encode: encode
|
||||
}
|
4  src/templates/pubkeyhash/index.js (Normal file)
@@ -0,0 +1,4 @@
|
|||
module.exports = {
|
||||
input: require('./input'),
|
||||
output: require('./output')
|
||||
}
|
52  src/templates/pubkeyhash/input.js (Normal file)
@@ -0,0 +1,52 @@
|
|||
// {signature} {pubKey}
|
||||
|
||||
var bscript = require('../../script')
|
||||
var typeforce = require('typeforce')
|
||||
|
||||
function check (script) {
|
||||
var chunks = bscript.decompile(script)
|
||||
|
||||
return chunks.length === 2 &&
|
||||
bscript.isCanonicalSignature(chunks[0]) &&
|
||||
bscript.isCanonicalPubKey(chunks[1])
|
||||
}
|
||||
check.toJSON = function () { return 'pubKeyHash input' }
|
||||
|
||||
function encodeStack (signature, pubKey) {
|
||||
typeforce({
|
||||
signature: bscript.isCanonicalSignature,
|
||||
pubKey: bscript.isCanonicalPubKey
|
||||
}, {
|
||||
signature: signature,
|
||||
pubKey: pubKey
|
||||
})
|
||||
|
||||
return [signature, pubKey]
|
||||
}
|
||||
|
||||
function encode (signature, pubKey) {
|
||||
return bscript.compile(encodeStack(signature, pubKey))
|
||||
}
|
||||
|
||||
function decodeStack (stack) {
|
||||
typeforce(typeforce.Array, stack)
|
||||
typeforce(check, stack)
|
||||
|
||||
return {
|
||||
signature: stack[0],
|
||||
pubKey: stack[1]
|
||||
}
|
||||
}
|
||||
|
||||
function decode (buffer) {
|
||||
var stack = bscript.decompile(buffer)
|
||||
return decodeStack(stack)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
decodeStack: decodeStack,
|
||||
encode: encode,
|
||||
encodeStack: encodeStack
|
||||
}
|
42  src/templates/pubkeyhash/output.js (Normal file)
@@ -0,0 +1,42 @@
|
|||
// OP_DUP OP_HASH160 {pubKeyHash} OP_EQUALVERIFY OP_CHECKSIG
|
||||
|
||||
var bscript = require('../../script')
|
||||
var types = require('../../types')
|
||||
var typeforce = require('typeforce')
|
||||
var OPS = require('bitcoin-ops')
|
||||
|
||||
function check (script) {
|
||||
var buffer = bscript.compile(script)
|
||||
|
||||
return buffer.length === 25 &&
|
||||
buffer[0] === OPS.OP_DUP &&
|
||||
buffer[1] === OPS.OP_HASH160 &&
|
||||
buffer[2] === 0x14 &&
|
||||
buffer[23] === OPS.OP_EQUALVERIFY &&
|
||||
buffer[24] === OPS.OP_CHECKSIG
|
||||
}
|
||||
check.toJSON = function () { return 'pubKeyHash output' }
|
||||
|
||||
function encode (pubKeyHash) {
|
||||
typeforce(types.Hash160bit, pubKeyHash)
|
||||
|
||||
return bscript.compile([
|
||||
OPS.OP_DUP,
|
||||
OPS.OP_HASH160,
|
||||
pubKeyHash,
|
||||
OPS.OP_EQUALVERIFY,
|
||||
OPS.OP_CHECKSIG
|
||||
])
|
||||
}
|
||||
|
||||
function decode (buffer) {
|
||||
typeforce(check, buffer)
|
||||
|
||||
return buffer.slice(3, 23)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
encode: encode
|
||||
}
|
4  src/templates/scripthash/index.js (Normal file)
@@ -0,0 +1,4 @@
|
|||
module.exports = {
|
||||
input: require('./input'),
|
||||
output: require('./output')
|
||||
}
|
85  src/templates/scripthash/input.js (Normal file)
@@ -0,0 +1,85 @@
|
|||
// <scriptSig> {serialized scriptPubKey script}
|
||||
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
var bscript = require('../../script')
|
||||
var typeforce = require('typeforce')
|
||||
|
||||
var p2ms = require('../multisig/')
|
||||
var p2pk = require('../pubkey/')
|
||||
var p2pkh = require('../pubkeyhash/')
|
||||
var p2wpkho = require('../witnesspubkeyhash/output')
|
||||
var p2wsho = require('../witnessscripthash/output')
|
||||
|
||||
function check (script, allowIncomplete) {
|
||||
var chunks = bscript.decompile(script)
|
||||
if (chunks.length < 1) return false
|
||||
|
||||
var lastChunk = chunks[chunks.length - 1]
|
||||
if (!Buffer.isBuffer(lastChunk)) return false
|
||||
|
||||
var scriptSigChunks = bscript.decompile(bscript.compile(chunks.slice(0, -1)))
|
||||
var redeemScriptChunks = bscript.decompile(lastChunk)
|
||||
|
||||
// is redeemScript a valid script?
|
||||
if (redeemScriptChunks.length === 0) return false
|
||||
|
||||
// is redeemScriptSig push only?
|
||||
if (!bscript.isPushOnly(scriptSigChunks)) return false
|
||||
|
||||
// is witness?
|
||||
if (chunks.length === 1) {
|
||||
return p2wsho.check(redeemScriptChunks) ||
|
||||
p2wpkho.check(redeemScriptChunks)
|
||||
}
|
||||
|
||||
// match types
|
||||
if (p2pkh.input.check(scriptSigChunks) &&
|
||||
p2pkh.output.check(redeemScriptChunks)) return true
|
||||
|
||||
if (p2ms.input.check(scriptSigChunks, allowIncomplete) &&
|
||||
p2ms.output.check(redeemScriptChunks)) return true
|
||||
|
||||
if (p2pk.input.check(scriptSigChunks) &&
|
||||
p2pk.output.check(redeemScriptChunks)) return true
|
||||
|
||||
return false
|
||||
}
|
||||
check.toJSON = function () { return 'scriptHash input' }
|
||||
|
||||
function encodeStack (redeemScriptStack, redeemScript) {
|
||||
var serializedScriptPubKey = bscript.compile(redeemScript)
|
||||
|
||||
return [].concat(redeemScriptStack, serializedScriptPubKey)
|
||||
}
|
||||
|
||||
function encode (redeemScriptSig, redeemScript) {
|
||||
var redeemScriptStack = bscript.decompile(redeemScriptSig)
|
||||
|
||||
return bscript.compile(encodeStack(redeemScriptStack, redeemScript))
|
||||
}
|
||||
|
||||
function decodeStack (stack) {
|
||||
typeforce(typeforce.Array, stack)
|
||||
typeforce(check, stack)
|
||||
|
||||
return {
|
||||
redeemScriptStack: stack.slice(0, -1),
|
||||
redeemScript: stack[stack.length - 1]
|
||||
}
|
||||
}
|
||||
|
||||
function decode (buffer) {
|
||||
var stack = bscript.decompile(buffer)
|
||||
var result = decodeStack(stack)
|
||||
result.redeemScriptSig = bscript.compile(result.redeemScriptStack)
|
||||
delete result.redeemScriptStack
|
||||
return result
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
decodeStack: decodeStack,
|
||||
encode: encode,
|
||||
encodeStack: encodeStack
|
||||
}
|
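Sketch of the P2SH input layout checked above (assumptions: `redeemScript` is a compiled script Buffer and `redeemScriptSig` is a push-only scriptSig that satisfies it):

var p2sh = require('./templates/scripthash')                       // path assumed, relative to src/
var scriptSig = p2sh.input.encode(redeemScriptSig, redeemScript)   // sig pushes + serialized redeemScript
p2sh.input.decode(scriptSig)                                       // { redeemScriptSig, redeemScript }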
34  src/templates/scripthash/output.js (Normal file)
@@ -0,0 +1,34 @@
|
|||
// OP_HASH160 {scriptHash} OP_EQUAL
|
||||
|
||||
var bscript = require('../../script')
|
||||
var types = require('../../types')
|
||||
var typeforce = require('typeforce')
|
||||
var OPS = require('bitcoin-ops')
|
||||
|
||||
function check (script) {
|
||||
var buffer = bscript.compile(script)
|
||||
|
||||
return buffer.length === 23 &&
|
||||
buffer[0] === OPS.OP_HASH160 &&
|
||||
buffer[1] === 0x14 &&
|
||||
buffer[22] === OPS.OP_EQUAL
|
||||
}
|
||||
check.toJSON = function () { return 'scriptHash output' }
|
||||
|
||||
function encode (scriptHash) {
|
||||
typeforce(types.Hash160bit, scriptHash)
|
||||
|
||||
return bscript.compile([OPS.OP_HASH160, scriptHash, OPS.OP_EQUAL])
|
||||
}
|
||||
|
||||
function decode (buffer) {
|
||||
typeforce(check, buffer)
|
||||
|
||||
return buffer.slice(2, 22)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
encode: encode
|
||||
}
|
3  src/templates/witnesscommitment/index.js (Normal file)
@@ -0,0 +1,3 @@
|
|||
module.exports = {
|
||||
output: require('./output')
|
||||
}
|
42  src/templates/witnesscommitment/output.js (Normal file)
@@ -0,0 +1,42 @@
|
|||
// OP_RETURN {aa21a9ed} {commitment}
|
||||
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
var bscript = require('../../script')
|
||||
var types = require('../../types')
|
||||
var typeforce = require('typeforce')
|
||||
var OPS = require('bitcoin-ops')
|
||||
|
||||
var HEADER = Buffer.from('aa21a9ed', 'hex')
|
||||
|
||||
function check (script) {
|
||||
var buffer = bscript.compile(script)
|
||||
|
||||
return buffer.length > 37 &&
|
||||
buffer[0] === OPS.OP_RETURN &&
|
||||
buffer[1] === 0x24 &&
|
||||
buffer.slice(2, 6).equals(HEADER)
|
||||
}
|
||||
|
||||
check.toJSON = function () { return 'Witness commitment output' }
|
||||
|
||||
function encode (commitment) {
|
||||
typeforce(types.Hash256bit, commitment)
|
||||
|
||||
var buffer = Buffer.allocUnsafe(36)
|
||||
HEADER.copy(buffer, 0)
|
||||
commitment.copy(buffer, 4)
|
||||
|
||||
return bscript.compile([OPS.OP_RETURN, buffer])
|
||||
}
|
||||
|
||||
function decode (buffer) {
|
||||
typeforce(check, buffer)
|
||||
|
||||
return bscript.decompile(buffer)[1].slice(4, 36)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
encode: encode
|
||||
}
|
4  src/templates/witnesspubkeyhash/index.js (Normal file)
@@ -0,0 +1,4 @@
|
|||
module.exports = {
|
||||
input: require('./input'),
|
||||
output: require('./output')
|
||||
}
|
45  src/templates/witnesspubkeyhash/input.js (Normal file)
@@ -0,0 +1,45 @@
|
|||
// {signature} {pubKey}
|
||||
|
||||
var bscript = require('../../script')
|
||||
var typeforce = require('typeforce')
|
||||
|
||||
function isCompressedCanonicalPubKey (pubKey) {
|
||||
return bscript.isCanonicalPubKey(pubKey) && pubKey.length === 33
|
||||
}
|
||||
|
||||
function check (script) {
|
||||
var chunks = bscript.decompile(script)
|
||||
|
||||
return chunks.length === 2 &&
|
||||
bscript.isCanonicalSignature(chunks[0]) &&
|
||||
isCompressedCanonicalPubKey(chunks[1])
|
||||
}
|
||||
check.toJSON = function () { return 'witnessPubKeyHash input' }
|
||||
|
||||
function encodeStack (signature, pubKey) {
|
||||
typeforce({
|
||||
signature: bscript.isCanonicalSignature,
|
||||
pubKey: isCompressedCanonicalPubKey
|
||||
}, {
|
||||
signature: signature,
|
||||
pubKey: pubKey
|
||||
})
|
||||
|
||||
return [signature, pubKey]
|
||||
}
|
||||
|
||||
function decodeStack (stack) {
|
||||
typeforce(typeforce.Array, stack)
|
||||
typeforce(check, stack)
|
||||
|
||||
return {
|
||||
signature: stack[0],
|
||||
pubKey: stack[1]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decodeStack: decodeStack,
|
||||
encodeStack: encodeStack
|
||||
}
|
33  src/templates/witnesspubkeyhash/output.js (Normal file)
@@ -0,0 +1,33 @@
|
|||
// OP_0 {pubKeyHash}
|
||||
|
||||
var bscript = require('../../script')
|
||||
var types = require('../../types')
|
||||
var typeforce = require('typeforce')
|
||||
var OPS = require('bitcoin-ops')
|
||||
|
||||
function check (script) {
|
||||
var buffer = bscript.compile(script)
|
||||
|
||||
return buffer.length === 22 &&
|
||||
buffer[0] === OPS.OP_0 &&
|
||||
buffer[1] === 0x14
|
||||
}
|
||||
check.toJSON = function () { return 'Witness pubKeyHash output' }
|
||||
|
||||
function encode (pubKeyHash) {
|
||||
typeforce(types.Hash160bit, pubKeyHash)
|
||||
|
||||
return bscript.compile([OPS.OP_0, pubKeyHash])
|
||||
}
|
||||
|
||||
function decode (buffer) {
|
||||
typeforce(check, buffer)
|
||||
|
||||
return buffer.slice(2)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
encode: encode
|
||||
}
|
4  src/templates/witnessscripthash/index.js (Normal file)
@@ -0,0 +1,4 @@
|
|||
module.exports = {
|
||||
input: require('./input'),
|
||||
output: require('./output')
|
||||
}
|
64  src/templates/witnessscripthash/input.js (Normal file)
@@ -0,0 +1,64 @@
|
|||
// <scriptSig> {serialized scriptPubKey script}
|
||||
|
||||
var bscript = require('../../script')
|
||||
var types = require('../../types')
|
||||
var typeforce = require('typeforce')
|
||||
|
||||
var p2ms = require('../multisig/')
|
||||
var p2pk = require('../pubkey/')
|
||||
var p2pkh = require('../pubkeyhash/')
|
||||
|
||||
function check (chunks, allowIncomplete) {
|
||||
typeforce(types.Array, chunks)
|
||||
if (chunks.length < 1) return false
|
||||
|
||||
var witnessScript = chunks[chunks.length - 1]
|
||||
if (!Buffer.isBuffer(witnessScript)) return false
|
||||
|
||||
var witnessScriptChunks = bscript.decompile(witnessScript)
|
||||
|
||||
// is witnessScript a valid script?
|
||||
if (witnessScriptChunks.length === 0) return false
|
||||
|
||||
var witnessRawScriptSig = bscript.compile(chunks.slice(0, -1))
|
||||
|
||||
// match types
|
||||
if (p2pkh.input.check(witnessRawScriptSig) &&
|
||||
p2pkh.output.check(witnessScriptChunks)) return true
|
||||
|
||||
if (p2ms.input.check(witnessRawScriptSig, allowIncomplete) &&
|
||||
p2ms.output.check(witnessScriptChunks)) return true
|
||||
|
||||
if (p2pk.input.check(witnessRawScriptSig) &&
|
||||
p2pk.output.check(witnessScriptChunks)) return true
|
||||
|
||||
return false
|
||||
}
|
||||
check.toJSON = function () { return 'witnessScriptHash input' }
|
||||
|
||||
function encodeStack (witnessData, witnessScript) {
|
||||
typeforce({
|
||||
witnessData: [types.Buffer],
|
||||
witnessScript: types.Buffer
|
||||
}, {
|
||||
witnessData: witnessData,
|
||||
witnessScript: witnessScript
|
||||
})
|
||||
|
||||
return [].concat(witnessData, witnessScript)
|
||||
}
|
||||
|
||||
function decodeStack (stack) {
|
||||
typeforce(typeforce.Array, stack)
|
||||
typeforce(check, stack)
|
||||
return {
|
||||
witnessData: stack.slice(0, -1),
|
||||
witnessScript: stack[stack.length - 1]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decodeStack: decodeStack,
|
||||
encodeStack: encodeStack
|
||||
}
|
33  src/templates/witnessscripthash/output.js (Normal file)
@@ -0,0 +1,33 @@
|
|||
// OP_0 {scriptHash}
|
||||
|
||||
var bscript = require('../../script')
|
||||
var types = require('../../types')
|
||||
var typeforce = require('typeforce')
|
||||
var OPS = require('bitcoin-ops')
|
||||
|
||||
function check (script) {
|
||||
var buffer = bscript.compile(script)
|
||||
|
||||
return buffer.length === 34 &&
|
||||
buffer[0] === OPS.OP_0 &&
|
||||
buffer[1] === 0x20
|
||||
}
|
||||
check.toJSON = function () { return 'Witness scriptHash output' }
|
||||
|
||||
function encode (scriptHash) {
|
||||
typeforce(types.Hash256bit, scriptHash)
|
||||
|
||||
return bscript.compile([OPS.OP_0, scriptHash])
|
||||
}
|
||||
|
||||
function decode (buffer) {
|
||||
typeforce(check, buffer)
|
||||
|
||||
return buffer.slice(2)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
check: check,
|
||||
decode: decode,
|
||||
encode: encode
|
||||
}
|
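Sketch for the output template above (assumptions: `witnessScript` is a compiled script Buffer, and ./crypto exposes the sha256 helper used elsewhere in this diff):

var bcrypto = require('./crypto')                                  // path assumed, relative to src/
var p2wsho = require('./templates/witnessscripthash/output')       // path assumed, relative to src/
var scriptPubKey = p2wsho.encode(bcrypto.sha256(witnessScript))    // OP_0 <32-byte sha256(witnessScript)>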
57  src/transaction.d.ts (vendored)
@@ -1,57 +0,0 @@
/// <reference types="node" />
export interface Output {
    script: Buffer;
    value: number;
}
export interface Input {
    hash: Buffer;
    index: number;
    script: Buffer;
    sequence: number;
    witness: Buffer[];
}
export declare class Transaction {
    static readonly DEFAULT_SEQUENCE = 4294967295;
    static readonly SIGHASH_DEFAULT = 0;
    static readonly SIGHASH_ALL = 1;
    static readonly SIGHASH_NONE = 2;
    static readonly SIGHASH_SINGLE = 3;
    static readonly SIGHASH_ANYONECANPAY = 128;
    static readonly SIGHASH_OUTPUT_MASK = 3;
    static readonly SIGHASH_INPUT_MASK = 128;
    static readonly ADVANCED_TRANSACTION_MARKER = 0;
    static readonly ADVANCED_TRANSACTION_FLAG = 1;
    static fromBuffer(buffer: Buffer, _NO_STRICT?: boolean): Transaction;
    static fromHex(hex: string): Transaction;
    static isCoinbaseHash(buffer: Buffer): boolean;
    version: number;
    locktime: number;
    ins: Input[];
    outs: Output[];
    isCoinbase(): boolean;
    addInput(hash: Buffer, index: number, sequence?: number, scriptSig?: Buffer): number;
    addOutput(scriptPubKey: Buffer, value: number): number;
    hasWitnesses(): boolean;
    weight(): number;
    virtualSize(): number;
    byteLength(_ALLOW_WITNESS?: boolean): number;
    clone(): Transaction;
    /**
     * Hash transaction for signing a specific input.
     *
     * Bitcoin uses a different hash for each signed transaction input.
     * This method copies the transaction, makes the necessary changes based on the
     * hashType, and then hashes the result.
     * This hash can then be used to sign the provided transaction input.
     */
    hashForSignature(inIndex: number, prevOutScript: Buffer, hashType: number): Buffer;
    hashForWitnessV1(inIndex: number, prevOutScripts: Buffer[], values: number[], hashType: number, leafHash?: Buffer, annex?: Buffer): Buffer;
    hashForWitnessV0(inIndex: number, prevOutScript: Buffer, value: number, hashType: number): Buffer;
    getHash(forWitness?: boolean): Buffer;
    getId(): string;
    toBuffer(buffer?: Buffer, initialOffset?: number): Buffer;
    toHex(): string;
    setInputScript(index: number, scriptSig: Buffer): void;
    setWitness(index: number, witness: Buffer[]): void;
    private __toBuffer;
}
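A hedged sketch of the two sighash entry points declared above; the prevout hash, scriptCode and amounts are placeholder values, and the top-level require is an assumption about how the package is consumed.

// Hypothetical usage sketch.
var bitcoin = require('bitcoinjs-lib')

var tx = new bitcoin.Transaction()
tx.addInput(Buffer.alloc(32, 1), 0) // dummy prevout hash and vout
tx.addOutput(Buffer.from('00141234567890123456789012345678901234567890', 'hex'), 90000)

// scriptCode of the input being signed (P2PKH-style for P2WPKH under BIP143)
var scriptCode = Buffer.from('76a914123456789012345678901234567890123456789088ac', 'hex')

// legacy sighash: commits to the previous output script only
var legacyHash = tx.hashForSignature(0, scriptCode, bitcoin.Transaction.SIGHASH_ALL)

// BIP143 sighash: additionally commits to the input's value in satoshis
var segwitHash = tx.hashForWitnessV0(0, scriptCode, 100000, bitcoin.Transaction.SIGHASH_ALL)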
1022 src/transaction.js (file diff suppressed because it is too large)
784 src/transaction_builder.js Normal file
@@ -0,0 +1,784 @@
var Buffer = require('safe-buffer').Buffer
|
||||
var baddress = require('./address')
|
||||
var bcrypto = require('./crypto')
|
||||
var bscript = require('./script')
|
||||
var btemplates = require('./templates')
|
||||
var networks = require('./networks')
|
||||
var ops = require('bitcoin-ops')
|
||||
var typeforce = require('typeforce')
|
||||
var types = require('./types')
|
||||
var scriptTypes = btemplates.types
|
||||
var SIGNABLE = [btemplates.types.P2PKH, btemplates.types.P2PK, btemplates.types.MULTISIG]
|
||||
var P2SH = SIGNABLE.concat([btemplates.types.P2WPKH, btemplates.types.P2WSH])
|
||||
|
||||
var ECPair = require('./ecpair')
|
||||
var ECSignature = require('./ecsignature')
|
||||
var Transaction = require('./transaction')
|
||||
|
||||
function supportedType (type) {
|
||||
return SIGNABLE.indexOf(type) !== -1
|
||||
}
|
||||
|
||||
function supportedP2SHType (type) {
|
||||
return P2SH.indexOf(type) !== -1
|
||||
}
|
||||
|
||||
function extractChunks (type, chunks, script) {
|
||||
var pubKeys = []
|
||||
var signatures = []
|
||||
switch (type) {
|
||||
case scriptTypes.P2PKH:
|
||||
// if (redeemScript) throw new Error('Nonstandard... P2SH(P2PKH)')
|
||||
pubKeys = chunks.slice(1)
|
||||
signatures = chunks.slice(0, 1)
|
||||
break
|
||||
|
||||
case scriptTypes.P2PK:
|
||||
pubKeys[0] = script ? btemplates.pubKey.output.decode(script) : undefined
|
||||
signatures = chunks.slice(0, 1)
|
||||
break
|
||||
|
||||
case scriptTypes.MULTISIG:
|
||||
if (script) {
|
||||
var multisig = btemplates.multisig.output.decode(script)
|
||||
pubKeys = multisig.pubKeys
|
||||
}
|
||||
|
||||
signatures = chunks.slice(1).map(function (chunk) {
|
||||
return chunk.length === 0 ? undefined : chunk
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
return {
|
||||
pubKeys: pubKeys,
|
||||
signatures: signatures
|
||||
}
|
||||
}
|
||||
function expandInput (scriptSig, witnessStack) {
|
||||
if (scriptSig.length === 0 && witnessStack.length === 0) return {}
|
||||
|
||||
var prevOutScript
|
||||
var prevOutType
|
||||
var scriptType
|
||||
var script
|
||||
var redeemScript
|
||||
var witnessScript
|
||||
var witnessScriptType
|
||||
var redeemScriptType
|
||||
var witness = false
|
||||
var p2wsh = false
|
||||
var p2sh = false
|
||||
var witnessProgram
|
||||
var chunks
|
||||
|
||||
var scriptSigChunks = bscript.decompile(scriptSig)
|
||||
var sigType = btemplates.classifyInput(scriptSigChunks, true)
|
||||
if (sigType === scriptTypes.P2SH) {
|
||||
p2sh = true
|
||||
redeemScript = scriptSigChunks[scriptSigChunks.length - 1]
|
||||
redeemScriptType = btemplates.classifyOutput(redeemScript)
|
||||
prevOutScript = btemplates.scriptHash.output.encode(bcrypto.hash160(redeemScript))
|
||||
prevOutType = scriptTypes.P2SH
|
||||
script = redeemScript
|
||||
}
|
||||
|
||||
var classifyWitness = btemplates.classifyWitness(witnessStack, true)
|
||||
if (classifyWitness === scriptTypes.P2WSH) {
|
||||
witnessScript = witnessStack[witnessStack.length - 1]
|
||||
witnessScriptType = btemplates.classifyOutput(witnessScript)
|
||||
p2wsh = true
|
||||
witness = true
|
||||
if (scriptSig.length === 0) {
|
||||
prevOutScript = btemplates.witnessScriptHash.output.encode(bcrypto.sha256(witnessScript))
|
||||
prevOutType = scriptTypes.P2WSH
|
||||
if (redeemScript !== undefined) {
|
||||
throw new Error('Redeem script given when unnecessary')
|
||||
}
|
||||
// bare witness
|
||||
} else {
|
||||
if (!redeemScript) {
|
||||
throw new Error('No redeemScript provided for P2WSH, but scriptSig non-empty')
|
||||
}
|
||||
witnessProgram = btemplates.witnessScriptHash.output.encode(bcrypto.sha256(witnessScript))
|
||||
if (!redeemScript.equals(witnessProgram)) {
|
||||
throw new Error('Redeem script didn\'t match witnessScript')
|
||||
}
|
||||
}
|
||||
|
||||
if (!supportedType(btemplates.classifyOutput(witnessScript))) {
|
||||
throw new Error('unsupported witness script')
|
||||
}
|
||||
|
||||
script = witnessScript
|
||||
scriptType = witnessScriptType
|
||||
chunks = witnessStack.slice(0, -1)
|
||||
} else if (classifyWitness === scriptTypes.P2WPKH) {
|
||||
witness = true
|
||||
var key = witnessStack[witnessStack.length - 1]
|
||||
var keyHash = bcrypto.hash160(key)
|
||||
if (scriptSig.length === 0) {
|
||||
prevOutScript = btemplates.witnessPubKeyHash.output.encode(keyHash)
|
||||
prevOutType = scriptTypes.P2WPKH
|
||||
if (typeof redeemScript !== 'undefined') {
|
||||
throw new Error('Redeem script given when unnecessary')
|
||||
}
|
||||
} else {
|
||||
if (!redeemScript) {
|
||||
throw new Error('No redeemScript provided for P2WPKH, but scriptSig wasn\'t empty')
|
||||
}
|
||||
witnessProgram = btemplates.witnessPubKeyHash.output.encode(keyHash)
|
||||
if (!redeemScript.equals(witnessProgram)) {
|
||||
throw new Error('Redeem script did not have the right witness program')
|
||||
}
|
||||
}
|
||||
|
||||
scriptType = scriptTypes.P2PKH
|
||||
chunks = witnessStack
|
||||
} else if (redeemScript) {
|
||||
if (!supportedP2SHType(redeemScriptType)) {
|
||||
throw new Error('Bad redeemscript!')
|
||||
}
|
||||
|
||||
script = redeemScript
|
||||
scriptType = redeemScriptType
|
||||
chunks = scriptSigChunks.slice(0, -1)
|
||||
} else {
|
||||
prevOutType = scriptType = btemplates.classifyInput(scriptSig)
|
||||
chunks = scriptSigChunks
|
||||
}
|
||||
|
||||
var expanded = extractChunks(scriptType, chunks, script)
|
||||
|
||||
var result = {
|
||||
pubKeys: expanded.pubKeys,
|
||||
signatures: expanded.signatures,
|
||||
prevOutScript: prevOutScript,
|
||||
prevOutType: prevOutType,
|
||||
signType: scriptType,
|
||||
signScript: script,
|
||||
witness: Boolean(witness)
|
||||
}
|
||||
|
||||
if (p2sh) {
|
||||
result.redeemScript = redeemScript
|
||||
result.redeemScriptType = redeemScriptType
|
||||
}
|
||||
|
||||
if (p2wsh) {
|
||||
result.witnessScript = witnessScript
|
||||
result.witnessScriptType = witnessScriptType
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// could be done in expandInput, but requires the original Transaction for hashForSignature
|
||||
function fixMultisigOrder (input, transaction, vin) {
|
||||
if (input.redeemScriptType !== scriptTypes.MULTISIG || !input.redeemScript) return
|
||||
if (input.pubKeys.length === input.signatures.length) return
|
||||
|
||||
var unmatched = input.signatures.concat()
|
||||
|
||||
input.signatures = input.pubKeys.map(function (pubKey) {
|
||||
var keyPair = ECPair.fromPublicKeyBuffer(pubKey)
|
||||
var match
|
||||
|
||||
// check for a signature
|
||||
unmatched.some(function (signature, i) {
|
||||
// skip if undefined || OP_0
|
||||
if (!signature) return false
|
||||
|
||||
// TODO: avoid O(n) hashForSignature
|
||||
var parsed = ECSignature.parseScriptSignature(signature)
|
||||
var hash = transaction.hashForSignature(vin, input.redeemScript, parsed.hashType)
|
||||
|
||||
// skip if signature does not match pubKey
|
||||
if (!keyPair.verify(hash, parsed.signature)) return false
|
||||
|
||||
// remove matched signature from unmatched
|
||||
unmatched[i] = undefined
|
||||
match = signature
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
return match
|
||||
})
|
||||
}
|
||||
|
||||
function expandOutput (script, scriptType, ourPubKey) {
|
||||
typeforce(types.Buffer, script)
|
||||
|
||||
var scriptChunks = bscript.decompile(script)
|
||||
if (!scriptType) {
|
||||
scriptType = btemplates.classifyOutput(script)
|
||||
}
|
||||
|
||||
var pubKeys = []
|
||||
|
||||
switch (scriptType) {
|
||||
// does our hash160(pubKey) match the output scripts?
|
||||
case scriptTypes.P2PKH:
|
||||
if (!ourPubKey) break
|
||||
|
||||
var pkh1 = scriptChunks[2]
|
||||
var pkh2 = bcrypto.hash160(ourPubKey)
|
||||
if (pkh1.equals(pkh2)) pubKeys = [ourPubKey]
|
||||
break
|
||||
|
||||
// does our hash160(pubKey) match the output scripts?
|
||||
case scriptTypes.P2WPKH:
|
||||
if (!ourPubKey) break
|
||||
|
||||
var wpkh1 = scriptChunks[1]
|
||||
var wpkh2 = bcrypto.hash160(ourPubKey)
|
||||
if (wpkh1.equals(wpkh2)) pubKeys = [ourPubKey]
|
||||
break
|
||||
|
||||
case scriptTypes.P2PK:
|
||||
pubKeys = scriptChunks.slice(0, 1)
|
||||
break
|
||||
|
||||
case scriptTypes.MULTISIG:
|
||||
pubKeys = scriptChunks.slice(1, -2)
|
||||
break
|
||||
|
||||
default: return { scriptType: scriptType }
|
||||
}
|
||||
|
||||
return {
|
||||
pubKeys: pubKeys,
|
||||
scriptType: scriptType,
|
||||
signatures: pubKeys.map(function () { return undefined })
|
||||
}
|
||||
}
|
||||
|
||||
function checkP2SHInput (input, redeemScriptHash) {
|
||||
if (input.prevOutType) {
|
||||
if (input.prevOutType !== scriptTypes.P2SH) throw new Error('PrevOutScript must be P2SH')
|
||||
|
||||
var prevOutScriptScriptHash = bscript.decompile(input.prevOutScript)[1]
|
||||
if (!prevOutScriptScriptHash.equals(redeemScriptHash)) throw new Error('Inconsistent hash160(redeemScript)')
|
||||
}
|
||||
}
|
||||
|
||||
function checkP2WSHInput (input, witnessScriptHash) {
|
||||
if (input.prevOutType) {
|
||||
if (input.prevOutType !== scriptTypes.P2WSH) throw new Error('PrevOutScript must be P2WSH')
|
||||
|
||||
var scriptHash = bscript.decompile(input.prevOutScript)[1]
|
||||
if (!scriptHash.equals(witnessScriptHash)) throw new Error('Inconsistent sha256(witnessScript)')
|
||||
}
|
||||
}
|
||||
|
||||
function prepareInput (input, kpPubKey, redeemScript, witnessValue, witnessScript) {
|
||||
var expanded
|
||||
var prevOutType
|
||||
var prevOutScript
|
||||
|
||||
var p2sh = false
|
||||
var p2shType
|
||||
var redeemScriptHash
|
||||
|
||||
var witness = false
|
||||
var p2wsh = false
|
||||
var witnessType
|
||||
var witnessScriptHash
|
||||
|
||||
var signType
|
||||
var signScript
|
||||
|
||||
if (redeemScript && witnessScript) {
|
||||
redeemScriptHash = bcrypto.hash160(redeemScript)
|
||||
witnessScriptHash = bcrypto.sha256(witnessScript)
|
||||
checkP2SHInput(input, redeemScriptHash)
|
||||
|
||||
if (!redeemScript.equals(btemplates.witnessScriptHash.output.encode(witnessScriptHash))) throw new Error('Witness script inconsistent with redeem script')
|
||||
|
||||
expanded = expandOutput(witnessScript, undefined, kpPubKey)
|
||||
if (!expanded.pubKeys) throw new Error(expanded.scriptType + ' not supported as witnessScript (' + bscript.toASM(witnessScript) + ')')
|
||||
|
||||
prevOutType = btemplates.types.P2SH
|
||||
prevOutScript = btemplates.scriptHash.output.encode(redeemScriptHash)
|
||||
p2sh = witness = p2wsh = true
|
||||
p2shType = btemplates.types.P2WSH
|
||||
signType = witnessType = expanded.scriptType
|
||||
signScript = witnessScript
|
||||
} else if (redeemScript) {
|
||||
redeemScriptHash = bcrypto.hash160(redeemScript)
|
||||
checkP2SHInput(input, redeemScriptHash)
|
||||
|
||||
expanded = expandOutput(redeemScript, undefined, kpPubKey)
|
||||
if (!expanded.pubKeys) throw new Error(expanded.scriptType + ' not supported as redeemScript (' + bscript.toASM(redeemScript) + ')')
|
||||
|
||||
prevOutType = btemplates.types.P2SH
|
||||
prevOutScript = btemplates.scriptHash.output.encode(redeemScriptHash)
|
||||
p2sh = true
|
||||
signType = p2shType = expanded.scriptType
|
||||
signScript = redeemScript
|
||||
witness = signType === btemplates.types.P2WPKH
|
||||
} else if (witnessScript) {
|
||||
witnessScriptHash = bcrypto.sha256(witnessScript)
|
||||
checkP2WSHInput(input, witnessScriptHash)
|
||||
|
||||
expanded = expandOutput(witnessScript, undefined, kpPubKey)
|
||||
if (!expanded.pubKeys) throw new Error(expanded.scriptType + ' not supported as witnessScript (' + bscript.toASM(witnessScript) + ')')
|
||||
|
||||
prevOutType = btemplates.types.P2WSH
|
||||
prevOutScript = btemplates.witnessScriptHash.output.encode(witnessScriptHash)
|
||||
witness = p2wsh = true
|
||||
signType = witnessType = expanded.scriptType
|
||||
signScript = witnessScript
|
||||
} else if (input.prevOutType) {
|
||||
// embedded scripts are not possible without a redeemScript
|
||||
if (input.prevOutType === scriptTypes.P2SH) {
|
||||
throw new Error('PrevOutScript is ' + input.prevOutType + ', requires redeemScript')
|
||||
}
|
||||
|
||||
if (input.prevOutType === scriptTypes.P2WSH) {
|
||||
throw new Error('PrevOutScript is ' + input.prevOutType + ', requires witnessScript')
|
||||
}
|
||||
|
||||
prevOutType = input.prevOutType
|
||||
prevOutScript = input.prevOutScript
|
||||
expanded = expandOutput(input.prevOutScript, input.prevOutType, kpPubKey)
|
||||
if (!expanded.pubKeys) return
|
||||
|
||||
witness = (input.prevOutType === scriptTypes.P2WPKH)
|
||||
signType = prevOutType
|
||||
signScript = prevOutScript
|
||||
} else {
|
||||
prevOutScript = btemplates.pubKeyHash.output.encode(bcrypto.hash160(kpPubKey))
|
||||
expanded = expandOutput(prevOutScript, scriptTypes.P2PKH, kpPubKey)
|
||||
|
||||
prevOutType = scriptTypes.P2PKH
|
||||
witness = false
|
||||
signType = prevOutType
|
||||
signScript = prevOutScript
|
||||
}
|
||||
|
||||
if (signType === scriptTypes.P2WPKH) {
|
||||
signScript = btemplates.pubKeyHash.output.encode(btemplates.witnessPubKeyHash.output.decode(signScript))
|
||||
}
|
||||
|
||||
if (p2sh) {
|
||||
input.redeemScript = redeemScript
|
||||
input.redeemScriptType = p2shType
|
||||
}
|
||||
|
||||
if (p2wsh) {
|
||||
input.witnessScript = witnessScript
|
||||
input.witnessScriptType = witnessType
|
||||
}
|
||||
|
||||
input.pubKeys = expanded.pubKeys
|
||||
input.signatures = expanded.signatures
|
||||
input.signScript = signScript
|
||||
input.signType = signType
|
||||
input.prevOutScript = prevOutScript
|
||||
input.prevOutType = prevOutType
|
||||
input.witness = witness
|
||||
}
|
||||
|
||||
function buildStack (type, signatures, pubKeys, allowIncomplete) {
|
||||
if (type === scriptTypes.P2PKH) {
|
||||
if (signatures.length === 1 && Buffer.isBuffer(signatures[0]) && pubKeys.length === 1) return btemplates.pubKeyHash.input.encodeStack(signatures[0], pubKeys[0])
|
||||
} else if (type === scriptTypes.P2PK) {
|
||||
if (signatures.length === 1 && Buffer.isBuffer(signatures[0])) return btemplates.pubKey.input.encodeStack(signatures[0])
|
||||
} else if (type === scriptTypes.MULTISIG) {
|
||||
if (signatures.length > 0) {
|
||||
signatures = signatures.map(function (signature) {
|
||||
return signature || ops.OP_0
|
||||
})
|
||||
if (!allowIncomplete) {
|
||||
// remove blank signatures
|
||||
signatures = signatures.filter(function (x) { return x !== ops.OP_0 })
|
||||
}
|
||||
|
||||
return btemplates.multisig.input.encodeStack(signatures)
|
||||
}
|
||||
} else {
|
||||
throw new Error('Not yet supported')
|
||||
}
|
||||
|
||||
if (!allowIncomplete) throw new Error('Not enough signatures provided')
|
||||
return []
|
||||
}
|
||||
|
||||
function buildInput (input, allowIncomplete) {
|
||||
var scriptType = input.prevOutType
|
||||
var sig = []
|
||||
var witness = []
|
||||
|
||||
if (supportedType(scriptType)) {
|
||||
sig = buildStack(scriptType, input.signatures, input.pubKeys, allowIncomplete)
|
||||
}
|
||||
|
||||
var p2sh = false
|
||||
if (scriptType === btemplates.types.P2SH) {
|
||||
// We can remove this error later when we have a guarantee prepareInput
|
||||
// rejects unsignable scripts - it MUST be signable at this point.
|
||||
if (!allowIncomplete && !supportedP2SHType(input.redeemScriptType)) {
|
||||
throw new Error('Impossible to sign this type')
|
||||
}
|
||||
|
||||
if (supportedType(input.redeemScriptType)) {
|
||||
sig = buildStack(input.redeemScriptType, input.signatures, input.pubKeys, allowIncomplete)
|
||||
}
|
||||
|
||||
// If it wasn't SIGNABLE, it's witness, defer to that
|
||||
if (input.redeemScriptType) {
|
||||
p2sh = true
|
||||
scriptType = input.redeemScriptType
|
||||
}
|
||||
}
|
||||
|
||||
switch (scriptType) {
|
||||
// P2WPKH is a special case of P2PKH
|
||||
case btemplates.types.P2WPKH:
|
||||
witness = buildStack(btemplates.types.P2PKH, input.signatures, input.pubKeys, allowIncomplete)
|
||||
break
|
||||
|
||||
case btemplates.types.P2WSH:
|
||||
// We can remove this check later
|
||||
if (!allowIncomplete && !supportedType(input.witnessScriptType)) {
|
||||
throw new Error('Impossible to sign this type')
|
||||
}
|
||||
|
||||
if (supportedType(input.witnessScriptType)) {
|
||||
witness = buildStack(input.witnessScriptType, input.signatures, input.pubKeys, allowIncomplete)
|
||||
witness.push(input.witnessScript)
|
||||
scriptType = input.witnessScriptType
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
// append redeemScript if necessary
|
||||
if (p2sh) {
|
||||
sig.push(input.redeemScript)
|
||||
}
|
||||
|
||||
return {
|
||||
type: scriptType,
|
||||
script: bscript.compile(sig),
|
||||
witness: witness
|
||||
}
|
||||
}
|
||||
|
||||
function TransactionBuilder (network, maximumFeeRate) {
|
||||
this.prevTxMap = {}
|
||||
this.network = network || networks.bitcoin
|
||||
|
||||
// WARNING: This is __NOT__ to be relied on, its just another potential safety mechanism (safety in-depth)
|
||||
this.maximumFeeRate = maximumFeeRate || 2500
|
||||
|
||||
this.inputs = []
|
||||
this.tx = new Transaction()
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.setLockTime = function (locktime) {
|
||||
typeforce(types.UInt32, locktime)
|
||||
|
||||
// if any signatures exist, throw
|
||||
if (this.inputs.some(function (input) {
|
||||
if (!input.signatures) return false
|
||||
|
||||
return input.signatures.some(function (s) { return s })
|
||||
})) {
|
||||
throw new Error('No, this would invalidate signatures')
|
||||
}
|
||||
|
||||
this.tx.locktime = locktime
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.setVersion = function (version) {
|
||||
typeforce(types.UInt32, version)
|
||||
|
||||
// XXX: this might eventually become more complex depending on what the versions represent
|
||||
this.tx.version = version
|
||||
}
|
||||
|
||||
TransactionBuilder.fromTransaction = function (transaction, network) {
|
||||
var txb = new TransactionBuilder(network)
|
||||
|
||||
// Copy transaction fields
|
||||
txb.setVersion(transaction.version)
|
||||
txb.setLockTime(transaction.locktime)
|
||||
|
||||
// Copy outputs (done first to avoid signature invalidation)
|
||||
transaction.outs.forEach(function (txOut) {
|
||||
txb.addOutput(txOut.script, txOut.value)
|
||||
})
|
||||
|
||||
// Copy inputs
|
||||
transaction.ins.forEach(function (txIn) {
|
||||
txb.__addInputUnsafe(txIn.hash, txIn.index, {
|
||||
sequence: txIn.sequence,
|
||||
script: txIn.script,
|
||||
witness: txIn.witness
|
||||
})
|
||||
})
|
||||
|
||||
// fix some things not possible through the public API
|
||||
txb.inputs.forEach(function (input, i) {
|
||||
fixMultisigOrder(input, transaction, i)
|
||||
})
|
||||
|
||||
return txb
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.addInput = function (txHash, vout, sequence, prevOutScript) {
|
||||
if (!this.__canModifyInputs()) {
|
||||
throw new Error('No, this would invalidate signatures')
|
||||
}
|
||||
|
||||
var value
|
||||
|
||||
// is it a hex string?
|
||||
if (typeof txHash === 'string') {
|
||||
// transaction hashs's are displayed in reverse order, un-reverse it
|
||||
txHash = Buffer.from(txHash, 'hex').reverse()
|
||||
|
||||
// is it a Transaction object?
|
||||
} else if (txHash instanceof Transaction) {
|
||||
var txOut = txHash.outs[vout]
|
||||
prevOutScript = txOut.script
|
||||
value = txOut.value
|
||||
|
||||
txHash = txHash.getHash()
|
||||
}
|
||||
|
||||
return this.__addInputUnsafe(txHash, vout, {
|
||||
sequence: sequence,
|
||||
prevOutScript: prevOutScript,
|
||||
value: value
|
||||
})
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.__addInputUnsafe = function (txHash, vout, options) {
|
||||
if (Transaction.isCoinbaseHash(txHash)) {
|
||||
throw new Error('coinbase inputs not supported')
|
||||
}
|
||||
|
||||
var prevTxOut = txHash.toString('hex') + ':' + vout
|
||||
if (this.prevTxMap[prevTxOut] !== undefined) throw new Error('Duplicate TxOut: ' + prevTxOut)
|
||||
|
||||
var input = {}
|
||||
|
||||
// derive what we can from the scriptSig
|
||||
if (options.script !== undefined) {
|
||||
input = expandInput(options.script, options.witness || [])
|
||||
}
|
||||
|
||||
// if an input value was given, retain it
|
||||
if (options.value !== undefined) {
|
||||
input.value = options.value
|
||||
}
|
||||
|
||||
// derive what we can from the previous transactions output script
|
||||
if (!input.prevOutScript && options.prevOutScript) {
|
||||
var prevOutType
|
||||
|
||||
if (!input.pubKeys && !input.signatures) {
|
||||
var expanded = expandOutput(options.prevOutScript)
|
||||
|
||||
if (expanded.pubKeys) {
|
||||
input.pubKeys = expanded.pubKeys
|
||||
input.signatures = expanded.signatures
|
||||
}
|
||||
|
||||
prevOutType = expanded.scriptType
|
||||
}
|
||||
|
||||
input.prevOutScript = options.prevOutScript
|
||||
input.prevOutType = prevOutType || btemplates.classifyOutput(options.prevOutScript)
|
||||
}
|
||||
|
||||
var vin = this.tx.addInput(txHash, vout, options.sequence, options.scriptSig)
|
||||
this.inputs[vin] = input
|
||||
this.prevTxMap[prevTxOut] = vin
|
||||
return vin
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.addOutput = function (scriptPubKey, value) {
|
||||
if (!this.__canModifyOutputs()) {
|
||||
throw new Error('No, this would invalidate signatures')
|
||||
}
|
||||
|
||||
// Attempt to get a script if it's a base58 or bech32 address string
|
||||
if (typeof scriptPubKey === 'string') {
|
||||
scriptPubKey = baddress.toOutputScript(scriptPubKey, this.network)
|
||||
}
|
||||
|
||||
return this.tx.addOutput(scriptPubKey, value)
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.build = function () {
|
||||
return this.__build(false)
|
||||
}
|
||||
TransactionBuilder.prototype.buildIncomplete = function () {
|
||||
return this.__build(true)
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.__build = function (allowIncomplete) {
|
||||
if (!allowIncomplete) {
|
||||
if (!this.tx.ins.length) throw new Error('Transaction has no inputs')
|
||||
if (!this.tx.outs.length) throw new Error('Transaction has no outputs')
|
||||
}
|
||||
|
||||
var tx = this.tx.clone()
|
||||
// Create script signatures from inputs
|
||||
this.inputs.forEach(function (input, i) {
|
||||
var scriptType = input.witnessScriptType || input.redeemScriptType || input.prevOutType
|
||||
if (!scriptType && !allowIncomplete) throw new Error('Transaction is not complete')
|
||||
var result = buildInput(input, allowIncomplete)
|
||||
|
||||
// skip if no result
|
||||
if (!allowIncomplete) {
|
||||
if (!supportedType(result.type) && result.type !== btemplates.types.P2WPKH) {
|
||||
throw new Error(result.type + ' not supported')
|
||||
}
|
||||
}
|
||||
|
||||
tx.setInputScript(i, result.script)
|
||||
tx.setWitness(i, result.witness)
|
||||
})
|
||||
|
||||
if (!allowIncomplete) {
|
||||
// do not rely on this, its merely a last resort
|
||||
if (this.__overMaximumFees(tx.virtualSize())) {
|
||||
throw new Error('Transaction has absurd fees')
|
||||
}
|
||||
}
|
||||
|
||||
return tx
|
||||
}
|
||||
|
||||
function canSign (input) {
|
||||
return input.prevOutScript !== undefined &&
|
||||
input.signScript !== undefined &&
|
||||
input.pubKeys !== undefined &&
|
||||
input.signatures !== undefined &&
|
||||
input.signatures.length === input.pubKeys.length &&
|
||||
input.pubKeys.length > 0 &&
|
||||
(
|
||||
input.witness === false ||
|
||||
(input.witness === true && input.value !== undefined)
|
||||
)
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.sign = function (vin, keyPair, redeemScript, hashType, witnessValue, witnessScript) {
|
||||
// TODO: remove keyPair.network matching in 4.0.0
|
||||
if (keyPair.network && keyPair.network !== this.network) throw new TypeError('Inconsistent network')
|
||||
if (!this.inputs[vin]) throw new Error('No input at index: ' + vin)
|
||||
hashType = hashType || Transaction.SIGHASH_ALL
|
||||
|
||||
var input = this.inputs[vin]
|
||||
|
||||
// if redeemScript was previously provided, enforce consistency
|
||||
if (input.redeemScript !== undefined &&
|
||||
redeemScript &&
|
||||
!input.redeemScript.equals(redeemScript)) {
|
||||
throw new Error('Inconsistent redeemScript')
|
||||
}
|
||||
|
||||
var kpPubKey = keyPair.publicKey || keyPair.getPublicKeyBuffer()
|
||||
if (!canSign(input)) {
|
||||
if (witnessValue !== undefined) {
|
||||
if (input.value !== undefined && input.value !== witnessValue) throw new Error('Input didn\'t match witnessValue')
|
||||
typeforce(types.Satoshi, witnessValue)
|
||||
input.value = witnessValue
|
||||
}
|
||||
|
||||
if (!canSign(input)) prepareInput(input, kpPubKey, redeemScript, witnessValue, witnessScript)
|
||||
if (!canSign(input)) throw Error(input.prevOutType + ' not supported')
|
||||
}
|
||||
|
||||
// ready to sign
|
||||
var signatureHash
|
||||
if (input.witness) {
|
||||
signatureHash = this.tx.hashForWitnessV0(vin, input.signScript, input.value, hashType)
|
||||
} else {
|
||||
signatureHash = this.tx.hashForSignature(vin, input.signScript, hashType)
|
||||
}
|
||||
|
||||
// enforce in order signing of public keys
|
||||
var signed = input.pubKeys.some(function (pubKey, i) {
|
||||
if (!kpPubKey.equals(pubKey)) return false
|
||||
if (input.signatures[i]) throw new Error('Signature already exists')
|
||||
|
||||
if (kpPubKey.length !== 33 && (
|
||||
input.signType === scriptTypes.P2WPKH ||
|
||||
input.redeemScriptType === scriptTypes.P2WSH ||
|
||||
input.prevOutType === scriptTypes.P2WSH
|
||||
)) throw new Error('BIP143 rejects uncompressed public keys in P2WPKH or P2WSH')
|
||||
|
||||
var signature = keyPair.sign(signatureHash)
|
||||
if (Buffer.isBuffer(signature)) signature = ECSignature.fromRSBuffer(signature)
|
||||
|
||||
input.signatures[i] = signature.toScriptSignature(hashType)
|
||||
return true
|
||||
})
|
||||
|
||||
if (!signed) throw new Error('Key pair cannot sign for this input')
|
||||
}
|
||||
|
||||
function signatureHashType (buffer) {
|
||||
return buffer.readUInt8(buffer.length - 1)
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.__canModifyInputs = function () {
|
||||
return this.inputs.every(function (input) {
|
||||
// any signatures?
|
||||
if (input.signatures === undefined) return true
|
||||
|
||||
return input.signatures.every(function (signature) {
|
||||
if (!signature) return true
|
||||
var hashType = signatureHashType(signature)
|
||||
|
||||
// if SIGHASH_ANYONECANPAY is set, signatures would not
|
||||
// be invalidated by more inputs
|
||||
return hashType & Transaction.SIGHASH_ANYONECANPAY
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.__canModifyOutputs = function () {
|
||||
var nInputs = this.tx.ins.length
|
||||
var nOutputs = this.tx.outs.length
|
||||
|
||||
return this.inputs.every(function (input) {
|
||||
if (input.signatures === undefined) return true
|
||||
|
||||
return input.signatures.every(function (signature) {
|
||||
if (!signature) return true
|
||||
var hashType = signatureHashType(signature)
|
||||
|
||||
var hashTypeMod = hashType & 0x1f
|
||||
if (hashTypeMod === Transaction.SIGHASH_NONE) return true
|
||||
if (hashTypeMod === Transaction.SIGHASH_SINGLE) {
|
||||
// if SIGHASH_SINGLE is set, and nInputs > nOutputs
|
||||
// some signatures would be invalidated by the addition
|
||||
// of more outputs
|
||||
return nInputs <= nOutputs
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
TransactionBuilder.prototype.__overMaximumFees = function (bytes) {
|
||||
// not all inputs will have .value defined
|
||||
var incoming = this.inputs.reduce(function (a, x) { return a + (x.value >>> 0) }, 0)
|
||||
|
||||
// but all outputs do, and if we have any input value
|
||||
// we can immediately determine if the outputs are too small
|
||||
var outgoing = this.tx.outs.reduce(function (a, x) { return a + x.value }, 0)
|
||||
var fee = incoming - outgoing
|
||||
var feeRate = fee / bytes
|
||||
|
||||
return feeRate > this.maximumFeeRate
|
||||
}
|
||||
|
||||
module.exports = TransactionBuilder
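A hedged end-to-end sketch of the TransactionBuilder flow defined above, using the 3.x-style API shown here; the WIF key, txid and address are placeholder values.

// Hypothetical usage sketch.
var bitcoin = require('bitcoinjs-lib')

var keyPair = bitcoin.ECPair.fromWIF('L1uyy5qTuGrVXrmrsvHWHgVzW9kKdrp27wBC7Vs6nZDTF2BRUVwy')
var txb = new bitcoin.TransactionBuilder(bitcoin.networks.bitcoin)

txb.addInput('d18e7106e5492baf8f3929d2d573d27d89277f3825d3836aa86ea1d843b5158b', 0) // txid (display order), vout
txb.addOutput('1KRMKfeZcmosxALVYESdPNez1AP1mEtywp', 12000) // address string or script Buffer, value in satoshis

// sign(vin, keyPair [, redeemScript, hashType, witnessValue, witnessScript])
txb.sign(0, keyPair)

var tx = txb.build() // use buildIncomplete() instead if co-signers still need to add signatures
console.log(tx.toHex())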
29 src/types.d.ts vendored
@@ -1,29 +0,0 @@
/// <reference types="node" />
export declare const typeforce: any;
export declare function isPoint(p: Buffer | number | undefined | null): boolean;
export declare function UInt31(value: number): boolean;
export declare function BIP32Path(value: string): boolean;
export declare namespace BIP32Path {
    var toJSON: () => string;
}
export declare function Signer(obj: any): boolean;
export declare function Satoshi(value: number): boolean;
export declare const ECPoint: any;
export declare const Network: any;
export declare const Buffer256bit: any;
export declare const Hash160bit: any;
export declare const Hash256bit: any;
export declare const Number: any;
export declare const Array: any;
export declare const Boolean: any;
export declare const String: any;
export declare const Buffer: any;
export declare const Hex: any;
export declare const maybe: any;
export declare const tuple: any;
export declare const UInt8: any;
export declare const UInt32: any;
export declare const Function: any;
export declare const BufferN: any;
export declare const Null: any;
export declare const oneOf: any;
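A brief sketch of how the guards declared above combine with typeforce; the relative require path is an assumption.

// Hypothetical usage sketch.
var typeforce = require('typeforce')
var types = require('./src/types')

types.UInt31(Math.pow(2, 31) - 1)   // true; 2^31 itself is rejected
types.BIP32Path("m/44'/0'/0'/0/0")  // truthy for a valid derivation path
types.Satoshi(21 * 1e14)            // true: at most 21 million BTC expressed in satoshis

typeforce(types.Satoshi, 12000)     // passes silently
// typeforce(types.Satoshi, 21 * 1e14 + 1) // would throw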
126 src/types.js
@@ -1,87 +1,53 @@
'use strict';
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
exports.oneOf = exports.Null = exports.BufferN = exports.Function = exports.UInt32 = exports.UInt8 = exports.tuple = exports.maybe = exports.Hex = exports.Buffer = exports.String = exports.Boolean = exports.Array = exports.Number = exports.Hash256bit = exports.Hash160bit = exports.Buffer256bit = exports.Network = exports.ECPoint = exports.Satoshi = exports.Signer = exports.BIP32Path = exports.UInt31 = exports.isPoint = exports.typeforce = void 0;
|
||||
const buffer_1 = require('buffer');
|
||||
exports.typeforce = require('typeforce');
|
||||
const ZERO32 = buffer_1.Buffer.alloc(32, 0);
|
||||
const EC_P = buffer_1.Buffer.from(
|
||||
'fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f',
|
||||
'hex',
|
||||
);
|
||||
function isPoint(p) {
|
||||
if (!buffer_1.Buffer.isBuffer(p)) return false;
|
||||
if (p.length < 33) return false;
|
||||
const t = p[0];
|
||||
const x = p.slice(1, 33);
|
||||
if (x.compare(ZERO32) === 0) return false;
|
||||
if (x.compare(EC_P) >= 0) return false;
|
||||
if ((t === 0x02 || t === 0x03) && p.length === 33) {
|
||||
return true;
|
||||
}
|
||||
const y = p.slice(33);
|
||||
if (y.compare(ZERO32) === 0) return false;
|
||||
if (y.compare(EC_P) >= 0) return false;
|
||||
if (t === 0x04 && p.length === 65) return true;
|
||||
return false;
|
||||
var typeforce = require('typeforce')
|
||||
|
||||
var UINT31_MAX = Math.pow(2, 31) - 1
|
||||
function UInt31 (value) {
|
||||
return typeforce.UInt32(value) && value <= UINT31_MAX
|
||||
}
|
||||
exports.isPoint = isPoint;
|
||||
const UINT31_MAX = Math.pow(2, 31) - 1;
|
||||
function UInt31(value) {
|
||||
return exports.typeforce.UInt32(value) && value <= UINT31_MAX;
|
||||
|
||||
function BIP32Path (value) {
|
||||
return typeforce.String(value) && value.match(/^(m\/)?(\d+'?\/)*\d+'?$/)
|
||||
}
|
||||
exports.UInt31 = UInt31;
|
||||
function BIP32Path(value) {
|
||||
return (
|
||||
exports.typeforce.String(value) && !!value.match(/^(m\/)?(\d+'?\/)*\d+'?$/)
|
||||
);
|
||||
BIP32Path.toJSON = function () { return 'BIP32 derivation path' }
|
||||
|
||||
var SATOSHI_MAX = 21 * 1e14
|
||||
function Satoshi (value) {
|
||||
return typeforce.UInt53(value) && value <= SATOSHI_MAX
|
||||
}
|
||||
exports.BIP32Path = BIP32Path;
|
||||
BIP32Path.toJSON = () => {
|
||||
return 'BIP32 derivation path';
|
||||
};
|
||||
function Signer(obj) {
|
||||
return (
|
||||
(exports.typeforce.Buffer(obj.publicKey) ||
|
||||
typeof obj.getPublicKey === 'function') &&
|
||||
typeof obj.sign === 'function'
|
||||
);
|
||||
}
|
||||
exports.Signer = Signer;
|
||||
const SATOSHI_MAX = 21 * 1e14;
|
||||
function Satoshi(value) {
|
||||
return exports.typeforce.UInt53(value) && value <= SATOSHI_MAX;
|
||||
}
|
||||
exports.Satoshi = Satoshi;
|
||||
|
||||
// external dependent types
|
||||
exports.ECPoint = exports.typeforce.quacksLike('Point');
|
||||
var BigInt = typeforce.quacksLike('BigInteger')
|
||||
var ECPoint = typeforce.quacksLike('Point')
|
||||
|
||||
// exposed, external API
|
||||
exports.Network = exports.typeforce.compile({
|
||||
messagePrefix: exports.typeforce.oneOf(
|
||||
exports.typeforce.Buffer,
|
||||
exports.typeforce.String,
|
||||
),
|
||||
var ECSignature = typeforce.compile({ r: BigInt, s: BigInt })
|
||||
var Network = typeforce.compile({
|
||||
messagePrefix: typeforce.oneOf(typeforce.Buffer, typeforce.String),
|
||||
bip32: {
|
||||
public: exports.typeforce.UInt32,
|
||||
private: exports.typeforce.UInt32,
|
||||
public: typeforce.UInt32,
|
||||
private: typeforce.UInt32
|
||||
},
|
||||
pubKeyHash: exports.typeforce.UInt8,
|
||||
scriptHash: exports.typeforce.UInt8,
|
||||
wif: exports.typeforce.UInt8,
|
||||
});
|
||||
exports.Buffer256bit = exports.typeforce.BufferN(32);
|
||||
exports.Hash160bit = exports.typeforce.BufferN(20);
|
||||
exports.Hash256bit = exports.typeforce.BufferN(32);
|
||||
exports.Number = exports.typeforce.Number; // tslint:disable-line variable-name
|
||||
exports.Array = exports.typeforce.Array;
|
||||
exports.Boolean = exports.typeforce.Boolean; // tslint:disable-line variable-name
|
||||
exports.String = exports.typeforce.String; // tslint:disable-line variable-name
|
||||
exports.Buffer = exports.typeforce.Buffer;
|
||||
exports.Hex = exports.typeforce.Hex;
|
||||
exports.maybe = exports.typeforce.maybe;
|
||||
exports.tuple = exports.typeforce.tuple;
|
||||
exports.UInt8 = exports.typeforce.UInt8;
|
||||
exports.UInt32 = exports.typeforce.UInt32;
|
||||
exports.Function = exports.typeforce.Function;
|
||||
exports.BufferN = exports.typeforce.BufferN;
|
||||
exports.Null = exports.typeforce.Null;
|
||||
exports.oneOf = exports.typeforce.oneOf;
|
||||
pubKeyHash: typeforce.UInt8,
|
||||
scriptHash: typeforce.UInt8,
|
||||
wif: typeforce.UInt8
|
||||
})
|
||||
|
||||
// extend typeforce types with ours
|
||||
var types = {
|
||||
BigInt: BigInt,
|
||||
BIP32Path: BIP32Path,
|
||||
Buffer256bit: typeforce.BufferN(32),
|
||||
ECPoint: ECPoint,
|
||||
ECSignature: ECSignature,
|
||||
Hash160bit: typeforce.BufferN(20),
|
||||
Hash256bit: typeforce.BufferN(32),
|
||||
Network: Network,
|
||||
Satoshi: Satoshi,
|
||||
UInt31: UInt31
|
||||
}
|
||||
|
||||
for (var typeName in typeforce) {
|
||||
types[typeName] = typeforce[typeName]
|
||||
}
|
||||
|
||||
module.exports = types
|
||||
|
|
124 test/address.js Normal file
@@ -0,0 +1,124 @@
/* global describe, it */
|
||||
|
||||
var assert = require('assert')
|
||||
var baddress = require('../src/address')
|
||||
var networks = require('../src/networks')
|
||||
var bscript = require('../src/script')
|
||||
var fixtures = require('./fixtures/address.json')
|
||||
|
||||
describe('address', function () {
|
||||
describe('fromBase58Check', function () {
|
||||
fixtures.standard.forEach(function (f) {
|
||||
if (!f.base58check) return
|
||||
|
||||
it('decodes ' + f.base58check, function () {
|
||||
var decode = baddress.fromBase58Check(f.base58check)
|
||||
|
||||
assert.strictEqual(decode.version, f.version)
|
||||
assert.strictEqual(decode.hash.toString('hex'), f.hash)
|
||||
})
|
||||
})
|
||||
|
||||
fixtures.invalid.fromBase58Check.forEach(function (f) {
|
||||
it('throws on ' + f.exception, function () {
|
||||
assert.throws(function () {
|
||||
baddress.fromBase58Check(f.address)
|
||||
}, new RegExp(f.address + ' ' + f.exception))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('fromBech32', function () {
|
||||
fixtures.standard.forEach((f) => {
|
||||
if (!f.bech32) return
|
||||
|
||||
it('decodes ' + f.bech32, function () {
|
||||
var actual = baddress.fromBech32(f.bech32)
|
||||
|
||||
assert.strictEqual(actual.version, f.version)
|
||||
assert.strictEqual(actual.prefix, networks[f.network].bech32)
|
||||
assert.strictEqual(actual.data.toString('hex'), f.data)
|
||||
})
|
||||
})
|
||||
|
||||
fixtures.invalid.bech32.forEach((f, i) => {
|
||||
it('decode fails for ' + f.bech32 + '(' + f.exception + ')', function () {
|
||||
assert.throws(function () {
|
||||
baddress.fromBech32(f.address)
|
||||
}, new RegExp(f.exception))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('fromOutputScript', function () {
|
||||
fixtures.standard.forEach(function (f) {
|
||||
it('encodes ' + f.script.slice(0, 30) + '... (' + f.network + ')', function () {
|
||||
var script = bscript.fromASM(f.script)
|
||||
var address = baddress.fromOutputScript(script, networks[f.network])
|
||||
|
||||
assert.strictEqual(address, f.base58check || f.bech32.toLowerCase())
|
||||
})
|
||||
})
|
||||
|
||||
fixtures.invalid.fromOutputScript.forEach(function (f) {
|
||||
it('throws when ' + f.script.slice(0, 30) + '... ' + f.exception, function () {
|
||||
var script = bscript.fromASM(f.script)
|
||||
|
||||
assert.throws(function () {
|
||||
baddress.fromOutputScript(script)
|
||||
}, new RegExp(f.exception))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('toBase58Check', function () {
|
||||
fixtures.standard.forEach(function (f) {
|
||||
if (!f.base58check) return
|
||||
|
||||
it('encodes ' + f.hash + ' (' + f.network + ')', function () {
|
||||
var address = baddress.toBase58Check(Buffer.from(f.hash, 'hex'), f.version)
|
||||
|
||||
assert.strictEqual(address, f.base58check)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('toBech32', function () {
|
||||
fixtures.bech32.forEach((f, i) => {
|
||||
if (!f.bech32) return
|
||||
var data = Buffer.from(f.data, 'hex')
|
||||
|
||||
it('encode ' + f.address, function () {
|
||||
assert.deepEqual(baddress.toBech32(data, f.version, f.prefix), f.address)
|
||||
})
|
||||
})
|
||||
|
||||
fixtures.invalid.bech32.forEach((f, i) => {
|
||||
if (!f.prefix || f.version === undefined || f.data === undefined) return
|
||||
|
||||
it('encode fails (' + f.exception, function () {
|
||||
assert.throws(function () {
|
||||
baddress.toBech32(Buffer.from(f.data, 'hex'), f.version, f.prefix)
|
||||
}, new RegExp(f.exception))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('toOutputScript', function () {
|
||||
fixtures.standard.forEach(function (f) {
|
||||
it('decodes ' + f.script.slice(0, 30) + '... (' + f.network + ')', function () {
|
||||
var script = baddress.toOutputScript(f.base58check || f.bech32, networks[f.network])
|
||||
|
||||
assert.strictEqual(bscript.toASM(script), f.script)
|
||||
})
|
||||
})
|
||||
|
||||
fixtures.invalid.toOutputScript.forEach(function (f) {
|
||||
it('throws when ' + f.exception, function () {
|
||||
assert.throws(function () {
|
||||
baddress.toOutputScript(f.address, f.network)
|
||||
}, new RegExp(f.address + ' ' + f.exception))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
|
@ -1,148 +0,0 @@
|
|||
import * as assert from 'assert';
|
||||
import { describe, it } from 'mocha';
|
||||
import * as baddress from '../src/address';
|
||||
import * as bscript from '../src/script';
|
||||
import * as fixtures from './fixtures/address.json';
|
||||
|
||||
const NETWORKS = Object.assign(
|
||||
{
|
||||
litecoin: {
|
||||
messagePrefix: '\x19Litecoin Signed Message:\n',
|
||||
bip32: {
|
||||
public: 0x019da462,
|
||||
private: 0x019d9cfe,
|
||||
},
|
||||
pubKeyHash: 0x30,
|
||||
scriptHash: 0x32,
|
||||
wif: 0xb0,
|
||||
},
|
||||
},
|
||||
require('../src/networks'),
|
||||
);
|
||||
|
||||
describe('address', () => {
|
||||
describe('fromBase58Check', () => {
|
||||
fixtures.standard.forEach(f => {
|
||||
if (!f.base58check) return;
|
||||
|
||||
it('decodes ' + f.base58check, () => {
|
||||
const decode = baddress.fromBase58Check(f.base58check);
|
||||
|
||||
assert.strictEqual(decode.version, f.version);
|
||||
assert.strictEqual(decode.hash.toString('hex'), f.hash);
|
||||
});
|
||||
});
|
||||
|
||||
fixtures.invalid.fromBase58Check.forEach(f => {
|
||||
it('throws on ' + f.exception, () => {
|
||||
assert.throws(() => {
|
||||
baddress.fromBase58Check(f.address);
|
||||
}, new RegExp(f.address + ' ' + f.exception));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('fromBech32', () => {
|
||||
fixtures.standard.forEach(f => {
|
||||
if (!f.bech32) return;
|
||||
|
||||
it('decodes ' + f.bech32, () => {
|
||||
const actual = baddress.fromBech32(f.bech32);
|
||||
|
||||
assert.strictEqual(actual.version, f.version);
|
||||
assert.strictEqual(actual.prefix, NETWORKS[f.network].bech32);
|
||||
assert.strictEqual(actual.data.toString('hex'), f.data);
|
||||
});
|
||||
});
|
||||
|
||||
fixtures.invalid.bech32.forEach(f => {
|
||||
it('decode fails for ' + f.address + '(' + f.exception + ')', () => {
|
||||
assert.throws(() => {
|
||||
baddress.fromBech32(f.address);
|
||||
}, new RegExp(f.exception));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('fromOutputScript', () => {
|
||||
fixtures.standard.forEach(f => {
|
||||
it('encodes ' + f.script.slice(0, 30) + '... (' + f.network + ')', () => {
|
||||
const script = bscript.fromASM(f.script);
|
||||
const address = baddress.fromOutputScript(script, NETWORKS[f.network]);
|
||||
|
||||
assert.strictEqual(address, f.base58check || f.bech32!.toLowerCase());
|
||||
});
|
||||
});
|
||||
|
||||
fixtures.invalid.fromOutputScript.forEach(f => {
|
||||
it('throws when ' + f.script.slice(0, 30) + '... ' + f.exception, () => {
|
||||
const script = bscript.fromASM(f.script);
|
||||
|
||||
assert.throws(() => {
|
||||
baddress.fromOutputScript(script);
|
||||
}, new RegExp(f.exception));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('toBase58Check', () => {
|
||||
fixtures.standard.forEach(f => {
|
||||
if (!f.base58check) return;
|
||||
|
||||
it('encodes ' + f.hash + ' (' + f.network + ')', () => {
|
||||
const address = baddress.toBase58Check(
|
||||
Buffer.from(f.hash, 'hex'),
|
||||
f.version,
|
||||
);
|
||||
|
||||
assert.strictEqual(address, f.base58check);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('toBech32', () => {
|
||||
fixtures.bech32.forEach(f => {
|
||||
if (!f.address) return;
|
||||
const data = Buffer.from(f.data, 'hex');
|
||||
|
||||
it('encode ' + f.address, () => {
|
||||
assert.deepStrictEqual(
|
||||
baddress.toBech32(data, f.version, f.prefix),
|
||||
f.address.toLowerCase(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// TODO: These fixtures (according to TypeScript) have none of the data used below
|
||||
fixtures.invalid.bech32.forEach((f: any) => {
|
||||
if (!f.prefix || f.version === undefined || f.data === undefined) return;
|
||||
|
||||
it('encode fails (' + f.exception, () => {
|
||||
assert.throws(() => {
|
||||
baddress.toBech32(Buffer.from(f.data, 'hex'), f.version, f.prefix);
|
||||
}, new RegExp(f.exception));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('toOutputScript', () => {
|
||||
fixtures.standard.forEach(f => {
|
||||
it('decodes ' + f.script.slice(0, 30) + '... (' + f.network + ')', () => {
|
||||
const script = baddress.toOutputScript(
|
||||
(f.base58check || f.bech32)!,
|
||||
NETWORKS[f.network],
|
||||
);
|
||||
|
||||
assert.strictEqual(bscript.toASM(script), f.script);
|
||||
});
|
||||
});
|
||||
|
||||
fixtures.invalid.toOutputScript.forEach(f => {
|
||||
it('throws when ' + f.exception, () => {
|
||||
assert.throws(() => {
|
||||
baddress.toOutputScript(f.address, f.network as any);
|
||||
}, new RegExp(f.address + ' ' + f.exception));
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
226 test/bitcoin.core.js Normal file
@@ -0,0 +1,226 @@
/* global describe, it */
|
||||
|
||||
var assert = require('assert')
|
||||
var base58 = require('bs58')
|
||||
var bitcoin = require('../')
|
||||
|
||||
var base58EncodeDecode = require('./fixtures/core/base58_encode_decode.json')
|
||||
var base58KeysInvalid = require('./fixtures/core/base58_keys_invalid.json')
|
||||
var base58KeysValid = require('./fixtures/core/base58_keys_valid.json')
|
||||
var blocksValid = require('./fixtures/core/blocks.json')
|
||||
var sigCanonical = require('./fixtures/core/sig_canonical.json')
|
||||
var sigHash = require('./fixtures/core/sighash.json')
|
||||
var sigNoncanonical = require('./fixtures/core/sig_noncanonical.json')
|
||||
var txValid = require('./fixtures/core/tx_valid.json')
|
||||
|
||||
describe('Bitcoin-core', function () {
|
||||
// base58EncodeDecode
|
||||
describe('base58', function () {
|
||||
base58EncodeDecode.forEach(function (f) {
|
||||
var fhex = f[0]
|
||||
var fb58 = f[1]
|
||||
|
||||
it('can decode ' + fb58, function () {
|
||||
var buffer = base58.decode(fb58)
|
||||
var actual = buffer.toString('hex')
|
||||
|
||||
assert.strictEqual(actual, fhex)
|
||||
})
|
||||
|
||||
it('can encode ' + fhex, function () {
|
||||
var buffer = Buffer.from(fhex, 'hex')
|
||||
var actual = base58.encode(buffer)
|
||||
|
||||
assert.strictEqual(actual, fb58)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// base58KeysValid
|
||||
describe('address.toBase58Check', function () {
|
||||
var typeMap = {
|
||||
'pubkey': 'pubKeyHash',
|
||||
'script': 'scriptHash'
|
||||
}
|
||||
|
||||
base58KeysValid.forEach(function (f) {
|
||||
var expected = f[0]
|
||||
var hash = Buffer.from(f[1], 'hex')
|
||||
var params = f[2]
|
||||
|
||||
if (params.isPrivkey) return
|
||||
|
||||
var network = params.isTestnet ? bitcoin.networks.testnet : bitcoin.networks.bitcoin
|
||||
var version = network[typeMap[params.addrType]]
|
||||
|
||||
it('can export ' + expected, function () {
|
||||
assert.strictEqual(bitcoin.address.toBase58Check(hash, version), expected)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// base58KeysInvalid
|
||||
describe('address.fromBase58Check', function () {
|
||||
var allowedNetworks = [
|
||||
bitcoin.networks.bitcoin.pubkeyhash,
|
||||
bitcoin.networks.bitcoin.scripthash,
|
||||
bitcoin.networks.testnet.pubkeyhash,
|
||||
bitcoin.networks.testnet.scripthash
|
||||
]
|
||||
|
||||
base58KeysInvalid.forEach(function (f) {
|
||||
var string = f[0]
|
||||
|
||||
it('throws on ' + string, function () {
|
||||
assert.throws(function () {
|
||||
var address = bitcoin.address.fromBase58Check(string)
|
||||
|
||||
assert.notEqual(allowedNetworks.indexOf(address.version), -1, 'Invalid network')
|
||||
}, /(Invalid (checksum|network))|(too (short|long))/)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// base58KeysValid
|
||||
describe('ECPair', function () {
|
||||
base58KeysValid.forEach(function (f) {
|
||||
var string = f[0]
|
||||
var hex = f[1]
|
||||
var params = f[2]
|
||||
|
||||
if (!params.isPrivkey) return
|
||||
|
||||
var network = params.isTestnet ? bitcoin.networks.testnet : bitcoin.networks.bitcoin
|
||||
var keyPair = bitcoin.ECPair.fromWIF(string, network)
|
||||
|
||||
it('fromWIF imports ' + string, function () {
|
||||
assert.strictEqual(keyPair.d.toHex(), hex)
|
||||
assert.strictEqual(keyPair.compressed, params.isCompressed)
|
||||
})
|
||||
|
||||
it('toWIF exports ' + hex + ' to ' + string, function () {
|
||||
assert.strictEqual(keyPair.toWIF(), string)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// base58KeysInvalid
|
||||
describe('ECPair.fromWIF', function () {
|
||||
var allowedNetworks = [
|
||||
bitcoin.networks.bitcoin,
|
||||
bitcoin.networks.testnet
|
||||
]
|
||||
|
||||
base58KeysInvalid.forEach(function (f) {
|
||||
var string = f[0]
|
||||
|
||||
it('throws on ' + string, function () {
|
||||
assert.throws(function () {
|
||||
bitcoin.ECPair.fromWIF(string, allowedNetworks)
|
||||
}, /(Invalid|Unknown) (checksum|compression flag|network version|WIF length)/)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Block.fromHex', function () {
|
||||
blocksValid.forEach(function (f) {
|
||||
it('can parse ' + f.id, function () {
|
||||
var block = bitcoin.Block.fromHex(f.hex)
|
||||
|
||||
assert.strictEqual(block.getId(), f.id)
|
||||
assert.strictEqual(block.transactions.length, f.transactions)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// txValid
|
||||
describe('Transaction.fromHex', function () {
|
||||
txValid.forEach(function (f) {
|
||||
// Objects that are only a single string are ignored
|
||||
if (f.length === 1) return
|
||||
|
||||
var inputs = f[0]
|
||||
var fhex = f[1]
|
||||
// var verifyFlags = f[2] // TODO: do we need to test this?
|
||||
|
||||
it('can decode ' + fhex, function () {
|
||||
var transaction = bitcoin.Transaction.fromHex(fhex)
|
||||
|
||||
transaction.ins.forEach(function (txIn, i) {
|
||||
var input = inputs[i]
|
||||
|
||||
// reverse because test data is reversed
|
||||
var prevOutHash = Buffer.from(input[0], 'hex').reverse()
|
||||
var prevOutIndex = input[1]
|
||||
|
||||
assert.deepEqual(txIn.hash, prevOutHash)
|
||||
|
||||
// we read UInt32, not Int32
|
||||
assert.strictEqual(txIn.index & 0xffffffff, prevOutIndex)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// sighash
|
||||
describe('Transaction', function () {
|
||||
sigHash.forEach(function (f) {
|
||||
// Objects that are only a single string are ignored
|
||||
if (f.length === 1) return
|
||||
|
||||
var txHex = f[0]
|
||||
var scriptHex = f[1]
|
||||
var inIndex = f[2]
|
||||
var hashType = f[3]
|
||||
var expectedHash = f[4]
|
||||
|
||||
var hashTypes = []
|
||||
if ((hashType & 0x1f) === bitcoin.Transaction.SIGHASH_NONE) hashTypes.push('SIGHASH_NONE')
|
||||
else if ((hashType & 0x1f) === bitcoin.Transaction.SIGHASH_SINGLE) hashTypes.push('SIGHASH_SINGLE')
|
||||
else hashTypes.push('SIGHASH_ALL')
|
||||
if (hashType & bitcoin.Transaction.SIGHASH_ANYONECANPAY) hashTypes.push('SIGHASH_ANYONECANPAY')
|
||||
|
||||
var hashTypeName = hashTypes.join(' | ')
|
||||
|
||||
it('should hash ' + txHex.slice(0, 40) + '... (' + hashTypeName + ')', function () {
|
||||
var transaction = bitcoin.Transaction.fromHex(txHex)
|
||||
assert.strictEqual(transaction.toHex(), txHex)
|
||||
|
||||
var script = Buffer.from(scriptHex, 'hex')
|
||||
var scriptChunks = bitcoin.script.decompile(script)
|
||||
assert.strictEqual(bitcoin.script.compile(scriptChunks).toString('hex'), scriptHex)
|
||||
|
||||
var hash = transaction.hashForSignature(inIndex, script, hashType)
|
||||
|
||||
// reverse because test data is reversed
|
||||
assert.equal(hash.reverse().toString('hex'), expectedHash)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('ECSignature.parseScriptSignature', function () {
|
||||
sigCanonical.forEach(function (hex) {
|
||||
var buffer = Buffer.from(hex, 'hex')
|
||||
|
||||
it('can parse ' + hex, function () {
|
||||
var parsed = bitcoin.ECSignature.parseScriptSignature(buffer)
|
||||
var actual = parsed.signature.toScriptSignature(parsed.hashType)
|
||||
assert.strictEqual(actual.toString('hex'), hex)
|
||||
})
|
||||
})
|
||||
|
||||
sigNoncanonical.forEach(function (hex, i) {
|
||||
if (i === 0) return
|
||||
if (i % 2 !== 0) return
|
||||
|
||||
var description = sigNoncanonical[i - 1].slice(0, -1)
|
||||
var buffer = Buffer.from(hex, 'hex')
|
||||
|
||||
it('throws on ' + description, function () {
|
||||
assert.throws(function () {
|
||||
bitcoin.ECSignature.parseScriptSignature(buffer)
|
||||
}, /Expected DER (integer|sequence)|(R|S) value (excessively padded|is negative)|(R|S|DER sequence) length is (zero|too short|too long|invalid)|Invalid hashType/)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
|
@ -1,223 +0,0 @@
|
|||
import * as assert from 'assert';
|
||||
import * as base58 from 'bs58';
|
||||
import { describe, it } from 'mocha';
|
||||
import * as bitcoin from '..';
|
||||
import * as base58EncodeDecode from './fixtures/core/base58_encode_decode.json';
|
||||
import * as base58KeysInvalid from './fixtures/core/base58_keys_invalid.json';
|
||||
import * as base58KeysValid from './fixtures/core/base58_keys_valid.json';
|
||||
import * as blocksValid from './fixtures/core/blocks.json';
|
||||
import * as sigCanonical from './fixtures/core/sig_canonical.json';
|
||||
import * as sigNoncanonical from './fixtures/core/sig_noncanonical.json';
|
||||
import * as sigHash from './fixtures/core/sighash.json';
import * as txValid from './fixtures/core/tx_valid.json';

describe('Bitcoin-core', () => {
  // base58EncodeDecode
  describe('base58', () => {
    base58EncodeDecode.forEach(f => {
      const fhex = f[0];
      const fb58 = f[1];

      it('can decode ' + fb58, () => {
        const buffer = base58.decode(fb58);
        const actual = buffer.toString('hex');

        assert.strictEqual(actual, fhex);
      });

      it('can encode ' + fhex, () => {
        const buffer = Buffer.from(fhex, 'hex');
        const actual = base58.encode(buffer);

        assert.strictEqual(actual, fb58);
      });
    });
  });

  // base58KeysValid
  describe('address.toBase58Check', () => {
    const typeMap: any = {
      pubkey: 'pubKeyHash',
      script: 'scriptHash',
    };

    base58KeysValid.forEach(f => {
      const expected = f[0];
      const hash = Buffer.from(f[1] as any, 'hex');
      const params = f[2] as any;

      if (params.isPrivkey) return;

      const network: any = params.isTestnet
        ? bitcoin.networks.testnet
        : bitcoin.networks.bitcoin;
      const version = network[typeMap[params.addrType]];

      it('can export ' + expected, () => {
        assert.strictEqual(
          bitcoin.address.toBase58Check(hash, version),
          expected,
        );
      });
    });
  });

  // base58KeysInvalid
  describe('address.fromBase58Check', () => {
    const allowedNetworks = [
      bitcoin.networks.bitcoin.pubKeyHash,
      bitcoin.networks.bitcoin.scriptHash,
      bitcoin.networks.testnet.pubKeyHash,
      bitcoin.networks.testnet.scriptHash,
    ];

    base58KeysInvalid.forEach(f => {
      const strng = f[0];

      it('throws on ' + strng, () => {
        assert.throws(() => {
          const address = bitcoin.address.fromBase58Check(strng);

          assert.notStrictEqual(
            allowedNetworks.indexOf(address.version),
            -1,
            'Invalid network',
          );
        }, /(Invalid (checksum|network))|(too (short|long))/);
      });
    });
  });

  describe('Block.fromHex', () => {
    blocksValid.forEach(f => {
      it('can parse ' + f.id, () => {
        const block = bitcoin.Block.fromHex(f.hex);

        assert.strictEqual(block.getId(), f.id);
        assert.strictEqual(block.transactions!.length, f.transactions);
      });
    });
  });

  // txValid
  describe('Transaction.fromHex', () => {
    txValid.forEach(f => {
      // Objects that are only a single string are ignored
      if (f.length === 1) return;

      const inputs = f[0];
      const fhex = f[1];
      // const verifyFlags = f[2] // TODO: do we need to test this?

      it('can decode ' + fhex, () => {
        const transaction = bitcoin.Transaction.fromHex(fhex as string);

        transaction.ins.forEach((txIn, i) => {
          const input = inputs[i];

          // reverse because test data is reversed
          const prevOutHash = Buffer.from(input[0] as string, 'hex').reverse();
          const prevOutIndex = input[1];

          assert.deepStrictEqual(txIn.hash, prevOutHash);

          // we read UInt32, not Int32
          assert.strictEqual(txIn.index & 0xffffffff, prevOutIndex);
        });
      });
    });
  });

  // sighash
  describe('Transaction', () => {
    sigHash.forEach(f => {
      // Objects that are only a single string are ignored
      if (f.length === 1) return;

      const txHex = f[0] as string;
      const scriptHex = f[1] as string;
      const inIndex = f[2] as number;
      const hashType = f[3] as number;
      const expectedHash = f[4];

      const hashTypes = [];
      if ((hashType & 0x1f) === bitcoin.Transaction.SIGHASH_NONE)
        hashTypes.push('SIGHASH_NONE');
      else if ((hashType & 0x1f) === bitcoin.Transaction.SIGHASH_SINGLE)
        hashTypes.push('SIGHASH_SINGLE');
      else hashTypes.push('SIGHASH_ALL');
      if (hashType & bitcoin.Transaction.SIGHASH_ANYONECANPAY)
        hashTypes.push('SIGHASH_ANYONECANPAY');

      const hashTypeName = hashTypes.join(' | ');

      it(
        'should hash ' + txHex.slice(0, 40) + '... (' + hashTypeName + ')',
        () => {
          const transaction = bitcoin.Transaction.fromHex(txHex);
          assert.strictEqual(transaction.toHex(), txHex);

          const script = Buffer.from(scriptHex, 'hex');
          const scriptChunks = bitcoin.script.decompile(script);
          assert.strictEqual(
            bitcoin.script.compile(scriptChunks!).toString('hex'),
            scriptHex,
          );

          const hash = transaction.hashForSignature(inIndex, script, hashType);

          // reverse because test data is reversed
          assert.strictEqual(
            (hash.reverse() as Buffer).toString('hex'),
            expectedHash,
          );

          assert.doesNotThrow(() =>
            transaction.hashForWitnessV0(
              inIndex,
              script,
              0,
              // convert to UInt32
              hashType < 0 ? 0x100000000 + hashType : hashType,
            ),
          );
        },
      );
    });
  });

  describe('script.signature.decode', () => {
    sigCanonical.forEach(hex => {
      const buffer = Buffer.from(hex, 'hex');

      it('can parse ' + hex, () => {
        const parsed = bitcoin.script.signature.decode(buffer);
        const actual = bitcoin.script.signature.encode(
          parsed.signature,
          parsed.hashType,
        );

        assert.strictEqual(actual.toString('hex'), hex);
      });
    });

    sigNoncanonical.forEach((hex, i) => {
      if (i === 0) return;
      if (i % 2 !== 0) return;

      const description = sigNoncanonical[i - 1].slice(0, -1);
      const buffer = Buffer.from(hex, 'hex');

      it('throws on ' + description, () => {
        const reg = new RegExp(
          'Expected DER (integer|sequence)|(R|S) value (excessively ' +
            'padded|is negative)|(R|S|DER sequence) length is (zero|too ' +
            'short|too long|invalid)|Invalid hashType',
        );
        assert.throws(() => {
          bitcoin.script.signature.decode(buffer);
        }, reg);
      });
    });
  });
});
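The sighash loop above labels each fixture by masking the hashType flags. A minimal standalone sketch of that decomposition, using the standard Bitcoin sighash constants; the constant and function names are local to the sketch, not part of the library under test:

// Sketch only: decompose a Bitcoin sighash flag into readable parts.
const SIGHASH_NONE = 0x02;
const SIGHASH_SINGLE = 0x03;
const SIGHASH_ANYONECANPAY = 0x80;

function describeHashType(hashType: number): string {
  const parts: string[] = [];
  // Only the low five bits select the base mode; anything else defaults to ALL.
  const base = hashType & 0x1f;
  if (base === SIGHASH_NONE) parts.push('SIGHASH_NONE');
  else if (base === SIGHASH_SINGLE) parts.push('SIGHASH_SINGLE');
  else parts.push('SIGHASH_ALL');
  // The high bit is an independent modifier.
  if (hashType & SIGHASH_ANYONECANPAY) parts.push('SIGHASH_ANYONECANPAY');
  return parts.join(' | ');
}

// Example: 0x83 -> 'SIGHASH_SINGLE | SIGHASH_ANYONECANPAY'
console.log(describeHashType(0x83));

Because only the low five bits pick the base mode, 0x81 still reads as SIGHASH_ALL | SIGHASH_ANYONECANPAY, which is exactly how the test names its fixtures.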
149
test/block.js
Normal file
@@ -0,0 +1,149 @@
/* global describe, it, beforeEach */

var assert = require('assert')
var Block = require('../src/block')

var fixtures = require('./fixtures/block')

describe('Block', function () {
  describe('version', function () {
    it('should be interpreted as an int32le', function () {
      var blockHex = 'ffffffff0000000000000000000000000000000000000000000000000000000000000000414141414141414141414141414141414141414141414141414141414141414101000000020000000300000000'
      var block = Block.fromHex(blockHex)
      assert.equal(-1, block.version)
      assert.equal(1, block.timestamp)
    })
  })

  describe('calculateTarget', function () {
    fixtures.targets.forEach(function (f) {
      it('returns ' + f.expected + ' for 0x' + f.bits, function () {
        var bits = parseInt(f.bits, 16)

        assert.equal(Block.calculateTarget(bits).toString('hex'), f.expected)
      })
    })
  })

  describe('fromBuffer/fromHex', function () {
    fixtures.valid.forEach(function (f) {
      it('imports ' + f.description, function () {
        var block = Block.fromHex(f.hex)

        assert.strictEqual(block.version, f.version)
        assert.strictEqual(block.prevHash.toString('hex'), f.prevHash)
        assert.strictEqual(block.merkleRoot.toString('hex'), f.merkleRoot)
        assert.strictEqual(block.timestamp, f.timestamp)
        assert.strictEqual(block.bits, f.bits)
        assert.strictEqual(block.nonce, f.nonce)
        assert.strictEqual(!block.transactions, f.hex.length === 160)
      })
    })

    fixtures.invalid.forEach(function (f) {
      it('throws on ' + f.exception, function () {
        assert.throws(function () {
          Block.fromHex(f.hex)
        }, new RegExp(f.exception))
      })
    })
  })

  describe('toBuffer/toHex', function () {
    fixtures.valid.forEach(function (f) {
      var block

      beforeEach(function () {
        block = Block.fromHex(f.hex)
      })

      it('exports ' + f.description, function () {
        assert.strictEqual(block.toHex(true), f.hex.slice(0, 160))
        assert.strictEqual(block.toHex(), f.hex)
      })
    })
  })

  describe('getHash/getId', function () {
    fixtures.valid.forEach(function (f) {
      var block

      beforeEach(function () {
        block = Block.fromHex(f.hex)
      })

      it('returns ' + f.id + ' for ' + f.description, function () {
        assert.strictEqual(block.getHash().toString('hex'), f.hash)
        assert.strictEqual(block.getId(), f.id)
      })
    })
  })

  describe('getUTCDate', function () {
    fixtures.valid.forEach(function (f) {
      var block

      beforeEach(function () {
        block = Block.fromHex(f.hex)
      })

      it('returns UTC date of ' + f.id, function () {
        var utcDate = block.getUTCDate().getTime()

        assert.strictEqual(utcDate, f.timestamp * 1e3)
      })
    })
  })

  describe('calculateMerkleRoot', function () {
    it('should throw on zero-length transaction array', function () {
      assert.throws(function () {
        Block.calculateMerkleRoot([])
      }, /Cannot compute merkle root for zero transactions/)
    })

    fixtures.valid.forEach(function (f) {
      if (f.hex.length === 160) return

      var block

      beforeEach(function () {
        block = Block.fromHex(f.hex)
      })

      it('returns ' + f.merkleRoot + ' for ' + f.id, function () {
        assert.strictEqual(Block.calculateMerkleRoot(block.transactions).toString('hex'), f.merkleRoot)
      })
    })
  })

  describe('checkMerkleRoot', function () {
    fixtures.valid.forEach(function (f) {
      if (f.hex.length === 160) return

      var block

      beforeEach(function () {
        block = Block.fromHex(f.hex)
      })

      it('returns ' + f.valid + ' for ' + f.id, function () {
        assert.strictEqual(block.checkMerkleRoot(), true)
      })
    })
  })

  describe('checkProofOfWork', function () {
    fixtures.valid.forEach(function (f) {
      var block

      beforeEach(function () {
        block = Block.fromHex(f.hex)
      })

      it('returns ' + f.valid + ' for ' + f.id, function () {
        assert.strictEqual(block.checkProofOfWork(), f.valid)
      })
    })
  })
})
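The calculateTarget cases above turn a compact "bits" field into a 32-byte target. As a reference point, a sketch of the standard compact (nBits) decoding under the consensus rule target = mantissa * 256^(exponent - 3); this is an illustration with local names, not the module's implementation, and it ignores the sign-bit and small-exponent edge cases:

// Sketch: decode a compact nBits value into a 32-byte big-endian target.
function bitsToTarget(bits: number): Buffer {
  const exponent = bits >>> 24; // top byte is the size in bytes
  const mantissa = BigInt(bits & 0x007fffff); // low 23 bits are the mantissa
  const target = mantissa << (8n * BigInt(exponent - 3));
  // Left-pad the big-endian hex to 32 bytes.
  const hex = target.toString(16).padStart(64, '0');
  return Buffer.from(hex, 'hex');
}

// The genesis-era difficulty 0x1d00ffff yields a target starting 00000000ffff...
console.log(bitsToTarget(0x1d00ffff).toString('hex'));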
@@ -1,179 +0,0 @@
import * as assert from 'assert';
import { beforeEach, describe, it } from 'mocha';
import { Block } from '..';

import * as fixtures from './fixtures/block.json';

describe('Block', () => {
  describe('version', () => {
    it('should be interpreted as an int32le', () => {
      const blockHex =
        'ffffffff000000000000000000000000000000000000000000000000000000000000' +
        '00004141414141414141414141414141414141414141414141414141414141414141' +
        '01000000020000000300000000';
      const block = Block.fromHex(blockHex);
      assert.strictEqual(-1, block.version);
      assert.strictEqual(1, block.timestamp);
    });
  });

  describe('calculateTarget', () => {
    fixtures.targets.forEach(f => {
      it('returns ' + f.expected + ' for 0x' + f.bits, () => {
        const bits = parseInt(f.bits, 16);

        assert.strictEqual(
          Block.calculateTarget(bits).toString('hex'),
          f.expected,
        );
      });
    });
  });

  describe('fromBuffer/fromHex', () => {
    fixtures.valid.forEach(f => {
      it('imports ' + f.description, () => {
        const block = Block.fromHex(f.hex);

        assert.strictEqual(block.version, f.version);
        assert.strictEqual(block.prevHash!.toString('hex'), f.prevHash);
        assert.strictEqual(block.merkleRoot!.toString('hex'), f.merkleRoot);
        if (block.witnessCommit) {
          assert.strictEqual(
            block.witnessCommit.toString('hex'),
            f.witnessCommit,
          );
        }
        assert.strictEqual(block.timestamp, f.timestamp);
        assert.strictEqual(block.bits, f.bits);
        assert.strictEqual(block.nonce, f.nonce);
        assert.strictEqual(!block.transactions, f.hex.length === 160);
        if (f.size && f.strippedSize && f.weight) {
          assert.strictEqual(block.byteLength(false, true), f.size);
          assert.strictEqual(block.byteLength(false, false), f.strippedSize);
          assert.strictEqual(block.weight(), f.weight);
        }
      });
    });

    fixtures.invalid.forEach(f => {
      it('throws on ' + f.exception, () => {
        assert.throws(() => {
          Block.fromHex(f.hex);
        }, new RegExp(f.exception));
      });
    });
  });

  describe('toBuffer/toHex', () => {
    fixtures.valid.forEach(f => {
      let block: Block;

      beforeEach(() => {
        block = Block.fromHex(f.hex);
      });

      it('exports ' + f.description, () => {
        assert.strictEqual(block.toHex(true), f.hex.slice(0, 160));
        assert.strictEqual(block.toHex(), f.hex);
      });
    });
  });

  describe('getHash/getId', () => {
    fixtures.valid.forEach(f => {
      let block: Block;

      beforeEach(() => {
        block = Block.fromHex(f.hex);
      });

      it('returns ' + f.id + ' for ' + f.description, () => {
        assert.strictEqual(block.getHash().toString('hex'), f.hash);
        assert.strictEqual(block.getId(), f.id);
      });
    });
  });

  describe('getUTCDate', () => {
    fixtures.valid.forEach(f => {
      let block: Block;

      beforeEach(() => {
        block = Block.fromHex(f.hex);
      });

      it('returns UTC date of ' + f.id, () => {
        const utcDate = block.getUTCDate().getTime();

        assert.strictEqual(utcDate, f.timestamp * 1e3);
      });
    });
  });

  describe('calculateMerkleRoot', () => {
    it('should throw on zero-length transaction array', () => {
      assert.throws(() => {
        Block.calculateMerkleRoot([]);
      }, /Cannot compute merkle root for zero transactions/);
    });

    fixtures.valid.forEach(f => {
      if (f.hex.length === 160) return;

      let block: Block;

      beforeEach(() => {
        block = Block.fromHex(f.hex);
      });

      it('returns ' + f.merkleRoot + ' for ' + f.id, () => {
        assert.strictEqual(
          Block.calculateMerkleRoot(block.transactions!).toString('hex'),
          f.merkleRoot,
        );
      });

      if (f.witnessCommit) {
        it('returns witness commit ' + f.witnessCommit + ' for ' + f.id, () => {
          assert.strictEqual(
            Block.calculateMerkleRoot(block.transactions!, true).toString(
              'hex',
            ),
            f.witnessCommit,
          );
        });
      }
    });
  });

  describe('checkTxRoots', () => {
    fixtures.valid.forEach(f => {
      if (f.hex.length === 160) return;

      let block: Block;

      beforeEach(() => {
        block = Block.fromHex(f.hex);
      });

      it('returns ' + f.valid + ' for ' + f.id, () => {
        assert.strictEqual(block.checkTxRoots(), true);
      });
    });
  });

  describe('checkProofOfWork', () => {
    fixtures.valid.forEach(f => {
      let block: Block;

      beforeEach(() => {
        block = Block.fromHex(f.hex);
      });

      it('returns ' + f.valid + ' for ' + f.id, () => {
        assert.strictEqual(block.checkProofOfWork(), f.valid);
      });
    });
  });
});
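For reference, the merkle root checked by the calculateMerkleRoot cases is the standard Bitcoin construction: double SHA-256 over concatenated pairs, duplicating the last hash on odd-sized levels, with hashes in internal byte order. A small sketch built on Node's crypto module; names are local to the sketch, not the library's code:

// Sketch only: pairwise double-SHA-256 merkle root over internal-order hashes.
import { createHash } from 'crypto';

function hash256(data: Buffer): Buffer {
  const first = createHash('sha256').update(data).digest();
  return createHash('sha256').update(first).digest();
}

function merkleRoot(hashes: Buffer[]): Buffer {
  if (hashes.length === 0) {
    throw new Error('Cannot compute merkle root for zero transactions');
  }
  let level = hashes;
  while (level.length > 1) {
    const next: Buffer[] = [];
    for (let i = 0; i < level.length; i += 2) {
      const left = level[i];
      const right = level[i + 1] ?? left; // duplicate the last hash if odd
      next.push(hash256(Buffer.concat([left, right])));
    }
    level = next;
  }
  return level[0];
}

Note that the txids printed in fixtures are byte-reversed for display; the tree itself is built over the internal (little-endian) byte order, which is why the tests compare against block.merkleRoot rather than displayed ids.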
165
test/bufferutils.js
Normal file
@@ -0,0 +1,165 @@
/* global describe, it */

var assert = require('assert')
var bufferutils = require('../src/bufferutils')

var fixtures = require('./fixtures/bufferutils.json')

describe('bufferutils', function () {
  describe('pushDataSize', function () {
    fixtures.valid.forEach(function (f) {
      it('determines the pushDataSize of ' + f.dec + ' correctly', function () {
        if (!f.hexPD) return

        var size = bufferutils.pushDataSize(f.dec)

        assert.strictEqual(size, f.hexPD.length / 2)
      })
    })
  })

  describe('readPushDataInt', function () {
    fixtures.valid.forEach(function (f) {
      if (!f.hexPD) return

      it('decodes ' + f.hexPD + ' correctly', function () {
        var buffer = Buffer.from(f.hexPD, 'hex')
        var d = bufferutils.readPushDataInt(buffer, 0)
        var fopcode = parseInt(f.hexPD.substr(0, 2), 16)

        assert.strictEqual(d.opcode, fopcode)
        assert.strictEqual(d.number, f.dec)
        assert.strictEqual(d.size, buffer.length)
      })
    })

    fixtures.invalid.readPushDataInt.forEach(function (f) {
      if (!f.hexPD) return

      it('decodes ' + f.hexPD + ' as null', function () {
        var buffer = Buffer.from(f.hexPD, 'hex')

        var n = bufferutils.readPushDataInt(buffer, 0)
        assert.strictEqual(n, null)
      })
    })
  })

  describe('readUInt64LE', function () {
    fixtures.valid.forEach(function (f) {
      it('decodes ' + f.hex64 + ' correctly', function () {
        var buffer = Buffer.from(f.hex64, 'hex')
        var number = bufferutils.readUInt64LE(buffer, 0)

        assert.strictEqual(number, f.dec)
      })
    })

    fixtures.invalid.readUInt64LE.forEach(function (f) {
      it('throws on ' + f.description, function () {
        var buffer = Buffer.from(f.hex64, 'hex')

        assert.throws(function () {
          bufferutils.readUInt64LE(buffer, 0)
        }, new RegExp(f.exception))
      })
    })
  })

  describe('readVarInt', function () {
    fixtures.valid.forEach(function (f) {
      it('decodes ' + f.hexVI + ' correctly', function () {
        var buffer = Buffer.from(f.hexVI, 'hex')
        var d = bufferutils.readVarInt(buffer, 0)

        assert.strictEqual(d.number, f.dec)
        assert.strictEqual(d.size, buffer.length)
      })
    })

    fixtures.invalid.readUInt64LE.forEach(function (f) {
      it('throws on ' + f.description, function () {
        var buffer = Buffer.from(f.hexVI, 'hex')

        assert.throws(function () {
          bufferutils.readVarInt(buffer, 0)
        }, new RegExp(f.exception))
      })
    })
  })

  describe('varIntBuffer', function () {
    fixtures.valid.forEach(function (f) {
      it('encodes ' + f.dec + ' correctly', function () {
        var buffer = bufferutils.varIntBuffer(f.dec)

        assert.strictEqual(buffer.toString('hex'), f.hexVI)
      })
    })
  })

  describe('varIntSize', function () {
    fixtures.valid.forEach(function (f) {
      it('determines the varIntSize of ' + f.dec + ' correctly', function () {
        var size = bufferutils.varIntSize(f.dec)

        assert.strictEqual(size, f.hexVI.length / 2)
      })
    })
  })

  describe('writePushDataInt', function () {
    fixtures.valid.forEach(function (f) {
      if (!f.hexPD) return

      it('encodes ' + f.dec + ' correctly', function () {
        var buffer = Buffer.alloc(5, 0)

        var n = bufferutils.writePushDataInt(buffer, f.dec, 0)
        assert.strictEqual(buffer.slice(0, n).toString('hex'), f.hexPD)
      })
    })
  })

  describe('writeUInt64LE', function () {
    fixtures.valid.forEach(function (f) {
      it('encodes ' + f.dec + ' correctly', function () {
        var buffer = Buffer.alloc(8, 0)

        bufferutils.writeUInt64LE(buffer, f.dec, 0)
        assert.strictEqual(buffer.toString('hex'), f.hex64)
      })
    })

    fixtures.invalid.readUInt64LE.forEach(function (f) {
      it('throws on ' + f.description, function () {
        var buffer = Buffer.alloc(8, 0)

        assert.throws(function () {
          bufferutils.writeUInt64LE(buffer, f.dec, 0)
        }, new RegExp(f.exception))
      })
    })
  })

  describe('writeVarInt', function () {
    fixtures.valid.forEach(function (f) {
      it('encodes ' + f.dec + ' correctly', function () {
        var buffer = Buffer.alloc(9, 0)

        var n = bufferutils.writeVarInt(buffer, f.dec, 0)
        assert.strictEqual(buffer.slice(0, n).toString('hex'), f.hexVI)
      })
    })

    fixtures.invalid.readUInt64LE.forEach(function (f) {
      it('throws on ' + f.description, function () {
        var buffer = Buffer.alloc(9, 0)

        assert.throws(function () {
          bufferutils.writeVarInt(buffer, f.dec, 0)
        }, new RegExp(f.exception))
      })
    })
  })
})
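The readVarInt/writeVarInt cases above round-trip Bitcoin's CompactSize encoding: values below 0xfd fit in one byte, and larger values get a 0xfd, 0xfe, or 0xff marker followed by a 2-, 4-, or 8-byte little-endian integer. A hedged sketch of the encoder side as a local helper, not the bufferutils implementation:

// Sketch only: CompactSize ("varint") encoding.
function encodeVarInt(n: number): Buffer {
  if (n < 0xfd) return Buffer.from([n]);
  if (n <= 0xffff) {
    const buf = Buffer.alloc(3);
    buf[0] = 0xfd;
    buf.writeUInt16LE(n, 1);
    return buf;
  }
  if (n <= 0xffffffff) {
    const buf = Buffer.alloc(5);
    buf[0] = 0xfe;
    buf.writeUInt32LE(n, 1);
    return buf;
  }
  const buf = Buffer.alloc(9);
  buf[0] = 0xff;
  buf.writeBigUInt64LE(BigInt(n), 1);
  return buf;
}

// 253 is the first value that needs the 0xfd form: 'fdfd00'
console.log(encodeVarInt(253).toString('hex'));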
@@ -1,512 +0,0 @@
import * as assert from 'assert';
import { describe, it } from 'mocha';
import * as bufferutils from '../src/bufferutils';
import { BufferReader, BufferWriter } from '../src/bufferutils';

import * as fixtures from './fixtures/bufferutils.json';
const varuint = require('varuint-bitcoin');

describe('bufferutils', () => {
  function concatToBuffer(values: number[][]): Buffer {
    return Buffer.concat(values.map(data => Buffer.from(data)));
  }

  describe('readUInt64LE', () => {
    fixtures.valid.forEach(f => {
      it('decodes ' + f.hex, () => {
        const buffer = Buffer.from(f.hex, 'hex');
        const num = bufferutils.readUInt64LE(buffer, 0);

        assert.strictEqual(num, f.dec);
      });
    });

    fixtures.invalid.readUInt64LE.forEach(f => {
      it('throws on ' + f.description, () => {
        const buffer = Buffer.from(f.hex, 'hex');

        assert.throws(() => {
          bufferutils.readUInt64LE(buffer, 0);
        }, new RegExp(f.exception));
      });
    });
  });

  describe('writeUInt64LE', () => {
    fixtures.valid.forEach(f => {
      it('encodes ' + f.dec, () => {
        const buffer = Buffer.alloc(8, 0);

        bufferutils.writeUInt64LE(buffer, f.dec, 0);
        assert.strictEqual(buffer.toString('hex'), f.hex);
      });
    });

    fixtures.invalid.writeUInt64LE.forEach(f => {
      it('throws on ' + f.description, () => {
        const buffer = Buffer.alloc(8, 0);

        assert.throws(() => {
          bufferutils.writeUInt64LE(buffer, f.dec, 0);
        }, new RegExp(f.exception));
      });
    });
  });

  describe('BufferWriter', () => {
    function testBuffer(
      bufferWriter: BufferWriter,
      expectedBuffer: Buffer,
      expectedOffset: number = expectedBuffer.length,
    ): void {
      assert.strictEqual(bufferWriter.offset, expectedOffset);
      assert.deepStrictEqual(
        bufferWriter.buffer.slice(0, expectedOffset),
        expectedBuffer.slice(0, expectedOffset),
      );
    }

    it('withCapacity', () => {
      const expectedBuffer = Buffer.from('04030201', 'hex');
      const withCapacity = BufferWriter.withCapacity(4);
      withCapacity.writeInt32(0x01020304);
      testBuffer(withCapacity, expectedBuffer);
    });

    it('writeUint8', () => {
      const values = [0, 1, 254, 255];
      const expectedBuffer = Buffer.from([0, 1, 0xfe, 0xff]);
      const bufferWriter = new BufferWriter(
        Buffer.allocUnsafe(expectedBuffer.length),
      );
      values.forEach((v: number) => {
        const expectedOffset = bufferWriter.offset + 1;
        bufferWriter.writeUInt8(v);
        testBuffer(bufferWriter, expectedBuffer, expectedOffset);
      });
      testBuffer(bufferWriter, expectedBuffer);
    });

    it('writeInt32', () => {
      const values = [
        0,
        1,
        Math.pow(2, 31) - 2,
        Math.pow(2, 31) - 1,
        -1,
        -Math.pow(2, 31),
      ];
      const expectedBuffer = concatToBuffer([
        [0, 0, 0, 0],
        [1, 0, 0, 0],
        [0xfe, 0xff, 0xff, 0x7f],
        [0xff, 0xff, 0xff, 0x7f],
        [0xff, 0xff, 0xff, 0xff],
        [0x00, 0x00, 0x00, 0x80],
      ]);
      const bufferWriter = new BufferWriter(
        Buffer.allocUnsafe(expectedBuffer.length),
      );
      values.forEach((value: number) => {
        const expectedOffset = bufferWriter.offset + 4;
        bufferWriter.writeInt32(value);
        testBuffer(bufferWriter, expectedBuffer, expectedOffset);
      });
      testBuffer(bufferWriter, expectedBuffer);
    });

    it('writeUInt32', () => {
      const maxUInt32 = Math.pow(2, 32) - 1;
      const values = [0, 1, Math.pow(2, 16), maxUInt32];
      const expectedBuffer = concatToBuffer([
        [0, 0, 0, 0],
        [1, 0, 0, 0],
        [0, 0, 1, 0],
        [0xff, 0xff, 0xff, 0xff],
      ]);
      const bufferWriter = new BufferWriter(
        Buffer.allocUnsafe(expectedBuffer.length),
      );
      values.forEach((value: number) => {
        const expectedOffset = bufferWriter.offset + 4;
        bufferWriter.writeUInt32(value);
        testBuffer(bufferWriter, expectedBuffer, expectedOffset);
      });
      testBuffer(bufferWriter, expectedBuffer);
    });

    it('writeUInt64', () => {
      const values = [
        0,
        1,
        Math.pow(2, 32),
        Number.MAX_SAFE_INTEGER /* 2^53 - 1 */,
      ];
      const expectedBuffer = concatToBuffer([
        [0, 0, 0, 0, 0, 0, 0, 0],
        [1, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 1, 0, 0, 0],
        [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1f, 0x00],
      ]);
      const bufferWriter = new BufferWriter(
        Buffer.allocUnsafe(expectedBuffer.length),
      );
      values.forEach((value: number) => {
        const expectedOffset = bufferWriter.offset + 8;
        bufferWriter.writeUInt64(value);
        testBuffer(bufferWriter, expectedBuffer, expectedOffset);
      });
      testBuffer(bufferWriter, expectedBuffer);
    });

    it('writeVarInt', () => {
      const values = [
        0,
        1,
        252,
        253,
        254,
        255,
        256,
        Math.pow(2, 16) - 2,
        Math.pow(2, 16) - 1,
        Math.pow(2, 16),
        Math.pow(2, 32) - 2,
        Math.pow(2, 32) - 1,
        Math.pow(2, 32),
        Number.MAX_SAFE_INTEGER,
      ];
      const expectedBuffer = concatToBuffer([
        [0x00],
        [0x01],
        [0xfc],
        [0xfd, 0xfd, 0x00],
        [0xfd, 0xfe, 0x00],
        [0xfd, 0xff, 0x00],
        [0xfd, 0x00, 0x01],
        [0xfd, 0xfe, 0xff],
        [0xfd, 0xff, 0xff],
        [0xfe, 0x00, 0x00, 0x01, 0x00],
        [0xfe, 0xfe, 0xff, 0xff, 0xff],
        [0xfe, 0xff, 0xff, 0xff, 0xff],
        [0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00],
        [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1f, 0x00],
      ]);
      const bufferWriter = new BufferWriter(
        Buffer.allocUnsafe(expectedBuffer.length),
      );
      values.forEach((value: number) => {
        const expectedOffset =
          bufferWriter.offset + varuint.encodingLength(value);
        bufferWriter.writeVarInt(value);
        testBuffer(bufferWriter, expectedBuffer, expectedOffset);
      });
      testBuffer(bufferWriter, expectedBuffer);
    });

    it('writeSlice', () => {
      const values = [[], [1], [1, 2, 3, 4], [254, 255]];
      const expectedBuffer = concatToBuffer(values);
      const bufferWriter = new BufferWriter(
        Buffer.allocUnsafe(expectedBuffer.length),
      );
      values.forEach((v: number[]) => {
        const expectedOffset = bufferWriter.offset + v.length;
        bufferWriter.writeSlice(Buffer.from(v));
        testBuffer(bufferWriter, expectedBuffer, expectedOffset);
      });
      testBuffer(bufferWriter, expectedBuffer);
      assert.throws(() => {
        bufferWriter.writeSlice(Buffer.from([0, 0]));
      }, /^Error: Cannot write slice out of bounds$/);
    });

    it('writeVarSlice', () => {
      const values = [
        Buffer.alloc(1, 1),
        Buffer.alloc(252, 2),
        Buffer.alloc(253, 3),
      ];
      const expectedBuffer = Buffer.concat([
        Buffer.from([0x01, 0x01]),
        Buffer.from([0xfc]),
        Buffer.alloc(252, 0x02),
        Buffer.from([0xfd, 0xfd, 0x00]),
        Buffer.alloc(253, 0x03),
      ]);
      const bufferWriter = new BufferWriter(
        Buffer.allocUnsafe(expectedBuffer.length),
      );
      values.forEach((value: Buffer) => {
        const expectedOffset =
          bufferWriter.offset +
          varuint.encodingLength(value.length) +
          value.length;
        bufferWriter.writeVarSlice(value);
        testBuffer(bufferWriter, expectedBuffer, expectedOffset);
      });
      testBuffer(bufferWriter, expectedBuffer);
    });

    it('writeVector', () => {
      const values = [
        [Buffer.alloc(1, 4), Buffer.alloc(253, 5)],
        Array(253).fill(Buffer.alloc(1, 6)),
      ];
      const expectedBuffer = Buffer.concat([
        Buffer.from([0x02]),
        Buffer.from([0x01, 0x04]),
        Buffer.from([0xfd, 0xfd, 0x00]),
        Buffer.alloc(253, 5),

        Buffer.from([0xfd, 0xfd, 0x00]),
        Buffer.concat(
          Array(253)
            .fill(0)
            .map(() => Buffer.from([0x01, 0x06])),
        ),
      ]);

      const bufferWriter = new BufferWriter(
        Buffer.allocUnsafe(expectedBuffer.length),
      );
      values.forEach((value: Buffer[]) => {
        const expectedOffset =
          bufferWriter.offset +
          varuint.encodingLength(value.length) +
          value.reduce(
            (sum: number, v) =>
              sum + varuint.encodingLength(v.length) + v.length,
            0,
          );
        bufferWriter.writeVector(value);
        testBuffer(bufferWriter, expectedBuffer, expectedOffset);
      });
      testBuffer(bufferWriter, expectedBuffer);
    });

    it('end', () => {
      const expected = Buffer.from('0403020108070605', 'hex');
      const bufferWriter = BufferWriter.withCapacity(8);
      bufferWriter.writeUInt32(0x01020304);
      bufferWriter.writeUInt32(0x05060708);
      const result = bufferWriter.end();
      testBuffer(bufferWriter, result);
      testBuffer(bufferWriter, expected);
    });
  });

  describe('BufferReader', () => {
    function testValue(
      bufferReader: BufferReader,
      value: Buffer | number,
      expectedValue: Buffer | number,
      expectedOffset: number = Buffer.isBuffer(expectedValue)
        ? expectedValue.length
        : 0,
    ): void {
      assert.strictEqual(bufferReader.offset, expectedOffset);
      if (Buffer.isBuffer(expectedValue)) {
        assert.deepStrictEqual(
          (value as Buffer).slice(0, expectedOffset),
          expectedValue.slice(0, expectedOffset),
        );
      } else {
        assert.strictEqual(value as number, expectedValue);
      }
    }

    it('readUint8', () => {
      const values = [0, 1, 0xfe, 0xff];
      const buffer = Buffer.from([0, 1, 0xfe, 0xff]);
      const bufferReader = new BufferReader(buffer);
      values.forEach((v: number) => {
        const expectedOffset = bufferReader.offset + 1;
        const val = bufferReader.readUInt8();
        testValue(bufferReader, val, v, expectedOffset);
      });
    });

    it('readInt32', () => {
      const values = [
        0,
        1,
        Math.pow(2, 31) - 2,
        Math.pow(2, 31) - 1,
        -1,
        -Math.pow(2, 31),
      ];
      const buffer = concatToBuffer([
        [0, 0, 0, 0],
        [1, 0, 0, 0],
        [0xfe, 0xff, 0xff, 0x7f],
        [0xff, 0xff, 0xff, 0x7f],
        [0xff, 0xff, 0xff, 0xff],
        [0x00, 0x00, 0x00, 0x80],
      ]);
      const bufferReader = new BufferReader(buffer);
      values.forEach((value: number) => {
        const expectedOffset = bufferReader.offset + 4;
        const val = bufferReader.readInt32();
        testValue(bufferReader, val, value, expectedOffset);
      });
    });

    it('readUInt32', () => {
      const maxUInt32 = Math.pow(2, 32) - 1;
      const values = [0, 1, Math.pow(2, 16), maxUInt32];
      const buffer = concatToBuffer([
        [0, 0, 0, 0],
        [1, 0, 0, 0],
        [0, 0, 1, 0],
        [0xff, 0xff, 0xff, 0xff],
      ]);
      const bufferReader = new BufferReader(buffer);
      values.forEach((value: number) => {
        const expectedOffset = bufferReader.offset + 4;
        const val = bufferReader.readUInt32();
        testValue(bufferReader, val, value, expectedOffset);
      });
    });

    it('readUInt64', () => {
      const values = [
        0,
        1,
        Math.pow(2, 32),
        Number.MAX_SAFE_INTEGER /* 2^53 - 1 */,
      ];
      const buffer = concatToBuffer([
        [0, 0, 0, 0, 0, 0, 0, 0],
        [1, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 1, 0, 0, 0],
        [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1f, 0x00],
      ]);
      const bufferReader = new BufferReader(buffer);
      values.forEach((value: number) => {
        const expectedOffset = bufferReader.offset + 8;
        const val = bufferReader.readUInt64();
        testValue(bufferReader, val, value, expectedOffset);
      });
    });

    it('readVarInt', () => {
      const values = [
        0,
        1,
        252,
        253,
        254,
        255,
        256,
        Math.pow(2, 16) - 2,
        Math.pow(2, 16) - 1,
        Math.pow(2, 16),
        Math.pow(2, 32) - 2,
        Math.pow(2, 32) - 1,
        Math.pow(2, 32),
        Number.MAX_SAFE_INTEGER,
      ];
      const buffer = concatToBuffer([
        [0x00],
        [0x01],
        [0xfc],
        [0xfd, 0xfd, 0x00],
        [0xfd, 0xfe, 0x00],
        [0xfd, 0xff, 0x00],
        [0xfd, 0x00, 0x01],
        [0xfd, 0xfe, 0xff],
        [0xfd, 0xff, 0xff],
        [0xfe, 0x00, 0x00, 0x01, 0x00],
        [0xfe, 0xfe, 0xff, 0xff, 0xff],
        [0xfe, 0xff, 0xff, 0xff, 0xff],
        [0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00],
        [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1f, 0x00],
      ]);
      const bufferReader = new BufferReader(buffer);
      values.forEach((value: number) => {
        const expectedOffset =
          bufferReader.offset + varuint.encodingLength(value);
        const val = bufferReader.readVarInt();
        testValue(bufferReader, val, value, expectedOffset);
      });
    });

    it('readSlice', () => {
      const values = [[1], [1, 2, 3, 4], [254, 255]];
      const buffer = concatToBuffer(values);
      const bufferReader = new BufferReader(buffer);
      values.forEach((v: number[]) => {
        const expectedOffset = bufferReader.offset + v.length;
        const val = bufferReader.readSlice(v.length);
        testValue(bufferReader, val, Buffer.from(v), expectedOffset);
      });
      assert.throws(() => {
        bufferReader.readSlice(2);
      }, /^Error: Cannot read slice out of bounds$/);
    });

    it('readVarSlice', () => {
      const values = [
        Buffer.alloc(1, 1),
        Buffer.alloc(252, 2),
        Buffer.alloc(253, 3),
      ];
      const buffer = Buffer.concat([
        Buffer.from([0x01, 0x01]),
        Buffer.from([0xfc]),
        Buffer.alloc(252, 0x02),
        Buffer.from([0xfd, 0xfd, 0x00]),
        Buffer.alloc(253, 0x03),
      ]);
      const bufferReader = new BufferReader(buffer);
      values.forEach((value: Buffer) => {
        const expectedOffset =
          bufferReader.offset +
          varuint.encodingLength(value.length) +
          value.length;
        const val = bufferReader.readVarSlice();
        testValue(bufferReader, val, value, expectedOffset);
      });
    });

    it('readVector', () => {
      const values = [
        [Buffer.alloc(1, 4), Buffer.alloc(253, 5)],
        Array(253).fill(Buffer.alloc(1, 6)),
      ];
      const buffer = Buffer.concat([
        Buffer.from([0x02]),
        Buffer.from([0x01, 0x04]),
        Buffer.from([0xfd, 0xfd, 0x00]),
        Buffer.alloc(253, 5),

        Buffer.from([0xfd, 0xfd, 0x00]),
        Buffer.concat(
          Array(253)
            .fill(0)
            .map(() => Buffer.from([0x01, 0x06])),
        ),
      ]);

      const bufferReader = new BufferReader(buffer);
      values.forEach((value: Buffer[]) => {
        const expectedOffset =
          bufferReader.offset +
          varuint.encodingLength(value.length) +
          value.reduce(
            (sum: number, v) =>
              sum + varuint.encodingLength(v.length) + v.length,
            0,
          );
        const val = bufferReader.readVector();
        testValue(
          bufferReader,
          Buffer.concat(val),
          Buffer.concat(value),
          expectedOffset,
        );
      });
    });
  });
});
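The readUInt64LE and writeUInt64 fixtures above stop at Number.MAX_SAFE_INTEGER because JavaScript numbers only hold 53 bits of integer precision. A sketch of how a 64-bit little-endian read can enforce that bound; this is an assumed helper for illustration, not the module's exact code:

// Sketch only: read a u64 LE into a JS number, guarding the 2^53 - 1 limit.
function readUInt64LESafe(buffer: Buffer, offset: number): number {
  const low = buffer.readUInt32LE(offset);
  const high = buffer.readUInt32LE(offset + 4);
  const value = high * 0x100000000 + low;
  if (!Number.isSafeInteger(value)) {
    throw new RangeError('value out of range');
  }
  return value;
}

// 0xff ff ff ff ff ff 1f 00 little-endian is 2^53 - 1, the largest safe value.
const max = Buffer.from([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1f, 0x00]);
console.log(readUInt64LESafe(max, 0) === Number.MAX_SAFE_INTEGER); // true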
24
test/crypto.js
Normal file
@@ -0,0 +1,24 @@
/* global describe, it */

var assert = require('assert')
var bcrypto = require('../src/crypto')

var fixtures = require('./fixtures/crypto')

describe('crypto', function () {
  ['hash160', 'hash256', 'ripemd160', 'sha1', 'sha256'].forEach(function (algorithm) {
    describe(algorithm, function () {
      fixtures.forEach(function (f) {
        var fn = bcrypto[algorithm]
        var expected = f[algorithm]

        it('returns ' + expected + ' for ' + f.hex, function () {
          var data = Buffer.from(f.hex, 'hex')
          var actual = fn(data).toString('hex')

          assert.strictEqual(actual, expected)
        })
      })
    })
  })
})
@@ -1,33 +0,0 @@
import * as assert from 'assert';
import { describe, it } from 'mocha';
import { crypto as bcrypto, TaggedHashPrefix } from '..';
import * as fixtures from './fixtures/crypto.json';

describe('crypto', () => {
  ['hash160', 'hash256', 'ripemd160', 'sha1', 'sha256'].forEach(algorithm => {
    describe(algorithm, () => {
      fixtures.hashes.forEach(f => {
        const fn = (bcrypto as any)[algorithm];
        const expected = (f as any)[algorithm];

        it('returns ' + expected + ' for ' + f.hex, () => {
          const data = Buffer.from(f.hex, 'hex');
          const actual = fn(data).toString('hex');

          assert.strictEqual(actual, expected);
        });
      });
    });
  });

  describe('taggedHash', () => {
    fixtures.taggedHash.forEach(f => {
      const bytes = Buffer.from(f.hex, 'hex');
      const expected = Buffer.from(f.result, 'hex');
      it(`returns ${f.result} for taggedHash "${f.tag}" of ${f.hex}`, () => {
        const actual = bcrypto.taggedHash(f.tag as TaggedHashPrefix, bytes);
        assert.strictEqual(actual.toString('hex'), expected.toString('hex'));
      });
    });
  });
});
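The hash names exercised above are thin compositions over standard primitives: hash160 is RIPEMD160(SHA256(x)), hash256 is SHA256(SHA256(x)), and taggedHash follows the BIP340 scheme SHA256(SHA256(tag) || SHA256(tag) || msg). A sketch built directly on Node's crypto module, assuming the Node build exposes ripemd160; the helper names are local to this sketch, not the library's API:

// Sketch only: the hash compositions the crypto tests cover.
import { createHash } from 'crypto';

const sha256 = (b: Buffer): Buffer => createHash('sha256').update(b).digest();

const hash160 = (b: Buffer): Buffer =>
  createHash('ripemd160').update(sha256(b)).digest();

const hash256 = (b: Buffer): Buffer => sha256(sha256(b));

const taggedHash = (tag: string, msg: Buffer): Buffer => {
  const tagHash = sha256(Buffer.from(tag, 'utf8'));
  return sha256(Buffer.concat([tagHash, tagHash, msg]));
};

// Usage: compare against the fixture values loaded by the tests above.
console.log(hash160(Buffer.alloc(0)).toString('hex'));
console.log(hash256(Buffer.alloc(0)).toString('hex'));
console.log(taggedHash('TapLeaf', Buffer.alloc(0)).toString('hex'));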
Some files were not shown because too many files have changed in this diff.