#1477 remove non-public dependencies of TransactionBuilder (classify & templates)

Vlad Stan 2020-11-27 12:02:07 +02:00
parent 5c6243f4e4
commit c217551884
68 changed files with 1 addition and 1,440 deletions


@ -1,59 +0,0 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
const script_1 = require('./script');
const multisig = require('./templates/multisig');
const nullData = require('./templates/nulldata');
const pubKey = require('./templates/pubkey');
const pubKeyHash = require('./templates/pubkeyhash');
const scriptHash = require('./templates/scripthash');
const witnessCommitment = require('./templates/witnesscommitment');
const witnessPubKeyHash = require('./templates/witnesspubkeyhash');
const witnessScriptHash = require('./templates/witnessscripthash');
const types = {
P2MS: 'multisig',
NONSTANDARD: 'nonstandard',
NULLDATA: 'nulldata',
P2PK: 'pubkey',
P2PKH: 'pubkeyhash',
P2SH: 'scripthash',
P2WPKH: 'witnesspubkeyhash',
P2WSH: 'witnessscripthash',
WITNESS_COMMITMENT: 'witnesscommitment',
};
exports.types = types;
function classifyOutput(script) {
if (witnessPubKeyHash.output.check(script)) return types.P2WPKH;
if (witnessScriptHash.output.check(script)) return types.P2WSH;
if (pubKeyHash.output.check(script)) return types.P2PKH;
if (scriptHash.output.check(script)) return types.P2SH;
// XXX: optimization, below functions .decompile before use
const chunks = script_1.decompile(script);
if (!chunks) throw new TypeError('Invalid script');
if (multisig.output.check(chunks)) return types.P2MS;
if (pubKey.output.check(chunks)) return types.P2PK;
if (witnessCommitment.output.check(chunks)) return types.WITNESS_COMMITMENT;
if (nullData.output.check(chunks)) return types.NULLDATA;
return types.NONSTANDARD;
}
exports.output = classifyOutput;
function classifyInput(script, allowIncomplete) {
// XXX: optimization, below functions .decompile before use
const chunks = script_1.decompile(script);
if (!chunks) throw new TypeError('Invalid script');
if (pubKeyHash.input.check(chunks)) return types.P2PKH;
if (scriptHash.input.check(chunks, allowIncomplete)) return types.P2SH;
if (multisig.input.check(chunks, allowIncomplete)) return types.P2MS;
if (pubKey.input.check(chunks)) return types.P2PK;
return types.NONSTANDARD;
}
exports.input = classifyInput;
function classifyWitness(script, allowIncomplete) {
// XXX: optimization, below functions .decompile before use
const chunks = script_1.decompile(script);
if (!chunks) throw new TypeError('Invalid script');
if (witnessPubKeyHash.input.check(chunks)) return types.P2WPKH;
if (witnessScriptHash.input.check(chunks, allowIncomplete))
return types.P2WSH;
return types.NONSTANDARD;
}
exports.witness = classifyWitness;


@ -1,6 +0,0 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
const input = require('./input');
exports.input = input;
const output = require('./output');
exports.output = output;


@ -1,23 +0,0 @@
'use strict';
// OP_0 [signatures ...]
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
const script_1 = require('../../script');
function partialSignature(value) {
return (
value === script_1.OPS.OP_0 || bscript.isCanonicalScriptSignature(value)
);
}
function check(script, allowIncomplete) {
const chunks = bscript.decompile(script);
if (chunks.length < 2) return false;
if (chunks[0] !== script_1.OPS.OP_0) return false;
if (allowIncomplete) {
return chunks.slice(1).every(partialSignature);
}
return chunks.slice(1).every(bscript.isCanonicalScriptSignature);
}
exports.check = check;
check.toJSON = () => {
return 'multisig input';
};


@ -1,27 +0,0 @@
'use strict';
// m [pubKeys ...] n OP_CHECKMULTISIG
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
const script_1 = require('../../script');
const types = require('../../types');
const OP_INT_BASE = script_1.OPS.OP_RESERVED; // OP_1 - 1
function check(script, allowIncomplete) {
const chunks = bscript.decompile(script);
if (chunks.length < 4) return false;
if (chunks[chunks.length - 1] !== script_1.OPS.OP_CHECKMULTISIG) return false;
if (!types.Number(chunks[0])) return false;
if (!types.Number(chunks[chunks.length - 2])) return false;
const m = chunks[0] - OP_INT_BASE;
const n = chunks[chunks.length - 2] - OP_INT_BASE;
if (m <= 0) return false;
if (n > 16) return false;
if (m > n) return false;
if (n !== chunks.length - 3) return false;
if (allowIncomplete) return true;
const keys = chunks.slice(1, -2);
return keys.every(bscript.isCanonicalPubKey);
}
exports.check = check;
check.toJSON = () => {
return 'multi-sig output';
};


@ -1,15 +0,0 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
// OP_RETURN {data}
const bscript = require('../script');
const OPS = bscript.OPS;
function check(script) {
const buffer = bscript.compile(script);
return buffer.length > 1 && buffer[0] === OPS.OP_RETURN;
}
exports.check = check;
check.toJSON = () => {
return 'null data output';
};
const output = { check };
exports.output = output;


@ -1,6 +0,0 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
const input = require('./input');
exports.input = input;
const output = require('./output');
exports.output = output;


@ -1,12 +0,0 @@
'use strict';
// {signature}
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
function check(script) {
const chunks = bscript.decompile(script);
return chunks.length === 1 && bscript.isCanonicalScriptSignature(chunks[0]);
}
exports.check = check;
check.toJSON = () => {
return 'pubKey input';
};


@ -1,17 +0,0 @@
'use strict';
// {pubKey} OP_CHECKSIG
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
const script_1 = require('../../script');
function check(script) {
const chunks = bscript.decompile(script);
return (
chunks.length === 2 &&
bscript.isCanonicalPubKey(chunks[0]) &&
chunks[1] === script_1.OPS.OP_CHECKSIG
);
}
exports.check = check;
check.toJSON = () => {
return 'pubKey output';
};


@ -1,6 +0,0 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
const input = require('./input');
exports.input = input;
const output = require('./output');
exports.output = output;


@ -1,16 +0,0 @@
'use strict';
// {signature} {pubKey}
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
function check(script) {
const chunks = bscript.decompile(script);
return (
chunks.length === 2 &&
bscript.isCanonicalScriptSignature(chunks[0]) &&
bscript.isCanonicalPubKey(chunks[1])
);
}
exports.check = check;
check.toJSON = () => {
return 'pubKeyHash input';
};


@ -1,20 +0,0 @@
'use strict';
// OP_DUP OP_HASH160 {pubKeyHash} OP_EQUALVERIFY OP_CHECKSIG
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
const script_1 = require('../../script');
function check(script) {
const buffer = bscript.compile(script);
return (
buffer.length === 25 &&
buffer[0] === script_1.OPS.OP_DUP &&
buffer[1] === script_1.OPS.OP_HASH160 &&
buffer[2] === 0x14 &&
buffer[23] === script_1.OPS.OP_EQUALVERIFY &&
buffer[24] === script_1.OPS.OP_CHECKSIG
);
}
exports.check = check;
check.toJSON = () => {
return 'pubKeyHash output';
};


@ -1,6 +0,0 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
const input = require('./input');
exports.input = input;
const output = require('./output');
exports.output = output;


@ -1,50 +0,0 @@
'use strict';
// <scriptSig> {serialized scriptPubKey script}
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
const p2ms = require('../multisig');
const p2pk = require('../pubkey');
const p2pkh = require('../pubkeyhash');
const p2wpkho = require('../witnesspubkeyhash/output');
const p2wsho = require('../witnessscripthash/output');
function check(script, allowIncomplete) {
const chunks = bscript.decompile(script);
if (chunks.length < 1) return false;
const lastChunk = chunks[chunks.length - 1];
if (!Buffer.isBuffer(lastChunk)) return false;
const scriptSigChunks = bscript.decompile(
bscript.compile(chunks.slice(0, -1)),
);
const redeemScriptChunks = bscript.decompile(lastChunk);
// is redeemScript a valid script?
if (!redeemScriptChunks) return false;
// is redeemScriptSig push only?
if (!bscript.isPushOnly(scriptSigChunks)) return false;
// is witness?
if (chunks.length === 1) {
return (
p2wsho.check(redeemScriptChunks) || p2wpkho.check(redeemScriptChunks)
);
}
// match types
if (
p2pkh.input.check(scriptSigChunks) &&
p2pkh.output.check(redeemScriptChunks)
)
return true;
if (
p2ms.input.check(scriptSigChunks, allowIncomplete) &&
p2ms.output.check(redeemScriptChunks)
)
return true;
if (
p2pk.input.check(scriptSigChunks) &&
p2pk.output.check(redeemScriptChunks)
)
return true;
return false;
}
exports.check = check;
check.toJSON = () => {
return 'scriptHash input';
};


@ -1,18 +0,0 @@
'use strict';
// OP_HASH160 {scriptHash} OP_EQUAL
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
const script_1 = require('../../script');
function check(script) {
const buffer = bscript.compile(script);
return (
buffer.length === 23 &&
buffer[0] === script_1.OPS.OP_HASH160 &&
buffer[1] === 0x14 &&
buffer[22] === script_1.OPS.OP_EQUAL
);
}
exports.check = check;
check.toJSON = () => {
return 'scriptHash output';
};


@ -1,4 +0,0 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
const output = require('./output');
exports.output = output;


@ -1,34 +0,0 @@
'use strict';
// OP_RETURN {aa21a9ed} {commitment}
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
const script_1 = require('../../script');
const types = require('../../types');
const typeforce = require('typeforce');
const HEADER = Buffer.from('aa21a9ed', 'hex');
function check(script) {
const buffer = bscript.compile(script);
return (
buffer.length > 37 &&
buffer[0] === script_1.OPS.OP_RETURN &&
buffer[1] === 0x24 &&
buffer.slice(2, 6).equals(HEADER)
);
}
exports.check = check;
check.toJSON = () => {
return 'Witness commitment output';
};
function encode(commitment) {
typeforce(types.Hash256bit, commitment);
const buffer = Buffer.allocUnsafe(36);
HEADER.copy(buffer, 0);
commitment.copy(buffer, 4);
return bscript.compile([script_1.OPS.OP_RETURN, buffer]);
}
exports.encode = encode;
function decode(buffer) {
typeforce(check, buffer);
return bscript.decompile(buffer)[1].slice(4, 36);
}
exports.decode = decode;


@ -1,6 +0,0 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
const input = require('./input');
exports.input = input;
const output = require('./output');
exports.output = output;


@ -1,19 +0,0 @@
'use strict';
// {signature} {pubKey}
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
function isCompressedCanonicalPubKey(pubKey) {
return bscript.isCanonicalPubKey(pubKey) && pubKey.length === 33;
}
function check(script) {
const chunks = bscript.decompile(script);
return (
chunks.length === 2 &&
bscript.isCanonicalScriptSignature(chunks[0]) &&
isCompressedCanonicalPubKey(chunks[1])
);
}
exports.check = check;
check.toJSON = () => {
return 'witnessPubKeyHash input';
};


@ -1,17 +0,0 @@
'use strict';
// OP_0 {pubKeyHash}
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
const script_1 = require('../../script');
function check(script) {
const buffer = bscript.compile(script);
return (
buffer.length === 22 &&
buffer[0] === script_1.OPS.OP_0 &&
buffer[1] === 0x14
);
}
exports.check = check;
check.toJSON = () => {
return 'Witness pubKeyHash output';
};


@ -1,6 +0,0 @@
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
const input = require('./input');
exports.input = input;
const output = require('./output');
exports.output = output;


@ -1,39 +0,0 @@
'use strict';
// <scriptSig> {serialized scriptPubKey script}
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
const typeforce = require('typeforce');
const p2ms = require('../multisig');
const p2pk = require('../pubkey');
const p2pkh = require('../pubkeyhash');
function check(chunks, allowIncomplete) {
typeforce(typeforce.Array, chunks);
if (chunks.length < 1) return false;
const witnessScript = chunks[chunks.length - 1];
if (!Buffer.isBuffer(witnessScript)) return false;
const witnessScriptChunks = bscript.decompile(witnessScript);
// is witnessScript a valid script?
if (!witnessScriptChunks || witnessScriptChunks.length === 0) return false;
const witnessRawScriptSig = bscript.compile(chunks.slice(0, -1));
// match types
if (
p2pkh.input.check(witnessRawScriptSig) &&
p2pkh.output.check(witnessScriptChunks)
)
return true;
if (
p2ms.input.check(witnessRawScriptSig, allowIncomplete) &&
p2ms.output.check(witnessScriptChunks)
)
return true;
if (
p2pk.input.check(witnessRawScriptSig) &&
p2pk.output.check(witnessScriptChunks)
)
return true;
return false;
}
exports.check = check;
check.toJSON = () => {
return 'witnessScriptHash input';
};


@ -1,17 +0,0 @@
'use strict';
// OP_0 {scriptHash}
Object.defineProperty(exports, '__esModule', { value: true });
const bscript = require('../../script');
const script_1 = require('../../script');
function check(script) {
const buffer = bscript.compile(script);
return (
buffer.length === 34 &&
buffer[0] === script_1.OPS.OP_0 &&
buffer[1] === 0x20
);
}
exports.check = check;
check.toJSON = () => {
return 'Witness scriptHash output';
};


@ -1,180 +0,0 @@
import * as assert from 'assert';
import { describe, it } from 'mocha';
import * as classify from '../src/classify';
import * as bscript from '../src/script';
import * as fixtures from './fixtures/templates.json';
import * as multisig from '../src/templates/multisig';
import * as nullData from '../src/templates/nulldata';
import * as pubKey from '../src/templates/pubkey';
import * as pubKeyHash from '../src/templates/pubkeyhash';
import * as scriptHash from '../src/templates/scripthash';
import * as witnessCommitment from '../src/templates/witnesscommitment';
import * as witnessPubKeyHash from '../src/templates/witnesspubkeyhash';
import * as witnessScriptHash from '../src/templates/witnessscripthash';
const tmap = {
pubKey,
pubKeyHash,
scriptHash,
witnessPubKeyHash,
witnessScriptHash,
multisig,
nullData,
witnessCommitment,
};
describe('classify', () => {
describe('input', () => {
fixtures.valid.forEach(f => {
if (!f.input) return;
it('classifies ' + f.input + ' as ' + f.type, () => {
const input = bscript.fromASM(f.input);
const type = classify.input(input);
assert.strictEqual(type, f.type);
});
});
fixtures.valid.forEach(f => {
if (!f.input) return;
if (!f.typeIncomplete) return;
it('classifies incomplete ' + f.input + ' as ' + f.typeIncomplete, () => {
const input = bscript.fromASM(f.input);
const type = classify.input(input, true);
assert.strictEqual(type, f.typeIncomplete);
});
});
});
describe('classifyOutput', () => {
fixtures.valid.forEach(f => {
if (!f.output) return;
it('classifies ' + f.output + ' as ' + f.type, () => {
const output = bscript.fromASM(f.output);
const type = classify.output(output);
assert.strictEqual(type, f.type);
});
});
});
[
'pubKey',
'pubKeyHash',
'scriptHash',
'witnessPubKeyHash',
'witnessScriptHash',
'multisig',
'nullData',
'witnessCommitment',
].forEach(name => {
const inputType = (tmap as any)[name].input;
const outputType = (tmap as any)[name].output;
describe(name + '.input.check', () => {
fixtures.valid.forEach(f => {
if (name.toLowerCase() === classify.types.P2WPKH) return;
if (name.toLowerCase() === classify.types.P2WSH) return;
const expected = name.toLowerCase() === f.type.toLowerCase();
if (inputType && f.input) {
const input = bscript.fromASM(f.input);
it('returns ' + expected + ' for ' + f.input, () => {
assert.strictEqual(inputType.check(input), expected);
});
if (f.typeIncomplete) {
const expectedIncomplete = name.toLowerCase() === f.typeIncomplete;
it('returns ' + expectedIncomplete + ' for incomplete ' + f.input, () => {
assert.strictEqual(
inputType.check(input, true),
expectedIncomplete,
);
});
}
}
});
if (!(fixtures.invalid as any)[name]) return;
(fixtures.invalid as any)[name].inputs.forEach((f: any) => {
if (!f.input && !f.inputHex) return;
it(
'returns false for ' +
f.description +
' (' +
(f.input || f.inputHex) +
')',
() => {
let input;
if (f.input) {
input = bscript.fromASM(f.input);
} else {
input = Buffer.from(f.inputHex, 'hex');
}
assert.strictEqual(inputType.check(input), false);
},
);
});
});
describe(name + '.output.check', () => {
fixtures.valid.forEach(f => {
const expected = name.toLowerCase() === f.type;
if (outputType && f.output) {
it('returns ' + expected + ' for ' + f.output, () => {
const output = bscript.fromASM(f.output);
if (
name.toLowerCase() === 'nulldata' &&
f.type === classify.types.WITNESS_COMMITMENT
)
return;
if (
name.toLowerCase() === 'witnesscommitment' &&
f.type === classify.types.NULLDATA
)
return;
assert.strictEqual(outputType.check(output), expected);
});
}
});
if (!(fixtures.invalid as any)[name]) return;
(fixtures.invalid as any)[name].outputs.forEach((f: any) => {
if (!f.output && !f.outputHex) return;
it(
'returns false for ' +
f.description +
' (' +
(f.output || f.outputHex) +
')',
() => {
let output;
if (f.output) {
output = bscript.fromASM(f.output);
} else {
output = Buffer.from(f.outputHex, 'hex');
}
assert.strictEqual(outputType.check(output), false);
},
);
});
});
});
});


@ -255,276 +255,5 @@
"000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
]
}
],
"invalid": {
"pubKey": {
"inputs": [
{
"description": "non-canonical signature (too short)",
"input": "304402207515cf147d201f411092e6be5a64a6006f9308fad7b2a8fdaab22cd86ce764c202200974b8aca7bf51dbf54150d3884e1ae04f675637b926ec33bf7593"
},
{
"description": "non-canonical signature (too long)",
"input": "304402207515cf147d201f411092e6be5a64a6006f9308fad7b2a8fdaab22cd86ce764c202200974b8aca7bf51dbf54150d3884e1ae04f675637b926ec33bf75939446f6ca28ffffffff01"
},
{
"description": "non-canonical signature (invalid hashType)",
"input": "304402207515cf147d201f411092e6be5a64a6006f9308fad7b2a8fdaab22cd86ce764c202200974b8aca7bf51dbf54150d3884e1ae04f675637b926ec33bf75939446f6ca28ff"
}
],
"outputs": [
{
"description": "non-canonical pubkey (too short)",
"output": "02359c6e3f04cefbf089cf1d6670dc47c3fb4df68e2bad1fa5a369f9ce OP_CHECKSIG"
},
{
"description": "non-canonical pubkey (too long)",
"output": "02359c6e3f04cefbf089cf1d6670dc47c3fb4df68e2bad1fa5a369f9ce4b42bbd1ffffff OP_CHECKSIG"
},
{
"description": "last operator is wrong for pubkey-output",
"outputHex": "21027a71801ab59336de37785c50005b6abd8ea859eecce1edbe8e81afa74ee5c752ae"
},
{
"description": "missing OP_CHECKSIG",
"outputHex": "21027a71801ab59336de37785c50005b6abd8ea859eecce1edbe8e81afa74ee5c752"
},
{
"description": "non-canonical pubkey (bad prefix)",
"output": "427a71801ab59336de37785c50005b6abd8ea859eecce1edbe8e81afa74ee5c752 OP_CHECKSIG"
},
{
"description": "has extra opcode at the end isPubKeyOutput",
"output": "027a71801ab59336de37785c50005b6abd8ea859eecce1edbe8e81afa74ee5c752 OP_CHECKSIG OP_0"
}
]
},
"pubKeyHash": {
"inputs": [
{
"description": "pubKeyHash input : extraneous data",
"input": "304402207515cf147d201f411092e6be5a64a6006f9308fad7b2a8fdaab22cd86ce764c202200974b8aca7bf51dbf54150d3884e1ae04f675637b926ec33bf75939446f6ca2801 02359c6e3f04cefbf089cf1d6670dc47c3fb4df68e2bad1fa5a369f9ce4b42bbd1 ffffffff"
}
],
"outputs": [
{
"description": "non-minimal encoded isPubKeyHashOutput (non BIP62 compliant)",
"outputHex": "76a94c14aa4d7985c57e011a8b3dd8e0e5a73aaef41629c588ac"
},
{
"description": "bad OP_DUP isPubKeyHashOutput",
"outputHex": "aca914aa4d7985c57e011a8b3dd8e0e5a73aaef41629c588ac"
},
{
"description": "bad OP_HASH160 isPubKeyHashOutput",
"outputHex": "76ac14aa4d7985c57e011a8b3dd8e0e5a73aaef41629c588ac"
},
{
"description": "bad OP_EQUALVERIFY isPubKeyHashOutput",
"outputHex": "76a914aa4d7985c57e011a8b3dd8e0e5a73aaef41629c5acac"
},
{
"description": "bad OP_CHECKSIG isPubKeyHashOutput",
"outputHex": "76a914aa4d7985c57e011a8b3dd8e0e5a73aaef41629c58888"
},
{
"description": "bad length isPubKeyHashOutput",
"outputHex": "76a920aa4d7985c57e011a8b3dd8e0e5a73aaef41629c588ac"
},
{
"description": "has something at the end isPubKeyHashOutput",
"outputHex": "76a920aa4d7985c57e011a8b3dd8e0e5a73aaef41629c588ac00"
},
{
"exception": "Expected Buffer\\(Length: 20\\), got Buffer\\(Length: 2\\)",
"hash": "ffff"
}
]
},
"scriptHash": {
"inputs": [
{
"description": "redeemScript not data",
"input": "OP_0 304402207515cf147d201f411092e6be5a64a6006f9308fad7b2a8fdaab22cd86ce764c202200974b8aca7bf51dbf54150d3884e1ae04f675637b926ec33bf75939446f6ca2801 3045022100ef253c1faa39e65115872519e5f0a33bbecf430c0f35cf562beabbad4da24d8d02201742be8ee49812a73adea3007c9641ce6725c32cd44ddb8e3a3af460015d140501 OP_RESERVED"
},
{
"description": "redeemScript is a signature, therefore not a valid script",
"input": "OP_0 3045022100e12b17b3a4c80c401a1687487bd2bafee9e5f1f8f1ffc6180ce186672ad7b43a02205e316d1e5e71822f5ef301b694e578fa9c94af4f5f098c952c833f4691307f4e01"
}
],
"outputs": [
{
"description": "non-minimal encoded isScriptHashOutput (non BIP62 compliant)",
"outputHex": "a94c14c286a1af0947f58d1ad787385b1c2c4a976f9e7187"
},
{
"description": "wrong OP_HASH160 opcode",
"outputHex": "ac4c14c286a1af0947f58d1ad787385b1c2c4a976f9e7187"
},
{
"description": "wrong length marker",
"outputHex": "a916c286a1af0947f58d1ad787385b1c2c4a976f9e7187"
},
{
"description": "wrong OP_EQUAL opcode",
"outputHex": "a914c286a1af0947f58d1ad787385b1c2c4a976f9e7188"
},
{
"exception": "Expected Buffer\\(Length: 20\\), got Buffer\\(Length: 3\\)",
"hash": "ffffff"
}
]
},
"multisig": {
"inputs": [
{
"description": "Not enough signatures provided",
"type": "multisig",
"output": "OP_2 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340 OP_2 OP_CHECKMULTISIG",
"signatures": []
},
{
"exception": "Not enough signatures provided",
"output": "OP_2 02359c6e3f04cefbf089cf1d6670dc47c3fb4df68e2bad1fa5a369f9ce4b42bbd1 0395a9d84d47d524548f79f435758c01faec5da2b7e551d3b8c995b7e06326ae4a OP_2 OP_CHECKMULTISIG",
"signatures": [
"304402207515cf147d201f411092e6be5a64a6006f9308fad7b2a8fdaab22cd86ce764c202200974b8aca7bf51dbf54150d3884e1ae04f675637b926ec33bf75939446f6ca2801"
]
},
{
"exception": "Too many signatures provided",
"output": "OP_2 02359c6e3f04cefbf089cf1d6670dc47c3fb4df68e2bad1fa5a369f9ce4b42bbd1 0395a9d84d47d524548f79f435758c01faec5da2b7e551d3b8c995b7e06326ae4a OP_2 OP_CHECKMULTISIG",
"signatures": [
"304402207515cf147d201f411092e6be5a64a6006f9308fad7b2a8fdaab22cd86ce764c202200974b8aca7bf51dbf54150d3884e1ae04f675637b926ec33bf75939446f6ca2801",
"3045022100ef253c1faa39e65115872519e5f0a33bbecf430c0f35cf562beabbad4da24d8d02201742be8ee49812a73adea3007c9641ce6725c32cd44ddb8e3a3af460015d140501",
"3045022100ef253c1faa39e65115872519e5f0a33bbecf430c0f35cf562beabbad4da24d8d02201742be8ee49812a73adea3007c9641ce6725c32cd44ddb8e3a3af460015d140501"
]
}
],
"outputs": [
{
"description": "OP_CHECKMULTISIG not found",
"output": "OP_0 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340 OP_2 OP_HASH160"
},
{
"description": "less than 4 chunks",
"output": "OP_0 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 OP_HASH160"
},
{
"description": "m === 0",
"output": "OP_0 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340 OP_2 OP_CHECKMULTISIG"
},
{
"description": "m < OP_1",
"output": "OP_1NEGATE 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340 OP_2 OP_CHECKMULTISIG"
},
{
"description": "m > OP_16",
"output": "OP_NOP 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340 OP_2 OP_CHECKMULTISIG"
},
{
"description": "n === 0",
"output": "OP_1 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340 OP_0 OP_CHECKMULTISIG"
},
{
"description": "n < OP_1",
"output": "OP_1 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340 OP_1NEGATE OP_CHECKMULTISIG"
},
{
"description": "n > OP_16",
"output": "OP_1 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340 OP_NOP OP_CHECKMULTISIG"
},
{
"description": "n < m",
"output": "OP_2 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340 OP_1 OP_CHECKMULTISIG"
},
{
"description": "n < len(pubKeys)",
"output": "OP_2 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340 024289801366bcee6172b771cf5a7f13aaecd237a0b9a1ff9d769cabc2e6b70a34 OP_2 OP_CHECKMULTISIG"
},
{
"description": "n > len(pubKeys)",
"output": "OP_1 024289801366bcee6172b771cf5a7f13aaecd237a0b9a1ff9d769cabc2e6b70a34 OP_2 OP_CHECKMULTISIG"
},
{
"description": "m is data",
"output": "ffff 024289801366bcee6172b771cf5a7f13aaecd237a0b9a1ff9d769cabc2e6b70a34 OP_1 OP_CHECKMULTISIG"
},
{
"description": "n is data",
"output": "OP_1 024289801366bcee6172b771cf5a7f13aaecd237a0b9a1ff9d769cabc2e6b70a34 ffff OP_CHECKMULTISIG"
},
{
"description": "non-canonical pubKey (bad length)",
"output": "OP_1 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798ffff OP_1 OP_CHECKMULTISIG"
},
{
"exception": "Not enough pubKeys provided",
"m": 4,
"pubKeys": [
"02ea1297665dd733d444f31ec2581020004892cdaaf3dd6c0107c615afb839785f",
"02fab2dea1458990793f56f42e4a47dbf35a12a351f26fa5d7e0cc7447eaafa21f",
"036c6802ce7e8113723dd92cdb852e492ebb157a871ca532c3cb9ed08248ff0e19"
],
"signatures": [
"304402207515cf147d201f411092e6be5a64a6006f9308fad7b2a8fdaab22cd86ce764c202200974b8aca7bf51dbf54150d3884e1ae04f675637b926ec33bf75939446f6ca2801"
]
}
]
},
"witnessPubKeyHash": {
"inputs": [],
"outputs": [
{
"description": "wrong version",
"outputHex": "51149090909090909090909090909090909090909090"
},
{
"description": "wrong length marker",
"outputHex": "00209090909090909090909090909090909090909090"
},
{
"exception": "Expected Buffer\\(Length: 20\\), got Buffer\\(Length: 3\\)",
"hash": "ffffff"
}
]
},
"witnessScriptHash": {
"inputs": [],
"outputs": [
{
"description": "wrong version",
"outputHex": "51209090909090909090909090909090909090909090909090909090909090909090"
},
{
"description": "wrong length marker",
"outputHex": "00219090909090909090909090909090909090909090909090909090909090909090"
},
{
"exception": "Expected Buffer\\(Length: 32\\), got Buffer\\(Length: 3\\)",
"hash": "ffffff"
}
]
},
"witnessCommitment": {
"inputs": [],
"outputs": [
{
"exception": "",
"commitment": "abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd"
},
{
"description": "wrong OPCODE at the start",
"scriptPubKeyHex": "6024aa21a9ed4db4fb830efe3e804337413ffe8ad7393af301e0ec8e71b6e6f2b860a56f4dcd"
},
{
"description": "wrong length marker",
"scriptPubKeyHex": "6a23aa21a9ed4db4fb830efe3e804337413ffe8ad7393af301e0ec8e71b6e6f2b860a56f4dcd"
},
{
"description": "commitment of wrong length",
"scriptPubKeyHex": "6a23aa21a9ed4db4fb830efe3e804337413ffe8ad7393af301e0ec8e71b6e6f2b860a56f4d"
}
]
}
}
]
}


@ -1,71 +0,0 @@
import { decompile } from './script';
import * as multisig from './templates/multisig';
import * as nullData from './templates/nulldata';
import * as pubKey from './templates/pubkey';
import * as pubKeyHash from './templates/pubkeyhash';
import * as scriptHash from './templates/scripthash';
import * as witnessCommitment from './templates/witnesscommitment';
import * as witnessPubKeyHash from './templates/witnesspubkeyhash';
import * as witnessScriptHash from './templates/witnessscripthash';
const types = {
P2MS: 'multisig' as string,
NONSTANDARD: 'nonstandard' as string,
NULLDATA: 'nulldata' as string,
P2PK: 'pubkey' as string,
P2PKH: 'pubkeyhash' as string,
P2SH: 'scripthash' as string,
P2WPKH: 'witnesspubkeyhash' as string,
P2WSH: 'witnessscripthash' as string,
WITNESS_COMMITMENT: 'witnesscommitment' as string,
};
function classifyOutput(script: Buffer): string {
if (witnessPubKeyHash.output.check(script)) return types.P2WPKH;
if (witnessScriptHash.output.check(script)) return types.P2WSH;
if (pubKeyHash.output.check(script)) return types.P2PKH;
if (scriptHash.output.check(script)) return types.P2SH;
// XXX: optimization, below functions .decompile before use
const chunks = decompile(script);
if (!chunks) throw new TypeError('Invalid script');
if (multisig.output.check(chunks)) return types.P2MS;
if (pubKey.output.check(chunks)) return types.P2PK;
if (witnessCommitment.output.check(chunks)) return types.WITNESS_COMMITMENT;
if (nullData.output.check(chunks)) return types.NULLDATA;
return types.NONSTANDARD;
}
function classifyInput(script: Buffer, allowIncomplete?: boolean): string {
// XXX: optimization, below functions .decompile before use
const chunks = decompile(script);
if (!chunks) throw new TypeError('Invalid script');
if (pubKeyHash.input.check(chunks)) return types.P2PKH;
if (scriptHash.input.check(chunks, allowIncomplete)) return types.P2SH;
if (multisig.input.check(chunks, allowIncomplete)) return types.P2MS;
if (pubKey.input.check(chunks)) return types.P2PK;
return types.NONSTANDARD;
}
function classifyWitness(script: Buffer[], allowIncomplete?: boolean): string {
// XXX: optimization, below functions .decompile before use
const chunks = decompile(script);
if (!chunks) throw new TypeError('Invalid script');
if (witnessPubKeyHash.input.check(chunks)) return types.P2WPKH;
if (witnessScriptHash.input.check(chunks as Buffer[], allowIncomplete))
return types.P2WSH;
return types.NONSTANDARD;
}
export {
classifyInput as input,
classifyOutput as output,
classifyWitness as witness,
types,
};
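As a usage sketch (illustrative, not part of the commit): the classifiers take a compiled script Buffer and return one of the type strings above. The import paths mirror the test file's style, the P2PKH script reuses the 20-byte hash from the fixtures, and the incomplete multisig scriptSig is only a placeholder example.

import * as bscript from '../src/script';
import * as classify from '../src/classify';

// A standard P2PKH scriptPubKey (20-byte hash taken from the fixtures).
const p2pkhOutput = bscript.fromASM(
  'OP_DUP OP_HASH160 aa4d7985c57e011a8b3dd8e0e5a73aaef41629c5 OP_EQUALVERIFY OP_CHECKSIG',
);
console.log(classify.output(p2pkhOutput)); // 'pubkeyhash'

// A partially signed multisig scriptSig: OP_0 followed by signature placeholders.
// Without allowIncomplete the placeholders fail the canonical-signature test.
const partial = bscript.fromASM('OP_0 OP_0 OP_0');
console.log(classify.input(partial)); // 'nonstandard'
console.log(classify.input(partial, true)); // 'multisig'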


@ -1,4 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,31 +0,0 @@
// OP_0 [signatures ...]
import { Stack } from '../../payments';
import * as bscript from '../../script';
import { OPS } from '../../script';
function partialSignature(value: number | Buffer): boolean {
return (
value === OPS.OP_0 || bscript.isCanonicalScriptSignature(value as Buffer)
);
}
export function check(
script: Buffer | Stack,
allowIncomplete?: boolean,
): boolean {
const chunks = bscript.decompile(script) as Stack;
if (chunks.length < 2) return false;
if (chunks[0] !== OPS.OP_0) return false;
if (allowIncomplete) {
return chunks.slice(1).every(partialSignature);
}
return (chunks.slice(1) as Buffer[]).every(
bscript.isCanonicalScriptSignature,
);
}
check.toJSON = (): string => {
return 'multisig input';
};


@ -1,33 +0,0 @@
// m [pubKeys ...] n OP_CHECKMULTISIG
import { Stack } from '../../payments';
import * as bscript from '../../script';
import { OPS } from '../../script';
import * as types from '../../types';
const OP_INT_BASE = OPS.OP_RESERVED; // OP_1 - 1
export function check(
script: Buffer | Stack,
allowIncomplete?: boolean,
): boolean {
const chunks = bscript.decompile(script) as Stack;
if (chunks.length < 4) return false;
if (chunks[chunks.length - 1] !== OPS.OP_CHECKMULTISIG) return false;
if (!types.Number(chunks[0])) return false;
if (!types.Number(chunks[chunks.length - 2])) return false;
const m = (chunks[0] as number) - OP_INT_BASE;
const n = (chunks[chunks.length - 2] as number) - OP_INT_BASE;
if (m <= 0) return false;
if (n > 16) return false;
if (m > n) return false;
if (n !== chunks.length - 3) return false;
if (allowIncomplete) return true;
const keys = chunks.slice(1, -2) as Buffer[];
return keys.every(bscript.isCanonicalPubKey);
}
check.toJSON = (): string => {
return 'multi-sig output';
};
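The small-integer opcodes encode m and n directly: subtracting OP_INT_BASE (OP_RESERVED, i.e. OP_1 - 1) turns OP_2 into 2, and the guards above reject m <= 0, n > 16, m > n, and a pubkey count that disagrees with n. An illustrative sketch, reusing pubkeys and the "n < m" case from the fixtures:

import * as bscript from '../src/script';
import * as multisig from '../src/templates/multisig';

// Valid 2-of-2: m = OP_2 - OP_RESERVED = 2, n = 2, two canonical pubkeys.
const twoOfTwo = bscript.fromASM(
  'OP_2 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798' +
    ' 02b80011a883a0fd621ad46dfc405df1e74bf075cbaf700fd4aebef6e96f848340' +
    ' OP_2 OP_CHECKMULTISIG',
);
console.log(multisig.output.check(twoOfTwo)); // true

// Rejected: a 2-of-1 script trips the m > n guard.
const twoOfOne = bscript.fromASM(
  'OP_2 0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798' +
    ' OP_1 OP_CHECKMULTISIG',
);
console.log(multisig.output.check(twoOfOne)); // false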


@ -1,16 +0,0 @@
// OP_RETURN {data}
import * as bscript from '../script';
const OPS = bscript.OPS;
export function check(script: Buffer | Array<number | Buffer>): boolean {
const buffer = bscript.compile(script);
return buffer.length > 1 && buffer[0] === OPS.OP_RETURN;
}
check.toJSON = (): string => {
return 'null data output';
};
const output = { check };
export { output };


@ -1,4 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,16 +0,0 @@
// {signature}
import { Stack } from '../../payments';
import * as bscript from '../../script';
export function check(script: Buffer | Stack): boolean {
const chunks = bscript.decompile(script) as Stack;
return (
chunks.length === 1 &&
bscript.isCanonicalScriptSignature(chunks[0] as Buffer)
);
}
check.toJSON = (): string => {
return 'pubKey input';
};


@ -1,18 +0,0 @@
// {pubKey} OP_CHECKSIG
import { Stack } from '../../payments';
import * as bscript from '../../script';
import { OPS } from '../../script';
export function check(script: Buffer | Stack): boolean {
const chunks = bscript.decompile(script) as Stack;
return (
chunks.length === 2 &&
bscript.isCanonicalPubKey(chunks[0] as Buffer) &&
chunks[1] === OPS.OP_CHECKSIG
);
}
check.toJSON = (): string => {
return 'pubKey output';
};


@ -1,4 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,17 +0,0 @@
// {signature} {pubKey}
import { Stack } from '../../payments';
import * as bscript from '../../script';
export function check(script: Buffer | Stack): boolean {
const chunks = bscript.decompile(script) as Stack;
return (
chunks.length === 2 &&
bscript.isCanonicalScriptSignature(chunks[0] as Buffer) &&
bscript.isCanonicalPubKey(chunks[1] as Buffer)
);
}
check.toJSON = (): string => {
return 'pubKeyHash input';
};


@ -1,20 +0,0 @@
// OP_DUP OP_HASH160 {pubKeyHash} OP_EQUALVERIFY OP_CHECKSIG
import * as bscript from '../../script';
import { OPS } from '../../script';
export function check(script: Buffer | Array<number | Buffer>): boolean {
const buffer = bscript.compile(script);
return (
buffer.length === 25 &&
buffer[0] === OPS.OP_DUP &&
buffer[1] === OPS.OP_HASH160 &&
buffer[2] === 0x14 &&
buffer[23] === OPS.OP_EQUALVERIFY &&
buffer[24] === OPS.OP_CHECKSIG
);
}
check.toJSON = (): string => {
return 'pubKeyHash output';
};


@ -1,4 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,61 +0,0 @@
// <scriptSig> {serialized scriptPubKey script}
import * as bscript from '../../script';
import * as p2ms from '../multisig';
import * as p2pk from '../pubkey';
import * as p2pkh from '../pubkeyhash';
import * as p2wpkho from '../witnesspubkeyhash/output';
import * as p2wsho from '../witnessscripthash/output';
export function check(
script: Buffer | Array<number | Buffer>,
allowIncomplete?: boolean,
): boolean {
const chunks = bscript.decompile(script)!;
if (chunks.length < 1) return false;
const lastChunk = chunks[chunks.length - 1];
if (!Buffer.isBuffer(lastChunk)) return false;
const scriptSigChunks = bscript.decompile(
bscript.compile(chunks.slice(0, -1)),
)!;
const redeemScriptChunks = bscript.decompile(lastChunk);
// is redeemScript a valid script?
if (!redeemScriptChunks) return false;
// is redeemScriptSig push only?
if (!bscript.isPushOnly(scriptSigChunks)) return false;
// is witness?
if (chunks.length === 1) {
return (
p2wsho.check(redeemScriptChunks) || p2wpkho.check(redeemScriptChunks)
);
}
// match types
if (
p2pkh.input.check(scriptSigChunks) &&
p2pkh.output.check(redeemScriptChunks)
)
return true;
if (
p2ms.input.check(scriptSigChunks, allowIncomplete) &&
p2ms.output.check(redeemScriptChunks)
)
return true;
if (
p2pk.input.check(scriptSigChunks) &&
p2pk.output.check(redeemScriptChunks)
)
return true;
return false;
}
check.toJSON = (): string => {
return 'scriptHash input';
};
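One branch worth noting: when the scriptSig decompiles to a single data push, that push is treated as a possible P2SH-wrapped witness program, so nested P2SH-P2WPKH / P2SH-P2WSH spends are accepted without any signature material in the scriptSig. An illustrative sketch (the 20-byte hash is reused from the fixtures):

import * as bscript from '../src/script';
import * as scriptHash from '../src/templates/scripthash';

// P2SH-P2WPKH: the redeem script is the witness program OP_0 <20-byte hash>,
// and the scriptSig is just that serialized redeem script pushed as data.
const redeemScript = bscript.fromASM(
  'OP_0 aa4d7985c57e011a8b3dd8e0e5a73aaef41629c5',
);
const scriptSig = bscript.compile([redeemScript]);
console.log(scriptHash.input.check(scriptSig)); // true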


@ -1,18 +0,0 @@
// OP_HASH160 {scriptHash} OP_EQUAL
import * as bscript from '../../script';
import { OPS } from '../../script';
export function check(script: Buffer | Array<number | Buffer>): boolean {
const buffer = bscript.compile(script);
return (
buffer.length === 23 &&
buffer[0] === OPS.OP_HASH160 &&
buffer[1] === 0x14 &&
buffer[22] === OPS.OP_EQUAL
);
}
check.toJSON = (): string => {
return 'scriptHash output';
};


@ -1,3 +0,0 @@
import * as output from './output';
export { output };


@ -1,40 +0,0 @@
// OP_RETURN {aa21a9ed} {commitment}
import * as bscript from '../../script';
import { OPS } from '../../script';
import * as types from '../../types';
const typeforce = require('typeforce');
const HEADER: Buffer = Buffer.from('aa21a9ed', 'hex');
export function check(script: Buffer | Array<number | Buffer>): boolean {
const buffer = bscript.compile(script);
return (
buffer.length > 37 &&
buffer[0] === OPS.OP_RETURN &&
buffer[1] === 0x24 &&
buffer.slice(2, 6).equals(HEADER)
);
}
check.toJSON = (): string => {
return 'Witness commitment output';
};
export function encode(commitment: Buffer): Buffer {
typeforce(types.Hash256bit, commitment);
const buffer = Buffer.allocUnsafe(36);
HEADER.copy(buffer, 0);
commitment.copy(buffer, 4);
return bscript.compile([OPS.OP_RETURN, buffer]);
}
export function decode(buffer: Buffer): Buffer {
typeforce(check, buffer);
return (bscript.decompile(buffer)![1] as Buffer).slice(4, 36);
}
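encode wraps a 32-byte commitment as OP_RETURN followed by a 0x24-byte push of aa21a9ed || commitment (38 bytes once compiled), and decode strips the header back off. A round-trip sketch with a placeholder commitment value, accessed through the module's output namespace shown above:

import * as witnessCommitment from '../src/templates/witnesscommitment';

// Placeholder 32-byte commitment, purely for illustration.
const commitment = Buffer.alloc(32, 0xab);
const script = witnessCommitment.output.encode(commitment);
console.log(script.length); // 38
console.log(witnessCommitment.output.check(script)); // true
console.log(witnessCommitment.output.decode(script).equals(commitment)); // true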


@ -1,4 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,21 +0,0 @@
// {signature} {pubKey}
import { Stack } from '../../payments';
import * as bscript from '../../script';
function isCompressedCanonicalPubKey(pubKey: Buffer): boolean {
return bscript.isCanonicalPubKey(pubKey) && pubKey.length === 33;
}
export function check(script: Buffer | Stack): boolean {
const chunks = bscript.decompile(script) as Stack;
return (
chunks.length === 2 &&
bscript.isCanonicalScriptSignature(chunks[0] as Buffer) &&
isCompressedCanonicalPubKey(chunks[1] as Buffer)
);
}
check.toJSON = (): string => {
return 'witnessPubKeyHash input';
};


@ -1,13 +0,0 @@
// OP_0 {pubKeyHash}
import * as bscript from '../../script';
import { OPS } from '../../script';
export function check(script: Buffer | Array<number | Buffer>): boolean {
const buffer = bscript.compile(script);
return buffer.length === 22 && buffer[0] === OPS.OP_0 && buffer[1] === 0x14;
}
check.toJSON = (): string => {
return 'Witness pubKeyHash output';
};


@ -1,4 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,47 +0,0 @@
// <scriptSig> {serialized scriptPubKey script}
import * as bscript from '../../script';
const typeforce = require('typeforce');
import * as p2ms from '../multisig';
import * as p2pk from '../pubkey';
import * as p2pkh from '../pubkeyhash';
export function check(chunks: Buffer[], allowIncomplete?: boolean): boolean {
typeforce(typeforce.Array, chunks);
if (chunks.length < 1) return false;
const witnessScript = chunks[chunks.length - 1];
if (!Buffer.isBuffer(witnessScript)) return false;
const witnessScriptChunks = bscript.decompile(witnessScript);
// is witnessScript a valid script?
if (!witnessScriptChunks || witnessScriptChunks.length === 0) return false;
const witnessRawScriptSig = bscript.compile(chunks.slice(0, -1));
// match types
if (
p2pkh.input.check(witnessRawScriptSig) &&
p2pkh.output.check(witnessScriptChunks)
)
return true;
if (
p2ms.input.check(witnessRawScriptSig, allowIncomplete) &&
p2ms.output.check(witnessScriptChunks)
)
return true;
if (
p2pk.input.check(witnessRawScriptSig) &&
p2pk.output.check(witnessScriptChunks)
)
return true;
return false;
}
check.toJSON = (): string => {
return 'witnessScriptHash input';
};


@ -1,13 +0,0 @@
// OP_0 {scriptHash}
import * as bscript from '../../script';
import { OPS } from '../../script';
export function check(script: Buffer | Array<number | Buffer>): boolean {
const buffer = bscript.compile(script);
return buffer.length === 34 && buffer[0] === OPS.OP_0 && buffer[1] === 0x20;
}
check.toJSON = (): string => {
return 'Witness scriptHash output';
};

types/classify.d.ts

@ -1,15 +0,0 @@
declare const types: {
P2MS: string;
NONSTANDARD: string;
NULLDATA: string;
P2PK: string;
P2PKH: string;
P2SH: string;
P2WPKH: string;
P2WSH: string;
WITNESS_COMMITMENT: string;
};
declare function classifyOutput(script: Buffer): string;
declare function classifyInput(script: Buffer, allowIncomplete?: boolean): string;
declare function classifyWitness(script: Buffer[], allowIncomplete?: boolean): string;
export { classifyInput as input, classifyOutput as output, classifyWitness as witness, types, };


@ -1,3 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,5 +0,0 @@
import { Stack } from '../../payments';
export declare function check(script: Buffer | Stack, allowIncomplete?: boolean): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,5 +0,0 @@
import { Stack } from '../../payments';
export declare function check(script: Buffer | Stack, allowIncomplete?: boolean): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,8 +0,0 @@
export declare function check(script: Buffer | Array<number | Buffer>): boolean;
export declare namespace check {
var toJSON: () => string;
}
declare const output: {
check: typeof check;
};
export { output };


@ -1,3 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,5 +0,0 @@
import { Stack } from '../../payments';
export declare function check(script: Buffer | Stack): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,5 +0,0 @@
import { Stack } from '../../payments';
export declare function check(script: Buffer | Stack): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,3 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,5 +0,0 @@
import { Stack } from '../../payments';
export declare function check(script: Buffer | Stack): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,4 +0,0 @@
export declare function check(script: Buffer | Array<number | Buffer>): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,3 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,4 +0,0 @@
export declare function check(script: Buffer | Array<number | Buffer>, allowIncomplete?: boolean): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,4 +0,0 @@
export declare function check(script: Buffer | Array<number | Buffer>): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,2 +0,0 @@
import * as output from './output';
export { output };


@ -1,6 +0,0 @@
export declare function check(script: Buffer | Array<number | Buffer>): boolean;
export declare namespace check {
var toJSON: () => string;
}
export declare function encode(commitment: Buffer): Buffer;
export declare function decode(buffer: Buffer): Buffer;


@ -1,3 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,5 +0,0 @@
import { Stack } from '../../payments';
export declare function check(script: Buffer | Stack): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,4 +0,0 @@
export declare function check(script: Buffer | Array<number | Buffer>): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,3 +0,0 @@
import * as input from './input';
import * as output from './output';
export { input, output };


@ -1,4 +0,0 @@
export declare function check(chunks: Buffer[], allowIncomplete?: boolean): boolean;
export declare namespace check {
var toJSON: () => string;
}


@ -1,4 +0,0 @@
export declare function check(script: Buffer | Array<number | Buffer>): boolean;
export declare namespace check {
var toJSON: () => string;
}