Implement a hard fork for extended/infinite claim expiration times #112
10 changed files with 176 additions and 8 deletions
.gitignore (vendored): 2 additions
@@ -1,3 +1,5 @@
 .idea
 *.tar.gz
 *.exe

src/Makefile.am
@@ -119,6 +119,7 @@ BITCOIN_CORE_H = \
 miner.h \
 nameclaim.h \
 claimtrie.h \
+lbry.h \
 net.h \
 netbase.h \
 noui.h \
@@ -185,6 +186,7 @@ libbitcoin_server_a_SOURCES = \
 httpserver.cpp \
 init.cpp \
 dbwrapper.cpp \
+lbry.cpp \
 main.cpp \
 merkleblock.cpp \
 miner.cpp \

src/Makefile.test.include
@@ -52,6 +52,7 @@ BITCOIN_TESTS =\
 test/getarg_tests.cpp \
 test/hash_tests.cpp \
 test/key_tests.cpp \
+test/lbry_tests.cpp \
 test/limitedmap_tests.cpp \
 test/dbwrapper_tests.cpp \
 test/main_tests.cpp \

src/chainparams.cpp
@@ -126,7 +126,7 @@ public:
         consensus.BIP34Height = 227931;
         consensus.BIP34Hash = uint256S("0x000000000000024b89b42a942fe0d9fea3bb44ab7bd1b19115dd6a759c0808b8");
         consensus.powLimit = uint256S("0000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
-        consensus.nPowTargetTimespan = 150 * 24 * 2; // 2 hours
+        consensus.nPowTargetTimespan = 150; // retarget every block
         consensus.nPowTargetSpacing = 150;
         consensus.fPowAllowMinDifficultyBlocks = false;
         consensus.fPowNoRetargeting = false;
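
Note on the parameter change above: in Bitcoin Core, Consensus::Params::DifficultyAdjustmentInterval() is nPowTargetTimespan / nPowTargetSpacing, so setting the timespan equal to the 150-second block spacing makes the retarget window a single block. A minimal standalone sketch of that arithmetic (not part of the diff):

// Why nPowTargetTimespan = 150 means "retarget every block".
#include <cstdint>
#include <cstdio>

int main()
{
    const int64_t nPowTargetTimespan = 150; // seconds covered by one retarget window
    const int64_t nPowTargetSpacing  = 150; // target seconds between blocks
    // With both equal, the window is a single block, so difficulty adjusts each block.
    std::printf("DifficultyAdjustmentInterval = %lld\n",
                (long long)(nPowTargetTimespan / nPowTargetSpacing)); // prints 1
    return 0;
}
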
src/lbry.cpp (new file, 40 additions)
@@ -0,0 +1,40 @@
#include "lbry.h"
#include "uint256.h"
#include <cstdio>

unsigned int CalculateLbryNextWorkRequired(const CBlockIndex* pindexLast, int64_t nFirstBlockTime, const Consensus::Params& params)
{
    if (params.fPowNoRetargeting)
        return pindexLast->nBits;

    const int64_t retargetTimespan = params.nPowTargetTimespan;
    const int64_t nActualTimespan = pindexLast->GetBlockTime() - nFirstBlockTime;
    int64_t nModulatedTimespan = nActualTimespan;
    int64_t nMaxTimespan;
    int64_t nMinTimespan;

    nModulatedTimespan = retargetTimespan + (nModulatedTimespan - retargetTimespan) / 8;

    nMinTimespan = retargetTimespan - (retargetTimespan / 8); // (150 - 18 = 132)
    nMaxTimespan = retargetTimespan + (retargetTimespan / 2); // (150 + 75 = 225)

    // Limit adjustment step
    if (nModulatedTimespan < nMinTimespan)
        nModulatedTimespan = nMinTimespan;
    else if (nModulatedTimespan > nMaxTimespan)
        nModulatedTimespan = nMaxTimespan;

    // Retarget
    const arith_uint256 bnPowLimit = UintToArith256(params.powLimit);
    arith_uint256 bnNew;
    arith_uint256 bnOld;
    bnNew.SetCompact(pindexLast->nBits);
    bnOld = bnNew;
    bnNew *= nModulatedTimespan;
    bnNew /= retargetTimespan;
    if (bnNew > bnPowLimit)
        bnNew = bnPowLimit;

    return bnNew.GetCompact();
}
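
For reviewers, a self-contained sketch of the dampening and clamping applied to the observed block time above, using plain int64_t instead of chain state (not part of the diff). It shows that one retarget can at most multiply the target by 225/150 (easier) or 132/150 (harder):

// Mirrors the timespan modulation in CalculateLbryNextWorkRequired.
#include <cstdint>
#include <cstdio>

int64_t ModulatedTimespan(int64_t nActualTimespan, int64_t retargetTimespan = 150)
{
    // Dampen: only 1/8 of the deviation from the target is applied per retarget.
    int64_t nModulated = retargetTimespan + (nActualTimespan - retargetTimespan) / 8;
    const int64_t nMin = retargetTimespan - retargetTimespan / 8; // 150 - 18 = 132
    const int64_t nMax = retargetTimespan + retargetTimespan / 2; // 150 + 75 = 225
    if (nModulated < nMin) nModulated = nMin;
    if (nModulated > nMax) nModulated = nMax;
    return nModulated;
}

int main()
{
    // An instant block clamps to 132 (max per-step difficulty increase),
    // a very slow block clamps to 225 (max per-step decrease).
    std::printf("%lld %lld %lld\n",
                (long long)ModulatedTimespan(0),      // 132
                (long long)ModulatedTimespan(150),    // 150 (on target: no change)
                (long long)ModulatedTimespan(3600));  // 225
    return 0;
}
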
src/lbry.h (new file, 9 additions)
@@ -0,0 +1,9 @@
#ifndef LBRY_H
#define LBRY_H

#include "chain.h"
#include "chainparams.h"

unsigned int CalculateLbryNextWorkRequired(const CBlockIndex* pindexLast, int64_t nLastRetargetTime, const Consensus::Params& params);

#endif

src/pow.cpp (11 changes)
@@ -9,6 +9,7 @@
 #include "chain.h"
 #include "primitives/block.h"
 #include "uint256.h"
+#include "lbry.h"

 unsigned int GetNextWorkRequired(const CBlockIndex* pindexLast, const CBlockHeader *pblock, const Consensus::Params& params)
 {
@@ -40,13 +41,17 @@ unsigned int GetNextWorkRequired(const CBlockIndex* pindexLast, const CBlockHeader *pblock, const Consensus::Params& params)
         return pindexLast->nBits;
     }

-    // Go back by what we want to be 14 days worth of blocks
-    int nHeightFirst = pindexLast->nHeight - (params.DifficultyAdjustmentInterval()-1);
+    // Go back the full period unless it's the first retarget after genesis.
+    int blockstogoback = params.DifficultyAdjustmentInterval()-1;
+    if ((pindexLast->nHeight+1) != params.DifficultyAdjustmentInterval())
+        blockstogoback = params.DifficultyAdjustmentInterval();
+
+    int nHeightFirst = pindexLast->nHeight - blockstogoback;
     assert(nHeightFirst >= 0);
     const CBlockIndex* pindexFirst = pindexLast->GetAncestor(nHeightFirst);
     assert(pindexFirst);

-    return CalculateNextWorkRequired(pindexLast, pindexFirst->GetBlockTime(), params);
+    return CalculateLbryNextWorkRequired(pindexLast, pindexFirst->GetBlockTime(), params);
 }

 unsigned int CalculateNextWorkRequired(const CBlockIndex* pindexLast, int64_t nFirstBlockTime, const Consensus::Params& params)
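
How the look-back above behaves once DifficultyAdjustmentInterval() is 1 (the mainnet value after the chainparams change): only the genesis retarget anchors at height 0; every later block retargets against its immediate parent, so nActualTimespan is a single block interval. A small sketch (helper name and stand-in arguments are illustrative, not from the diff):

// Reproduces the blockstogoback logic for an adjustment interval of 1.
#include <cstdio>

int LookbackHeight(int nHeightLast, int interval)
{
    int blockstogoback = interval - 1;     // 0 when interval == 1
    if ((nHeightLast + 1) != interval)     // true for every block after genesis
        blockstogoback = interval;         // 1
    return nHeightLast - blockstogoback;   // height of the block we retarget against
}

int main()
{
    // Genesis retarget anchors at height 0; afterwards each block uses its parent.
    std::printf("%d %d %d\n",
                LookbackHeight(0, 1), LookbackHeight(1, 1), LookbackHeight(100, 1)); // 0 0 99
    return 0;
}
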
src/pow.h
@@ -17,6 +17,7 @@ class uint256;
 unsigned int GetNextWorkRequired(const CBlockIndex* pindexLast, const CBlockHeader *pblock, const Consensus::Params&);
 unsigned int CalculateNextWorkRequired(const CBlockIndex* pindexLast, int64_t nFirstBlockTime, const Consensus::Params&);

 /** Check whether a block hash satisfies the proof-of-work requirement specified by nBits */
 bool CheckProofOfWork(uint256 hash, unsigned int nBits, const Consensus::Params&);

src/test/lbry_tests.cpp (new file, 102 additions)
@@ -0,0 +1,102 @@
#include "arith_uint256.h"
#include "chainparams.h"
#include "lbry.h"
#include "main.h"
#include "test/test_bitcoin.h"
#include <cstdio>
#include <boost/test/unit_test.hpp>

BOOST_FIXTURE_TEST_SUITE(lbry_tests, TestingSetup)

// test block 1 difficulty, should be a max retarget
BOOST_AUTO_TEST_CASE(get_block_1_difficulty)
{
    SelectParams(CBaseChainParams::MAIN);
    const Consensus::Params& params = Params().GetConsensus();

    CBlockIndex pindexLast;
    int64_t nFirstBlockTime = 1386475638;

    pindexLast.nHeight = 0;
    pindexLast.nTime = 1386475638;
    pindexLast.nBits = 0x1f00ffff; // starting difficulty, also the limit
    unsigned int out = CalculateLbryNextWorkRequired(&pindexLast, nFirstBlockTime, params);
    arith_uint256 a;
    a.SetCompact(out);
    unsigned int expected = 0x1f00e146;
    BOOST_CHECK_EQUAL(out, expected);
}

// test max retarget (difficulty increase)
BOOST_AUTO_TEST_CASE(max_retarget)
{
    SelectParams(CBaseChainParams::MAIN);
    const Consensus::Params& params = Params().GetConsensus();

    CBlockIndex pindexLast;
    int64_t nFirstBlockTime = 1386475638;

    pindexLast.nHeight = 100;
    pindexLast.nTime = nFirstBlockTime;
    pindexLast.nBits = 0x1f00a000;

    unsigned int out = CalculateLbryNextWorkRequired(&pindexLast, nFirstBlockTime, params);
    arith_uint256 a;
    a.SetCompact(out);
    unsigned int expected = 0x1f008ccc;
    BOOST_CHECK_EQUAL(out, expected);
}

// test min retarget (difficulty decrease)
BOOST_AUTO_TEST_CASE(min_retarget)
{
    SelectParams(CBaseChainParams::MAIN);
    const Consensus::Params& params = Params().GetConsensus();

    CBlockIndex pindexLast;
    int64_t nFirstBlockTime = 1386475638;

    pindexLast.nHeight = 101;
    pindexLast.nTime = nFirstBlockTime + 60*20;
    pindexLast.nBits = 0x1f00a000;

    unsigned int out = CalculateLbryNextWorkRequired(&pindexLast, nFirstBlockTime, params);
    arith_uint256 a;
    a.SetCompact(out);
    unsigned int expected = 0x1f00f000;
    BOOST_CHECK_EQUAL(out, expected);
}

// test that the pow limit is not exceeded
BOOST_AUTO_TEST_CASE(pow_limit_check)
{
    SelectParams(CBaseChainParams::MAIN);
    const Consensus::Params& params = Params().GetConsensus();

    CBlockIndex pindexLast;
    int64_t nFirstBlockTime = 1386475638;

    pindexLast.nHeight = 102;
    pindexLast.nTime = nFirstBlockTime + 600; // block took a long time to generate; the retarget would reduce difficulty but hits the limit
    pindexLast.nBits = 0x1f00ffff;

    unsigned int out = CalculateLbryNextWorkRequired(&pindexLast, nFirstBlockTime, params);
    arith_uint256 a;
    a.SetCompact(out);
    unsigned int expected = 0x1f00ffff;
    BOOST_CHECK_EQUAL(out, expected);
}

BOOST_AUTO_TEST_SUITE_END()
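
A back-of-the-envelope check of the expected nBits values in these tests (not part of the diff): every case keeps the compact exponent byte at 0x1f, so the retarget reduces to scaling the 16-bit mantissa. The sketch below does that with plain integers; the real code performs the same division at 256-bit precision through arith_uint256 and re-compacts, which yields the same truncated mantissas here:

// Hand verification of the expected compact targets above.
#include <cstdint>
#include <cstdio>

int main()
{
    const int64_t target = 150;

    // get_block_1_difficulty / max_retarget: zero elapsed time -> timespan clamps to 132.
    std::printf("0x%llx\n", (long long)(0xffffLL * 132 / target)); // 0xe146
    std::printf("0x%llx\n", (long long)(0xa000LL * 132 / target)); // 0x8ccc

    // min_retarget: 1200 s elapsed -> 150 + 1050/8 = 281, clamped to 225.
    std::printf("0x%llx\n", (long long)(0xa000LL * 225 / target)); // 0xf000

    // pow_limit_check: 600 s elapsed -> modulated timespan 206; the scaled mantissa
    // overflows 16 bits, i.e. the new target would exceed powLimit, so the code
    // returns the limit 0x1f00ffff unchanged.
    std::printf("0x%llx\n", (long long)(0xffffLL * 206 / target)); // 0x15f91 (> 0xffff)
    return 0;
}
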
src/test/miner_tests.cpp
@@ -17,7 +17,7 @@
 #include "utilstrencodings.h"

 #include "test/test_bitcoin.h"

 #include <cstdio>
 #include <boost/test/unit_test.hpp>

 BOOST_FIXTURE_TEST_SUITE(miner_tests, TestingSetup)
@@ -151,7 +151,7 @@ BOOST_AUTO_TEST_CASE(CreateNewBlock_validity)
     CBlock *pblock = &pblocktemplate->block; // pointer for convenience
     pblock->hashPrevBlock = chainActive.Tip()->GetBlockHash();
     pblock->nVersion = 1;
-    pblock->nTime = chainActive.Tip()->GetMedianTimePast()+1;
+    pblock->nTime = chainActive.Tip()->GetBlockTime()+150*(i+1);
     CMutableTransaction txCoinbase(pblock->vtx[0]);
     txCoinbase.nVersion = 1;
     txCoinbase.vin[0].scriptSig = CScript();
@@ -166,7 +166,11 @@ BOOST_AUTO_TEST_CASE(CreateNewBlock_validity)
     txFirst.push_back(new CTransaction(pblock->vtx[0]));
     pblock->hashMerkleRoot = BlockMerkleRoot(*pblock);
     pblock->nNonce = nonces[i];
-    /*bool fFound = false;
+
+    // Use the code below to find nonces, in case we change hashing or the difficulty retargeting algo
+    /*
+    bool fFound = false;
     for (int j = 0; !fFound; j++)
     {
         pblock->nNonce = j;
@@ -179,7 +183,9 @@ BOOST_AUTO_TEST_CASE(CreateNewBlock_validity)
         else
             std::cout << " ";
     }
-    }*/
+    }
+    */

     CValidationState state;
     BOOST_CHECK(ProcessNewBlock(state, chainparams, NULL, pblock, true, NULL));
     BOOST_CHECK(state.IsValid());