Merge pull request #4975 from vijaydasmp/bp2101

backport: bitcoin#18493, 18467, 18633, 18157, 18774, 19006, 18612, 20033, 15283

Commit 9293d2f382
```diff
@@ -20,9 +20,9 @@ The macOS configuration assumes dashd will be set up for the current user.
 Configuration
 ---------------------------------
 
-At a bare minimum, dashd requires that the rpcpassword setting be set
-when running as a daemon. If the configuration file does not exist or this
-setting is not set, dashd will shut down promptly after startup.
+Running dashd as a daemon does not require any manual configuration. You may
+set the `rpcauth` setting in the `dash.conf` configuration file to override
+the default behaviour of using a special cookie for authentication.
 
 This password does not have to be remembered or typed as it is mostly used
 as a fixed token that dashd and client programs read from the configuration
```

```diff
@@ -9,7 +9,6 @@
 #include <httpserver.h>
 #include <rpc/protocol.h>
 #include <rpc/server.h>
-#include <ui_interface.h>
 #include <util/strencodings.h>
 #include <util/system.h>
 #include <util/translation.h>
```

```diff
@@ -253,9 +252,6 @@ static bool InitRPCAuthentication()
     {
         LogPrintf("Using random cookie authentication.\n");
         if (!GenerateAuthCookie(&strRPCUserColonPass)) {
-            uiInterface.ThreadSafeMessageBox(
-                _("Error: A fatal internal error occurred, see debug.log for details"), // Same message as AbortNode
-                "", CClientUIInterface::MSG_ERROR);
             return false;
         }
     } else {
```

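For context on the cookie authentication that the documentation change and the `InitRPCAuthentication` hunk above rely on: when no explicit RPC credentials are configured, the daemon generates random credentials, writes them to a `.cookie` file in the data directory, and local clients read them back; if the file cannot be written, RPC startup is aborted (which the functional-test addition near the end of this diff exercises by turning the temporary cookie path into a directory). The sketch below is only an illustration of that idea under those assumptions; it is not the real `GenerateAuthCookie`, and the path handling and credential format are modelled loosely on the upstream convention.

```cpp
#include <fstream>
#include <random>
#include <sstream>
#include <string>

// Illustrative sketch only: write "user:password" credentials to a cookie file
// that local RPC clients can read back. The real GenerateAuthCookie() uses the
// node's CSPRNG, datadir resolution and an atomic rename; none of that is shown.
bool WriteAuthCookieSketch(const std::string& cookie_path, std::string& user_colon_pass)
{
    std::random_device rd; // assumption: adequate entropy for a sketch
    std::ostringstream pass;
    pass << std::hex;
    for (int i = 0; i < 32; ++i) pass << (rd() & 0xf);

    user_colon_pass = "__cookie__:" + pass.str(); // "__cookie__" user name mirrors the upstream convention

    std::ofstream file(cookie_path);
    if (!file.is_open()) return false; // unwritable path: caller aborts RPC startup, as InitRPCAuthentication does
    file << user_colon_pass << '\n';
    return file.good();
}
```
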
```diff
@@ -423,7 +423,7 @@ bool UpdateHTTPServerLogging(bool enable) {
 #endif
 }
 
-static std::thread threadHTTP;
+static std::thread g_thread_http;
 static std::vector<std::thread> g_thread_http_workers;
 
 void StartHTTPServer()
```

```diff
@@ -431,7 +431,7 @@ void StartHTTPServer()
     LogPrint(BCLog::HTTP, "Starting HTTP server\n");
     int rpcThreads = std::max((long)gArgs.GetArg("-rpcthreads", DEFAULT_HTTP_THREADS), 1L);
     LogPrintf("HTTP: starting %d worker threads\n", rpcThreads);
-    threadHTTP = std::thread(ThreadHTTP, eventBase);
+    g_thread_http = std::thread(ThreadHTTP, eventBase);
 
     for (int i = 0; i < rpcThreads; i++) {
         g_thread_http_workers.emplace_back(HTTPWorkQueueRun, g_work_queue.get(), i);
```

```diff
@@ -468,7 +468,7 @@ void StopHTTPServer()
     boundSockets.clear();
     if (eventBase) {
         LogPrint(BCLog::HTTP, "Waiting for HTTP event thread to exit\n");
-        threadHTTP.join();
+        if (g_thread_http.joinable()) g_thread_http.join();
     }
     if (eventHTTP) {
         evhttp_free(eventHTTP);
```

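The rename to `g_thread_http` comes with a `joinable()` guard. Calling `std::thread::join()` on a thread object that was never started, or was already joined, throws `std::system_error`, so the guard makes `StopHTTPServer()` safe to call even when `StartHTTPServer()` never ran. A minimal standalone illustration of that behaviour (not code from the tree):

```cpp
#include <iostream>
#include <system_error>
#include <thread>

int main()
{
    std::thread t; // default-constructed: not associated with any thread of execution

    try {
        t.join(); // throws std::system_error (errc::invalid_argument)
    } catch (const std::system_error& e) {
        std::cout << "unguarded join failed: " << e.what() << '\n';
    }

    if (t.joinable()) t.join(); // guarded form: a safe no-op here, mirroring StopHTTPServer()

    std::thread worker([] { /* pretend this is the HTTP event loop */ });
    if (worker.joinable()) worker.join(); // normal case: wait for the started thread
    return 0;
}
```
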
```diff
@@ -30,7 +30,7 @@ extern bool fAllowPrivateNet;
  * should be serialized in (unserialized from) v2 format (BIP155).
  * Make sure that this does not collide with any of the values in `version.h`
  */
-static const int ADDRV2_FORMAT = 0x20000000;
+static constexpr int ADDRV2_FORMAT = 0x20000000;
 
 /**
  * A network type.
```

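Switching from `static const` to `static constexpr` makes the compile-time nature of the constant explicit; for an `int` with a constant initializer both forms are already usable in constant expressions, so the change is mainly about stating intent and keeping header constants consistent. A small, purely illustrative sketch (the names here are made up, not from the codebase):

```cpp
// Hypothetical stand-in for the real constant; the name is illustrative only.
static constexpr int ADDRV2_FORMAT_EXAMPLE = 0x20000000;

// A constexpr constant is usable anywhere a constant expression is required,
// for example inside other constexpr functions and static_assert checks.
constexpr bool UsesAddrV2(int stream_version)
{
    return (stream_version & ADDRV2_FORMAT_EXAMPLE) != 0;
}

static_assert(UsesAddrV2(0x20000000), "the flag itself selects v2 serialization");
static_assert(!UsesAddrV2(0), "no flag, no v2 serialization");
```
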
```diff
@@ -451,8 +451,6 @@ static UniValue getdifficulty(const JSONRPCRequest& request)
 
 static std::vector<RPCResult> MempoolEntryDescription() { return {
     RPCResult{RPCResult::Type::NUM, "vsize", "virtual transaction size. This can be different from actual serialized size for high-sigop transactions."},
-    RPCResult{RPCResult::Type::NUM, "size", "(DEPRECATED) same as vsize. Only returned if dashd is started with -deprecatedrpc=size. "
-                                            "size will be completely removed in v0.20."},
     RPCResult{RPCResult::Type::STR_AMOUNT, "fee", "transaction fee in " + CURRENCY_UNIT + " (DEPRECATED)"},
     RPCResult{RPCResult::Type::STR_AMOUNT, "modifiedfee", "transaction fee with fee deltas used for mining priority (DEPRECATED)"},
     RPCResult{RPCResult::Type::NUM_TIME, "time", "local time transaction entered pool in " + UNIX_EPOCH_TIME},
```

```diff
@@ -489,7 +487,6 @@ static void entryToJSON(const CTxMemPool& pool, UniValue& info, const CTxMemPool
     info.pushKV("fees", fees);
 
     info.pushKV("vsize", (int)e.GetTxSize());
-    if (IsDeprecatedRPCEnabled("size")) info.pushKV("size", (int)e.GetTxSize());
     info.pushKV("fee", ValueFromAmount(e.GetFee()));
     info.pushKV("modifiedfee", ValueFromAmount(e.GetModifiedFee()));
     info.pushKV("time", e.GetTime());
```

```diff
@@ -416,28 +416,15 @@ public:
 
     SERIALIZE_METHODS(CScript, obj) { READWRITEAS(CScriptBase, obj); }
 
-    CScript& operator+=(const CScript& b)
-    {
-        reserve(size() + b.size());
-        insert(end(), b.begin(), b.end());
-        return *this;
-    }
-
-    friend CScript operator+(const CScript& a, const CScript& b)
-    {
-        CScript ret = a;
-        ret += b;
-        return ret;
-    }
-
     explicit CScript(int64_t b) { operator<<(b); }
 
     explicit CScript(opcodetype b) { operator<<(b); }
     explicit CScript(const CScriptNum& b) { operator<<(b); }
     // delete non-existent constructor to defend against future introduction
     // e.g. via prevector
     explicit CScript(const std::vector<unsigned char>& b) = delete;
 
+    /** Delete non-existent operator to defend against future introduction */
+    CScript& operator<<(const CScript& b) = delete;
 
     CScript& operator<<(int64_t b) { return push_int64(b); }
 
```

```diff
@@ -484,15 +471,6 @@ public:
         return *this;
     }
 
-    CScript& operator<<(const CScript& b)
-    {
-        // I'm not sure if this should push the script or concatenate scripts.
-        // If there's ever a use for pushing a script onto a script, delete this member fn
-        assert(!"Warning: Pushing a CScript onto a CScript with << is probably not intended, use + to concatenate!");
-        return *this;
-    }
-
-
     bool GetOp(const_iterator& pc, opcodetype& opcodeRet, std::vector<unsigned char>& vchRet) const
     {
         return GetScriptOp(pc, end(), opcodeRet, &vchRet);
```

```diff
@@ -503,7 +481,6 @@ public:
         return GetScriptOp(pc, end(), opcodeRet, nullptr);
     }
 
-
     /** Encode/decode small integers: */
     static int DecodeOP_N(opcodetype opcode)
    {
```

```diff
@@ -17,12 +17,16 @@ void test_one_input(const std::vector<uint8_t>& buffer)
     CScript script = ConsumeScript(fuzzed_data_provider);
     while (fuzzed_data_provider.remaining_bytes() > 0) {
         switch (fuzzed_data_provider.ConsumeIntegralInRange(0, 7)) {
-        case 0:
-            script += ConsumeScript(fuzzed_data_provider);
+        case 0: {
+            CScript s = ConsumeScript(fuzzed_data_provider);
+            script = std::move(s);
             break;
-        case 1:
-            script = script + ConsumeScript(fuzzed_data_provider);
+        }
+        case 1: {
+            const CScript& s = ConsumeScript(fuzzed_data_provider);
+            script = s;
             break;
+        }
         case 2:
             script << fuzzed_data_provider.ConsumeIntegral<int64_t>();
             break;
```

```diff
@@ -207,7 +207,6 @@ struct KeyData
 
     KeyData()
     {
-
         key0.Set(vchKey0, vchKey0 + 32, false);
         key0C.Set(vchKey0, vchKey0 + 32, true);
         pubkey0 = key0.GetPubKey();
```

```diff
@@ -272,10 +271,10 @@ public:
         return *this;
     }
 
-    TestBuilder& Add(const CScript& _script)
+    TestBuilder& Opcode(const opcodetype& _op)
     {
         DoPush();
-        spendTx.vin[0].scriptSig += _script;
+        spendTx.vin[0].scriptSig << _op;
         return *this;
     }
 
```

```diff
@@ -644,22 +643,22 @@ BOOST_AUTO_TEST_CASE(script_build)
 
     tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
                                 "2-of-2 with two identical keys and sigs pushed using OP_DUP but no SIGPUSHONLY", 0
-                               ).Num(0).PushSig(keys.key1).Add(CScript() << OP_DUP));
+                               ).Num(0).PushSig(keys.key1).Opcode(OP_DUP));
     tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
                                 "2-of-2 with two identical keys and sigs pushed using OP_DUP", SCRIPT_VERIFY_SIGPUSHONLY
-                               ).Num(0).PushSig(keys.key1).Add(CScript() << OP_DUP).ScriptError(SCRIPT_ERR_SIG_PUSHONLY));
+                               ).Num(0).PushSig(keys.key1).Opcode(OP_DUP).ScriptError(SCRIPT_ERR_SIG_PUSHONLY));
     tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
                                 "P2SH(P2PK) with non-push scriptSig but no P2SH or SIGPUSHONLY", 0, true
-                               ).PushSig(keys.key2).Add(CScript() << OP_NOP8).PushRedeem());
+                               ).PushSig(keys.key2).Opcode(OP_NOP8).PushRedeem());
     tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
                                 "P2PK with non-push scriptSig but with P2SH validation", 0
-                               ).PushSig(keys.key2).Add(CScript() << OP_NOP8));
+                               ).PushSig(keys.key2).Opcode(OP_NOP8));
     tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
                                 "P2SH(P2PK) with non-push scriptSig but no SIGPUSHONLY", SCRIPT_VERIFY_P2SH, true
-                               ).PushSig(keys.key2).Add(CScript() << OP_NOP8).PushRedeem().ScriptError(SCRIPT_ERR_SIG_PUSHONLY));
+                               ).PushSig(keys.key2).Opcode(OP_NOP8).PushRedeem().ScriptError(SCRIPT_ERR_SIG_PUSHONLY));
     tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
                                 "P2SH(P2PK) with non-push scriptSig but not P2SH", SCRIPT_VERIFY_SIGPUSHONLY, true
-                               ).PushSig(keys.key2).Add(CScript() << OP_NOP8).PushRedeem().ScriptError(SCRIPT_ERR_SIG_PUSHONLY));
+                               ).PushSig(keys.key2).Opcode(OP_NOP8).PushRedeem().ScriptError(SCRIPT_ERR_SIG_PUSHONLY));
     tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
                                 "2-of-2 with two identical keys and sigs pushed", SCRIPT_VERIFY_SIGPUSHONLY
                                ).Num(0).PushSig(keys.key1).PushSig(keys.key1));
```

```diff
@@ -1403,24 +1402,6 @@ BOOST_AUTO_TEST_CASE(script_FindAndDelete)
     BOOST_CHECK(s == expect);
 }
 
-BOOST_AUTO_TEST_CASE(script_can_append_self)
-{
-    CScript s, d;
-
-    s = ScriptFromHex("00");
-    s += s;
-    d = ScriptFromHex("0000");
-    BOOST_CHECK(s == d);
-
-    // check doubling a script that's large enough to require reallocation
-    static const char hex[] = "04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f";
-    s = CScript() << ParseHex(hex) << OP_CHECKSIG;
-    d = CScript() << ParseHex(hex) << OP_CHECKSIG << ParseHex(hex) << OP_CHECKSIG;
-    s += s;
-    BOOST_CHECK(s == d);
-}
-
-
 #if defined(HAVE_CONSENSUS_LIB)
 
 /* Test simple (successful) usage of dashconsensus_verify_script */
```

```diff
@@ -2011,6 +2011,7 @@ bool CChainState::ConnectBlock(const CBlock& block, CValidationState& state, CBl
             return AbortNode(state, "Found EvoDB inconsistency, you must reindex to continue");
         }
     }
+    nBlocksTotal++;
 
     // Special case for the genesis block, skipping connection of its transactions
     // (its coinbase is unspendable)
```

```diff
@@ -2020,8 +2021,6 @@ bool CChainState::ConnectBlock(const CBlock& block, CValidationState& state, CBl
         return true;
     }
 
-    nBlocksTotal++;
-
     bool fScriptChecks = true;
     if (!hashAssumeValid.IsNull()) {
         // We've been configured with the hash of a block which has been externally verified to have a valid history.
```

```diff
@@ -2833,6 +2832,7 @@ bool CChainState::ConnectTip(CValidationState& state, const CChainParams& chainp
            return error("%s: ConnectBlock %s failed, %s", __func__, pindexNew->GetBlockHash().ToString(), FormatStateMessage(state));
         }
         nTime3 = GetTimeMicros(); nTimeConnectTotal += nTime3 - nTime2;
+        assert(nBlocksTotal > 0);
         LogPrint(BCLog::BENCHMARK, " - Connect total: %.2fms [%.2fs (%.2fms/blk)]\n", (nTime3 - nTime2) * MILLI, nTimeConnectTotal * MICRO, nTimeConnectTotal * MILLI / nBlocksTotal);
         bool flushed = view.Flush();
         assert(flushed);
```

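Moving `nBlocksTotal++` ahead of the genesis-block early return means the counter is bumped for every `ConnectBlock` call, and the new `assert(nBlocksTotal > 0)` in `ConnectTip` documents that the benchmark `LogPrint` line, which divides by the counter, can never divide by zero. The pattern in isolation, as a hedged sketch with made-up names:

```cpp
#include <cassert>
#include <cstdio>

static int g_blocks_total = 0; // stand-in for nBlocksTotal

bool ConnectBlockSketch(bool is_genesis)
{
    ++g_blocks_total;            // counted before any early return, as in the hunk above
    if (is_genesis) return true; // the genesis special case still increments the counter
    // ... normal block connection work ...
    return true;
}

void ConnectTipSketch()
{
    ConnectBlockSketch(/*is_genesis=*/false);
    assert(g_blocks_total > 0); // mirrors the added assert: the division below is safe
    std::printf("avg connect time: %.2fms/blk\n", 42.0 / g_blocks_total);
}

int main()
{
    ConnectTipSketch();
    return 0;
}
```
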
```diff
@@ -2288,7 +2288,8 @@ static UniValue listlockunspent(const JSONRPCRequest& request)
 static UniValue settxfee(const JSONRPCRequest& request)
 {
     RPCHelpMan{"settxfee",
-        "\nSet the transaction fee per kB for this wallet. Overrides the global -paytxfee command line parameter.\n",
+        "\nSet the transaction fee per kB for this wallet. Overrides the global -paytxfee command line parameter.\n"
+        "Can be deactivated by passing 0 as the fee. In that case automatic fee selection will be used by default.\n",
         {
             {"amount", RPCArg::Type::AMOUNT, RPCArg::Optional::NO, "The transaction fee in " + CURRENCY_UNIT + "/kB"},
         },
```

```diff
@@ -2309,12 +2310,15 @@ static UniValue settxfee(const JSONRPCRequest& request)
 
     CAmount nAmount = AmountFromValue(request.params[0]);
     CFeeRate tx_fee_rate(nAmount, 1000);
+    CFeeRate max_tx_fee_rate(pwallet->m_default_max_tx_fee, 1000);
     if (tx_fee_rate == 0) {
         // automatic selection
     } else if (tx_fee_rate < pwallet->chain().relayMinFee()) {
         throw JSONRPCError(RPC_INVALID_PARAMETER, strprintf("txfee cannot be less than min relay tx fee (%s)", pwallet->chain().relayMinFee().ToString()));
     } else if (tx_fee_rate < pwallet->m_min_fee) {
         throw JSONRPCError(RPC_INVALID_PARAMETER, strprintf("txfee cannot be less than wallet min fee (%s)", pwallet->m_min_fee.ToString()));
+    } else if (tx_fee_rate > max_tx_fee_rate) {
+        throw JSONRPCError(RPC_INVALID_PARAMETER, strprintf("txfee cannot be more than wallet max tx fee (%s)", max_tx_fee_rate.ToString()));
     }
 
     pwallet->m_pay_tx_fee = tx_fee_rate;
```

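The new branch compares the requested rate against the wallet's maximum transaction fee, expressed as a `CFeeRate` per 1000 bytes, so `settxfee` rejects values that the wallet's own fee cap (`m_default_max_tx_fee`, which appears to correspond to the `-maxtxfee` setting) would refuse at send time anyway. The resulting ordering of the checks, reduced to plain integers in a hedged sketch with made-up limit values:

```cpp
#include <stdexcept>

// Illustrative only: fee rates reduced to "satoshis per 1000 bytes" integers
// instead of CFeeRate objects. All limit values are assumptions.
struct FeeLimitsSketch {
    long long min_relay_per_kb = 1000;      // relay floor (assumed)
    long long wallet_min_per_kb = 1000;     // wallet floor (assumed)
    long long max_tx_fee_per_kb = 10000000; // upper cap, analogous to m_default_max_tx_fee
};

long long SetTxFeeSketch(const FeeLimitsSketch& limits, long long requested_per_kb)
{
    if (requested_per_kb == 0) return 0; // 0 re-enables automatic fee selection
    if (requested_per_kb < limits.min_relay_per_kb) throw std::invalid_argument("below min relay tx fee");
    if (requested_per_kb < limits.wallet_min_per_kb) throw std::invalid_argument("below wallet min fee");
    if (requested_per_kb > limits.max_tx_fee_per_kb) throw std::invalid_argument("above wallet max tx fee"); // the new check
    return requested_per_kb;
}
```
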
```diff
@@ -52,10 +52,7 @@ class HTTPBasicsTest(BitcoinTestFramework):
             f.write(rpcpassword + "\n")
 
     def run_test(self):
-
-        ##################################################
-        # Check correctness of the rpcauth config option #
-        ##################################################
+        self.log.info('Check correctness of the rpcauth config option')
         url = urllib.parse.urlparse(self.nodes[0].url)
 
         #Old authpair
```

```diff
@@ -161,9 +158,7 @@ class HTTPBasicsTest(BitcoinTestFramework):
         assert_equal(resp.status, 401)
         conn.close()
 
-        ###############################################################
-        # Check correctness of the rpcuser/rpcpassword config options #
-        ###############################################################
+        self.log.info('Check correctness of the rpcuser/rpcpassword config options')
         url = urllib.parse.urlparse(self.nodes[1].url)
 
         # rpcuser and rpcpassword authpair
```

```diff
@@ -203,5 +198,13 @@ class HTTPBasicsTest(BitcoinTestFramework):
         conn.close()
 
+        self.log.info('Check that failure to write cookie file will abort the node gracefully')
+        self.stop_node(0)
+        cookie_file = os.path.join(get_datadir_path(self.options.tmpdir, 0), self.chain, '.cookie.tmp')
+        os.mkdir(cookie_file)
+        init_error = 'Error: Unable to start HTTP server. See debug log for details.'
+        self.nodes[0].assert_start_raises_init_error(expected_msg=init_error)
+
 
 if __name__ == '__main__':
     HTTPBasicsTest().main()
```

```diff
@@ -262,14 +262,14 @@ class TestNode():
                 self.rpc_connected = True
                 self.url = self.rpc.url
                 return
-            except IOError as e:
-                if e.errno != errno.ECONNREFUSED:  # Port not yet open?
-                    raise  # unknown IO error
             except JSONRPCException as e:  # Initialization phase
                 # -28 RPC in warmup
                 # -342 Service unavailable, RPC server started but is shutting down due to error
                 if e.error['code'] != -28 and e.error['code'] != -342:
                     raise  # unknown JSON RPC exception
+            except OSError as e:
+                if e.errno != errno.ECONNREFUSED:  # Port not yet open?
+                    raise  # unknown OS error
             except ValueError as e:  # cookie file not found and no rpcuser or rpcassword. dashd still starting
                 if "No RPC credentials" not in str(e):
                     raise
```

```diff
@@ -359,6 +359,13 @@ def initialize_datadir(dirname, n, chain):
     os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
     return datadir
 
+def adjust_bitcoin_conf_for_pre_17(conf_file):
+    with open(conf_file,'r', encoding='utf8') as conf:
+        conf_data = conf.read()
+    with open(conf_file, 'w', encoding='utf8') as conf:
+        conf_data_changed = conf_data.replace('[regtest]', '')
+        conf.write(conf_data_changed)
+
 def get_datadir_path(dirname, n):
     return os.path.join(dirname, "node" + str(n))
 
```

```diff
@@ -202,6 +202,7 @@ BASE_SCRIPTS = [
     'mempool_expiry.py',
     'wallet_import_rescan.py',
     'wallet_import_with_label.py',
+    'wallet_upgradewallet.py',
     'rpc_bind.py --ipv4',
     'rpc_bind.py --ipv6',
     'rpc_bind.py --nonloopback',
```

```diff
@@ -7,6 +7,7 @@
 Verify that a dashd node can load multiple wallet files
 """
 from threading import Thread
+from decimal import Decimal
 import os
 import shutil
 import time
```

```diff
@@ -204,9 +205,9 @@ class MultiWalletTest(BitcoinTestFramework):
         self.log.info('Check for per-wallet settxfee call')
         assert_equal(w1.getwalletinfo()['paytxfee'], 0)
         assert_equal(w2.getwalletinfo()['paytxfee'], 0)
-        w2.settxfee(4.0)
+        w2.settxfee(0.001)
         assert_equal(w1.getwalletinfo()['paytxfee'], 0)
-        assert_equal(w2.getwalletinfo()['paytxfee'], 4.0)
+        assert_equal(w2.getwalletinfo()['paytxfee'], Decimal('0.00100000'))
 
         self.log.info("Test dynamic wallet loading")
 
```

test/functional/wallet_upgradewallet.py (new executable file, 156 lines)

```diff
@@ -0,0 +1,156 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018-2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""upgradewallet RPC functional test
+
+Test upgradewallet RPC. Download v0.15.2 v0.16.3 node binaries:
+
+contrib/devtools/previous_release.sh -b v0.15.2 v0.16.3
+"""
+
+import os
+import shutil
+
+from test_framework.test_framework import BitcoinTestFramework, SkipTest
+from test_framework.util import (
+    adjust_bitcoin_conf_for_pre_17,
+    assert_equal,
+    assert_greater_than,
+    assert_is_hex_string
+)
+
+
+class UpgradeWalletTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 3
+        self.extra_args = [
+            ["-addresstype=bech32"], # current wallet version
+            ["-usehd=1"],            # v0.16.3 wallet
+            ["-usehd=0"]             # v0.15.2 wallet
+        ]
+
+    def skip_test_if_missing_module(self):
+        self.skip_if_no_wallet()
+
+    def setup_network(self):
+        self.setup_nodes()
+
+    def setup_nodes(self):
+        if os.getenv("TEST_PREVIOUS_RELEASES") == "false":
+            raise SkipTest("upgradewallet RPC tests")
+
+        releases_path = os.getenv("PREVIOUS_RELEASES_DIR") or os.getcwd() + "/releases"
+        if not os.path.isdir(releases_path):
+            if os.getenv("TEST_PREVIOUS_RELEASES") == "true":
+                raise AssertionError("TEST_PREVIOUS_RELEASES=1 but releases missing: " + releases_path)
+            raise SkipTest("This test requires binaries for previous releases")
+
+        self.add_nodes(self.num_nodes, extra_args=self.extra_args, versions=[
+            None,
+            160300,
+            150200
+        ], binary=[
+            self.options.bitcoind,
+            releases_path + "/v0.16.3/bin/bitcoind",
+            releases_path + "/v0.15.2/bin/bitcoind",
+        ], binary_cli=[
+            self.options.bitcoincli,
+            releases_path + "/v0.16.3/bin/bitcoin-cli",
+            releases_path + "/v0.15.2/bin/bitcoin-cli",
+        ])
+        # adapt bitcoin.conf, because older bitcoind's don't recognize config sections
+        adjust_bitcoin_conf_for_pre_17(self.nodes[1].bitcoinconf)
+        adjust_bitcoin_conf_for_pre_17(self.nodes[2].bitcoinconf)
+        self.start_nodes()
+
+    def dumb_sync_blocks(self):
+        """
+        Little helper to sync older wallets.
+        Notice that v0.15.2's regtest is hardforked, so there is
+        no sync for it.
+        v0.15.2 is only being used to test for version upgrade
+        and master hash key presence.
+        v0.16.3 is being used to test for version upgrade and balances.
+        Further info: https://github.com/bitcoin/bitcoin/pull/18774#discussion_r416967844
+        """
+        node_from = self.nodes[0]
+        v16_3_node = self.nodes[1]
+        to_height = node_from.getblockcount()
+        height = self.nodes[1].getblockcount()
+        for i in range(height, to_height+1):
+            b = node_from.getblock(blockhash=node_from.getblockhash(i), verbose=0)
+            v16_3_node.submitblock(b)
+        assert_equal(v16_3_node.getblockcount(), to_height)
+
+    def run_test(self):
+        self.nodes[0].generatetoaddress(101, self.nodes[0].getnewaddress())
+        self.dumb_sync_blocks()
+        # # Sanity check the test framework:
+        res = self.nodes[0].getblockchaininfo()
+        assert_equal(res['blocks'], 101)
+        node_master = self.nodes[0]
+        v16_3_node = self.nodes[1]
+        v15_2_node = self.nodes[2]
+
+        # Send coins to old wallets for later conversion checks.
+        v16_3_wallet = v16_3_node.get_wallet_rpc('wallet.dat')
+        v16_3_address = v16_3_wallet.getnewaddress()
+        node_master.generatetoaddress(101, v16_3_address)
+        self.dumb_sync_blocks()
+        v16_3_balance = v16_3_wallet.getbalance()
+
+        self.log.info("Test upgradewallet RPC...")
+        # Prepare for copying of the older wallet
+        node_master_wallet_dir = os.path.join(node_master.datadir, "regtest/wallets")
+        v16_3_wallet = os.path.join(v16_3_node.datadir, "regtest/wallets/wallet.dat")
+        v15_2_wallet = os.path.join(v15_2_node.datadir, "regtest/wallet.dat")
+        self.stop_nodes()
+
+        # Copy the 0.16.3 wallet to the last Bitcoin Core version and open it:
+        shutil.rmtree(node_master_wallet_dir)
+        os.mkdir(node_master_wallet_dir)
+        shutil.copy(
+            v16_3_wallet,
+            node_master_wallet_dir
+        )
+        self.restart_node(0, ['-nowallet'])
+        node_master.loadwallet('')
+
+        wallet = node_master.get_wallet_rpc('')
+        old_version = wallet.getwalletinfo()["walletversion"]
+
+        # calling upgradewallet without version arguments
+        # should return nothing if successful
+        assert_equal(wallet.upgradewallet(), "")
+        new_version = wallet.getwalletinfo()["walletversion"]
+        # upgraded wallet version should be greater than older one
+        assert_greater_than(new_version, old_version)
+        # wallet should still contain the same balance
+        assert_equal(wallet.getbalance(), v16_3_balance)
+
+        self.stop_node(0)
+        # Copy the 0.15.2 wallet to the last Bitcoin Core version and open it:
+        shutil.rmtree(node_master_wallet_dir)
+        os.mkdir(node_master_wallet_dir)
+        shutil.copy(
+            v15_2_wallet,
+            node_master_wallet_dir
+        )
+        self.restart_node(0, ['-nowallet'])
+        node_master.loadwallet('')
+
+        wallet = node_master.get_wallet_rpc('')
+        # should have no master key hash before conversion
+        assert_equal('hdseedid' in wallet.getwalletinfo(), False)
+        # calling upgradewallet with explicit version number
+        # should return nothing if successful
+        assert_equal(wallet.upgradewallet(169900), "")
+        new_version = wallet.getwalletinfo()["walletversion"]
+        # upgraded wallet should have version 169900
+        assert_equal(new_version, 169900)
+        # after conversion master key hash should be present
+        assert_is_hex_string(wallet.getwalletinfo()['hdseedid'])
+
+
+if __name__ == '__main__':
+    UpgradeWalletTest().main()
```