rpc: split spork manipulation logic to distinct "sporkupdate" call (#4885)

* rpc: split spork manipulation logic to distinct "sporkupdate" call

Co-authored-by: UdjinM6 <UdjinM6@users.noreply.github.com>

* docs: add release notes for dash#4885

Co-authored-by: UdjinM6 <UdjinM6@users.noreply.github.com>
Kittywhiskers Van Gogh 2022-06-18 22:22:45 +05:30 committed by GitHub
parent e090d9a9e7
commit 282b02e6b2
22 changed files with 132 additions and 122 deletions


@@ -0,0 +1,4 @@
Updated RPCs
------------
* The `spork` RPC call no longer offers both get (labelled as "basic mode") and set (labelled as "advanced mode") functionality. `spork` now only offers the "basic" (read) functionality; the "advanced" (set) functionality is exposed through the new `sporkupdate` RPC call.
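
As a rough illustration of the split (not part of the commit itself), the sketch below assumes a `node` object that exposes dashd RPCs as attributes, the way the functional-test framework's nodes do; the spork name and timestamp value are only examples:

```python
def demo_spork_rpcs(node):
    """Sketch only: `node` is assumed to expose dashd RPCs as attributes,
    as the functional-test framework's TestNode does."""
    # Read-only ("basic") queries stay on `spork`:
    values = node.spork("show")    # e.g. {"SPORK_2_INSTANTSEND_ENABLED": 0, ...}
    active = node.spork("active")  # e.g. {"SPORK_2_INSTANTSEND_ENABLED": True, ...}

    # Changing a spork ("advanced" mode) now goes through `sporkupdate`
    # and still requires the signing node to be started with -sporkkey:
    result = node.sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 4070908800)
    assert result == "success"
    return values, active
```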


@@ -164,7 +164,7 @@ static const CRPCConvertParam vRPCConvertParams[] =
{ "getmempooldescendants", 1, "verbose" },
{ "logging", 0, "include" },
{ "logging", 1, "exclude" },
{ "spork", 1, "value" },
{ "sporkupdate", 1, "value" },
{ "voteraw", 1, "tx_index" },
{ "voteraw", 5, "time" },
{ "getblockhashes", 0, "high"},


@@ -120,80 +120,85 @@ static UniValue mnsync(const JSONRPCRequest& request)
*/
static UniValue spork(const JSONRPCRequest& request)
{
if (request.params.size() == 1) {
// basic mode, show info
std::string strCommand = request.params[0].get_str();
if (strCommand == "show") {
UniValue ret(UniValue::VOBJ);
for (const auto& sporkDef : sporkDefs) {
ret.pushKV(std::string(sporkDef.name), sporkManager.GetSporkValue(sporkDef.sporkId));
}
return ret;
} else if(strCommand == "active"){
UniValue ret(UniValue::VOBJ);
for (const auto& sporkDef : sporkDefs) {
ret.pushKV(std::string(sporkDef.name), sporkManager.IsSporkActive(sporkDef.sporkId));
}
return ret;
// default help, for basic mode
RPCHelpMan{"spork",
"\nShows information about current state of sporks\n",
{
{"command", RPCArg::Type::STR, RPCArg::Optional::NO, "'show' to show all current spork values, 'active' to show which sporks are active"},
},
{
RPCResult{"For 'show'",
RPCResult::Type::OBJ_DYN, "", "keys are the sporks, and values indicates its value",
{
{RPCResult::Type::NUM, "SPORK_NAME", "The value of the specific spork with the name SPORK_NAME"},
}},
RPCResult{"For 'active'",
RPCResult::Type::OBJ_DYN, "", "keys are the sporks, and values indicates its status",
{
{RPCResult::Type::BOOL, "SPORK_NAME", "'true' for time-based sporks if spork is active and 'false' otherwise"},
}},
},
RPCExamples {
HelpExampleCli("spork", "show")
+ HelpExampleRpc("spork", "\"show\"")
}
}.Check(request);
// basic mode, show info
std::string strCommand = request.params[0].get_str();
if (strCommand == "show") {
UniValue ret(UniValue::VOBJ);
for (const auto& sporkDef : sporkDefs) {
ret.pushKV(std::string(sporkDef.name), sporkManager.GetSporkValue(sporkDef.sporkId));
}
return ret;
} else if(strCommand == "active"){
UniValue ret(UniValue::VOBJ);
for (const auto& sporkDef : sporkDefs) {
ret.pushKV(std::string(sporkDef.name), sporkManager.IsSporkActive(sporkDef.sporkId));
}
return ret;
}
if (request.fHelp || request.params.size() != 2) {
// default help, for basic mode
RPCHelpMan{"spork",
"\nShows information about current state of sporks\n",
{
{"command", RPCArg::Type::STR, RPCArg::Optional::NO, "'show' to show all current spork values, 'active' to show which sporks are active"},
},
{
RPCResult{"For 'show'",
RPCResult::Type::OBJ_DYN, "", "keys are the sporks, and values indicates its value",
{
{RPCResult::Type::NUM, "SPORK_NAME", "The value of the specific spork with the name SPORK_NAME"},
}},
RPCResult{"For 'active'",
RPCResult::Type::OBJ_DYN, "", "keys are the sporks, and values indicates its status",
{
{RPCResult::Type::BOOL, "SPORK_NAME", "'true' for time-based sporks if spork is active and 'false' otherwise"},
}},
},
RPCExamples {
HelpExampleCli("spork", "show")
+ HelpExampleRpc("spork", "\"show\"")
}}.Check(request);
} else {
// advanced mode, update spork values
SporkId nSporkID = CSporkManager::GetSporkIDByName(request.params[0].get_str());
if(nSporkID == SPORK_INVALID)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid spork name");
return NullUniValue;
}
NodeContext& node = EnsureNodeContext(request.context);
if (!node.connman)
throw JSONRPCError(RPC_CLIENT_P2P_DISABLED, "Error: Peer-to-peer functionality missing or disabled");
static UniValue sporkupdate(const JSONRPCRequest& request)
{
RPCHelpMan{"sporkupdate",
"\nUpdate the value of the specific spork. Requires \"-sporkkey\" to be set to sign the message.\n",
{
{"name", RPCArg::Type::STR, RPCArg::Optional::NO, "The name of the spork to update"},
{"value", RPCArg::Type::NUM, RPCArg::Optional::NO, "The new desired value of the spork"},
},
RPCResult{
RPCResult::Type::STR, "result", "\"success\" if spork value was updated or this help otherwise"
},
RPCExamples{
HelpExampleCli("sporkupdate", "SPORK_2_INSTANTSEND_ENABLED 4070908800")
+ HelpExampleRpc("sporkupdate", "\"SPORK_2_INSTANTSEND_ENABLED\", 4070908800")
},
}.Check(request);
// SPORK VALUE
int64_t nValue = request.params[1].get_int64();
//broadcast new spork
if(sporkManager.UpdateSpork(nSporkID, nValue, *node.connman)){
return "success";
} else {
RPCHelpMan{"spork",
"\nUpdate the value of the specific spork. Requires \"-sporkkey\" to be set to sign the message.\n",
{
{"name", RPCArg::Type::STR, RPCArg::Optional::NO, "The name of the spork to update"},
{"value", RPCArg::Type::NUM, RPCArg::Optional::NO, "The new desired value of the spork"},
},
RPCResult{
RPCResult::Type::STR, "result", "\"success\" if spork value was updated or this help otherwise"
},
RPCExamples{
HelpExampleCli("spork", "SPORK_2_INSTANTSEND_ENABLED 4070908800")
+ HelpExampleRpc("spork", "\"SPORK_2_INSTANTSEND_ENABLED\", 4070908800")
},
}.Check(request);
}
// advanced mode, update spork values
SporkId nSporkID = CSporkManager::GetSporkIDByName(request.params[0].get_str());
if (nSporkID == SPORK_INVALID) {
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid spork name");
}
NodeContext& node = EnsureNodeContext(request.context);
if (!node.connman) {
throw JSONRPCError(RPC_CLIENT_P2P_DISABLED, "Error: Peer-to-peer functionality missing or disabled");
}
// SPORK VALUE
int64_t nValue = request.params[1].get_int64();
// broadcast new spork
if (sporkManager.UpdateSpork(nSporkID, nValue, *node.connman)) {
return "success";
}
return NullUniValue;
}
@@ -1317,7 +1322,8 @@ static const CRPCCommand commands[] =
/* Dash features */
{ "dash", "mnsync", &mnsync, {} },
{ "dash", "spork", &spork, {"arg0","value"} },
{ "dash", "spork", &spork, {"command"} },
{ "dash", "sporkupdate", &sporkupdate, {"name","value"} },
/* Not shown in help */
{ "hidden", "setmocktime", &setmocktime, {"timestamp"}},


@@ -87,7 +87,7 @@ class LLMQCoinbaseCommitmentsTest(DashTestFramework):
self.nodes[0].generate(1)
oldhash = self.nodes[0].getbestblockhash()
# Have to disable ChainLocks here because they won't let you to invalidate already locked blocks
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 4070908800)
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 4070908800)
self.wait_for_sporks_same()
# Test DIP8 activation once with a pre-existing quorum and once without (we don't know in which order it will activate on mainnet)
self.test_dip8_quorum_merkle_root_activation(True)
@@ -97,8 +97,8 @@ class LLMQCoinbaseCommitmentsTest(DashTestFramework):
first_quorum = self.test_dip8_quorum_merkle_root_activation(False, True)
# Re-enable ChainLocks again
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
# Verify that the first quorum appears in MNLISTDIFF
@@ -243,13 +243,13 @@ class LLMQCoinbaseCommitmentsTest(DashTestFramework):
def test_dip8_quorum_merkle_root_activation(self, with_initial_quorum, slow_mode=False):
if with_initial_quorum:
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
# Mine one quorum before dip8 is activated
self.mine_quorum()
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 4070908800)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 4070908800)
self.wait_for_sporks_same()
cbtx = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2)["tx"][0]
@@ -269,7 +269,7 @@ class LLMQCoinbaseCommitmentsTest(DashTestFramework):
assert_equal(merkleRootQuorums, 0)
self.bump_mocktime(1)
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
# Mine quorum and verify that merkleRootQuorums has changed


@@ -31,7 +31,7 @@ class LLMQChainLocksTest(DashTestFramework):
self.activate_dip8()
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
self.log.info("Mining 4 quorums")
@@ -144,10 +144,10 @@ class LLMQChainLocksTest(DashTestFramework):
assert not node0_tip_block["chainlock"]
assert node0_tip_block["previousblockhash"] == good_tip
self.log.info("Disable LLMQ based InstantSend for a very short time (this never gets propagated to other nodes)")
self.nodes[0].spork("SPORK_2_INSTANTSEND_ENABLED", 4070908800)
self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 4070908800)
self.log.info("Now the TXs should be included")
self.nodes[0].generate(1)
self.nodes[0].spork("SPORK_2_INSTANTSEND_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 0)
self.log.info("Assert that TXs got included now")
for txid in txs:
tx = self.nodes[0].getrawtransaction(txid, 1)


@@ -22,7 +22,7 @@ class LLMQConnections(DashTestFramework):
self.set_dash_llmq_test_params(5, 3)
def run_test(self):
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
q = self.mine_quorum()
@@ -38,7 +38,7 @@ class LLMQConnections(DashTestFramework):
self.check_reconnects(2)
self.log.info("Activating SPORK_23_QUORUM_POSE")
self.nodes[0].spork("SPORK_23_QUORUM_POSE", 0)
self.nodes[0].sporkupdate("SPORK_23_QUORUM_POSE", 0)
self.wait_for_sporks_same()
self.log.info("mining one block and waiting for all members to connect to each other")
@@ -64,7 +64,7 @@ class LLMQConnections(DashTestFramework):
wait_until(lambda: self.get_mn_probe_count(mn.node, q, True) == 4)
self.log.info("Activating SPORK_21_QUORUM_ALL_CONNECTED")
self.nodes[0].spork("SPORK_21_QUORUM_ALL_CONNECTED", 0)
self.nodes[0].sporkupdate("SPORK_21_QUORUM_ALL_CONNECTED", 0)
self.wait_for_sporks_same()
self.check_reconnects(4)


@@ -121,8 +121,8 @@ class QuorumDataRecoveryTest(DashTestFramework):
def run_test(self):
node = self.nodes[0]
node.spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
node.spork("SPORK_21_QUORUM_ALL_CONNECTED", 0)
node.sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
node.sporkupdate("SPORK_21_QUORUM_ALL_CONNECTED", 0)
self.wait_for_sporks_same()
self.activate_dip8()


@@ -19,7 +19,7 @@ class LLMQDKGErrors(DashTestFramework):
def run_test(self):
self.activate_dip8()
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
self.log.info("Mine one quorum without simulating any errors")
@@ -83,13 +83,13 @@ class LLMQDKGErrors(DashTestFramework):
def heal_masternodes(self, blockCount):
# We're not testing PoSe here, so lets heal the MNs :)
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 4070908800)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 4070908800)
self.wait_for_sporks_same()
for i in range(blockCount):
self.bump_mocktime(1)
self.nodes[0].generate(1)
self.sync_all()
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()


@@ -60,7 +60,7 @@ class LLMQ_IS_CL_Conflicts(DashTestFramework):
self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn())
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
self.mine_quorum()
@@ -216,7 +216,7 @@ class LLMQ_IS_CL_Conflicts(DashTestFramework):
# Ensure spork uniqueness in multiple function runs
self.bump_mocktime(1)
# Disable ChainLocks to avoid accidental locking
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 4070908800)
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 4070908800)
self.wait_for_sporks_same()
# Send tx1, which will later conflict with the ISLOCK
@@ -268,7 +268,7 @@ class LLMQ_IS_CL_Conflicts(DashTestFramework):
assert_equal(node.getbestblockhash(), islock_tip)
# Check that the CL-ed block overrides the one with islocks
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0) # Re-enable ChainLocks to accept clsig
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 0) # Re-enable ChainLocks to accept clsig
self.test_node.send_clsig(cl) # relay clsig ASAP to prevent nodes from locking islock-ed tip
self.wait_for_sporks_same()
for node in self.nodes:


@@ -38,8 +38,8 @@ class LLMQISMigrationTest(DashTestFramework):
self.activate_dip8()
node = self.nodes[0]
node.spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
node.spork("SPORK_2_INSTANTSEND_ENABLED", 0)
node.sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
node.sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 0)
self.wait_for_sporks_same()
self.mine_quorum()


@@ -28,9 +28,9 @@ class LLMQ_IS_RetroactiveSigning(DashTestFramework):
def run_test(self):
self.activate_dip8()
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
# Turn mempool IS signing off
self.nodes[0].spork("SPORK_2_INSTANTSEND_ENABLED", 1)
self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 1)
self.wait_for_sporks_same()
self.mine_quorum()
@@ -45,18 +45,18 @@ class LLMQ_IS_RetroactiveSigning(DashTestFramework):
# are the only "neighbours" in intra-quorum connections for one of them.
self.wait_for_instantlock(txid, self.nodes[0], False, 5)
# Have to disable ChainLocks to avoid signing a block with a "safe" tx too early
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 4000000000)
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 4000000000)
self.wait_for_sporks_same()
# We have to wait in order to include tx in block
self.bump_mocktime(10 * 60 + 1)
block = self.nodes[0].generate(1)[0]
self.wait_for_instantlock(txid, self.nodes[0])
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.wait_for_sporks_same()
self.wait_for_chainlocked_block_all_nodes(block)
self.log.info("Enable mempool IS signing")
self.nodes[0].spork("SPORK_2_INSTANTSEND_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 0)
self.wait_for_sporks_same()
self.log.info("trying normal IS lock")


@@ -72,7 +72,7 @@ class LLMQQuorumRotationTest(DashTestFramework):
self.activate_dip8()
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
self.activate_dip0024(expected_activation_height=900)
@@ -170,7 +170,7 @@ class LLMQQuorumRotationTest(DashTestFramework):
self.log.info("Invalidate the quorum")
self.bump_mocktime(5)
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 4070908800)
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 4070908800)
self.wait_for_sporks_same()
self.nodes[0].invalidateblock(fallback_blockhash)
assert_equal(self.nodes[0].getbestblockhash(), quorum_blockhash)
@@ -178,7 +178,7 @@ class LLMQQuorumRotationTest(DashTestFramework):
self.log.info("Reconsider the quorum")
self.bump_mocktime(5)
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.wait_for_sporks_same()
self.nodes[0].reconsiderblock(fallback_blockhash)
wait_until(lambda: self.nodes[0].getbestblockhash() == new_quorum_blockhash, sleep=1)


@@ -27,9 +27,9 @@ class LLMQSigningTest(DashTestFramework):
def run_test(self):
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
if self.options.spork21:
self.nodes[0].spork("SPORK_21_QUORUM_ALL_CONNECTED", 0)
self.nodes[0].sporkupdate("SPORK_21_QUORUM_ALL_CONNECTED", 0)
self.wait_for_sporks_same()
self.mine_quorum()


@@ -23,7 +23,7 @@ class LLMQSimplePoSeTest(DashTestFramework):
def run_test(self):
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
# check if mining quorums with all nodes being online succeeds without punishment/banning
@@ -34,8 +34,8 @@ class LLMQSimplePoSeTest(DashTestFramework):
self.repair_masternodes(False)
self.nodes[0].spork("SPORK_21_QUORUM_ALL_CONNECTED", 0)
self.nodes[0].spork("SPORK_23_QUORUM_POSE", 0)
self.nodes[0].sporkupdate("SPORK_21_QUORUM_ALL_CONNECTED", 0)
self.nodes[0].sporkupdate("SPORK_23_QUORUM_POSE", 0)
self.wait_for_sporks_same()
self.reset_probe_timeouts()
@@ -52,7 +52,7 @@ class LLMQSimplePoSeTest(DashTestFramework):
self.test_banning(self.force_old_mn_proto, 3)
# With PoSe off there should be no punishing for non-reachable and outdated nodes
self.nodes[0].spork("SPORK_23_QUORUM_POSE", 4070908800)
self.nodes[0].sporkupdate("SPORK_23_QUORUM_POSE", 4070908800)
self.wait_for_sporks_same()
self.repair_masternodes(True)


@@ -81,8 +81,8 @@ class MultiKeySporkTest(BitcoinTestFramework):
self.bump_mocktime(1)
# first and second signers set spork value
self.nodes[0].spork(spork_name, 1)
self.nodes[1].spork(spork_name, 1)
self.nodes[0].sporkupdate(spork_name, 1)
self.nodes[1].sporkupdate(spork_name, 1)
# spork change requires at least 3 signers
time.sleep(10)
for node in self.nodes:
@@ -98,7 +98,7 @@ class MultiKeySporkTest(BitcoinTestFramework):
connect_nodes(self.nodes[0], i)
# third signer set spork value
self.nodes[2].spork(spork_name, 1)
self.nodes[2].sporkupdate(spork_name, 1)
# now spork state is changed
for node in self.nodes:
wait_until(lambda: self.get_test_spork_value(node, spork_name) == 1, sleep=0.1, timeout=10)
@@ -117,9 +117,9 @@ class MultiKeySporkTest(BitcoinTestFramework):
self.bump_mocktime(1)
# now set the spork again with other signers to test
# old and new spork messages interaction
self.nodes[2].spork(spork_name, final_value)
self.nodes[3].spork(spork_name, final_value)
self.nodes[4].spork(spork_name, final_value)
self.nodes[2].sporkupdate(spork_name, final_value)
self.nodes[3].sporkupdate(spork_name, final_value)
self.nodes[4].sporkupdate(spork_name, final_value)
for node in self.nodes:
wait_until(lambda: self.get_test_spork_value(node, spork_name) == final_value, sleep=0.1, timeout=10)


@@ -32,7 +32,7 @@ class SporkTest(BitcoinTestFramework):
else:
value = 4070908800
# use InstantSend spork for tests
node.spork('SPORK_2_INSTANTSEND_ENABLED', value)
node.sporkupdate("SPORK_2_INSTANTSEND_ENABLED", value)
def run_test(self):
spork_default_state = self.get_test_spork_state(self.nodes[0])


@@ -127,7 +127,7 @@ class DashZMQTest (DashTestFramework):
self.zmq_context = zmq.Context()
# Initialize the network
self.activate_dip8()
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
# Create an LLMQ for testing
self.quorum_type = 100 # llmq_test


@@ -21,7 +21,7 @@ class InstantSendTest(DashTestFramework):
self.sender_idx = 3
def run_test(self):
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
self.mine_quorum()


@@ -395,8 +395,8 @@ class QuorumDataMessagesTest(DashTestFramework):
"0000000000000000000000000000000000000000000000000000000000000000")
# Enable DKG and disable ChainLocks
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 4070908800)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 4070908800)
self.wait_for_sporks_same()
quorum_hash = self.mine_quorum()


@@ -24,7 +24,7 @@ class RPCVerifyChainLockTest(DashTestFramework):
node0 = self.nodes[0]
node1 = self.nodes[1]
self.activate_dip8()
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
self.mine_quorum()
self.wait_for_chainlocked_block(node0, node0.generate(1)[0])


@@ -31,7 +31,7 @@ class RPCVerifyISLockTest(DashTestFramework):
def run_test(self):
node = self.nodes[0]
node.spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
node.sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
self.mine_quorum()


@@ -958,9 +958,9 @@ class DashTestFramework(BitcoinTestFramework):
force_finish_mnsync(self.nodes[i + 1])
# Enable InstantSend (including block filtering) and ChainLocks by default
self.nodes[0].spork("SPORK_2_INSTANTSEND_ENABLED", 0)
self.nodes[0].spork("SPORK_3_INSTANTSEND_BLOCK_FILTERING", 0)
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 0)
self.nodes[0].sporkupdate("SPORK_3_INSTANTSEND_BLOCK_FILTERING", 0)
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.wait_for_sporks_same()
self.bump_mocktime(1)