#!/usr/bin/env python3
# Copyright (c) 2018-2024 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests around dash governance."""

import json

from test_framework.messages import uint256_to_string
from test_framework.test_framework import DashTestFramework
from test_framework.governance import have_trigger_for_height, prepare_object
from test_framework.util import assert_equal, satoshi_round, set_node_times, wait_until_helper

class DashGovernanceTest(DashTestFramework):
    def set_test_params(self):
        self.v20_start_time = 1417713500 + 80
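        # Test topology: set_dash_test_params(6, 5, ...) spins up 6 nodes, 5 of which are
        # masternodes (assumption: the first two arguments are total node count and
        # masternode count, per DashTestFramework conventions); every node gets the same
        # -budgetparams and -vbparams overrides.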
        # using adjusted v20 deployment params to test an edge case where superblock maturity window is equal to deployment window size
        self.set_dash_test_params(6, 5, [["-budgetparams=10:10:10", f"-vbparams=v20:{self.v20_start_time}:999999999999:0:10:8:6:5:0"]] * 6, fast_dip3_enforcement=True)

    def check_superblockbudget(self, v20_active):
        v20_state = self.nodes[0].getblockchaininfo()["softforks"]["v20"]
        assert_equal(v20_state["active"], v20_active)
        assert_equal(self.nodes[0].getsuperblockbudget(200), self.expected_old_budget)
        assert_equal(self.nodes[0].getsuperblockbudget(220), self.expected_old_budget)
        if v20_state["bip9"]["status"] == "locked_in" or v20_state["bip9"]["status"] == "active":
            assert_equal(self.nodes[0].getsuperblockbudget(240), self.expected_v20_budget)
            assert_equal(self.nodes[0].getsuperblockbudget(260), self.expected_v20_budget)
            assert_equal(self.nodes[0].getsuperblockbudget(280), self.expected_v20_budget)
        else:
            assert_equal(self.nodes[0].getsuperblockbudget(240), self.expected_old_budget)
            assert_equal(self.nodes[0].getsuperblockbudget(260), self.expected_old_budget)
            assert_equal(self.nodes[0].getsuperblockbudget(280), self.expected_old_budget)

    def check_superblock(self):
        # Make sure the Superblock has only payments that fit into the budget
        # p0 must always be included because it has the most votes
        # p1 and p2 have an equal number of votes (but fewer votes than p0)
        # so only one of them can be included (depends on proposal hashes).

        coinbase_outputs = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2)["tx"][0]["vout"]
        payments_found = 0
        for txout in coinbase_outputs:
            if txout["value"] == self.p0_amount and txout["scriptPubKey"]["address"] == self.p0_payout_address:
                payments_found += 1
            if txout["value"] == self.p1_amount and txout["scriptPubKey"]["address"] == self.p1_payout_address:
                if self.p1_hash > self.p2_hash:
                    payments_found += 1
                else:
                    assert False
            if txout["value"] == self.p2_amount and txout["scriptPubKey"]["address"] == self.p2_payout_address:
                if self.p2_hash > self.p1_hash:
                    payments_found += 1
                else:
                    assert False

        assert_equal(payments_found, 2)

    def run_test(self):
        governance_info = self.nodes[0].getgovernanceinfo()
        assert_equal(governance_info['governanceminquorum'], 1)
        assert_equal(governance_info['proposalfee'], 1)
        assert_equal(governance_info['superblockcycle'], 20)
        assert_equal(governance_info['superblockmaturitywindow'], 10)
        assert_equal(governance_info['lastsuperblock'], 120)
        assert_equal(governance_info['nextsuperblock'], governance_info['lastsuperblock'] + governance_info['superblockcycle'])
        assert_equal(governance_info['governancebudget'], 1000)

        map_vote_outcomes = {
            0: "none",
            1: "yes",
            2: "no",
            3: "abstain"
        }
        map_vote_signals = {
            0: "none",
            1: "funding",
            2: "valid",
            3: "delete",
            4: "endorsed"
        }
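        # Regtest superblock parameters: these mirror the getgovernanceinfo values asserted
        # above (a 20-block superblock cycle with a 10-block maturity window) and drive all
        # of the block-height arithmetic below.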
        sb_cycle = 20
        sb_maturity_window = 10
        sb_immaturity_window = sb_cycle - sb_maturity_window
        self.expected_old_budget = satoshi_round("928.57142840")
        self.expected_v20_budget = satoshi_round("18.57142860")

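        # Spork setup: 4070908800 is a far-future timestamp, which effectively disables
        # InstantSend, while 0 enables superblock payments immediately.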
        self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 4070908800)
        self.nodes[0].sporkupdate("SPORK_9_SUPERBLOCKS_ENABLED", 0)
        self.wait_for_sporks_same()

        assert_equal(len(self.nodes[0].gobject("list-prepared")), 0)

        # TODO: drop these extra 80 blocks - doesn't work without them
        self.nodes[0].generate(80)
        self.bump_mocktime(80)

        self.nodes[0].generate(3)
        self.bump_mocktime(3)
        self.sync_blocks()
        assert_equal(self.nodes[0].getblockcount(), 210)
        assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["bip9"]["status"], "defined")
        self.check_superblockbudget(False)

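        # Jump mocktime to the configured v20 start time so that the BIP9 deployment can
        # move from "defined" to "started" in the next signalling window.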
        assert self.mocktime < self.v20_start_time
        self.mocktime = self.v20_start_time
        set_node_times(self.nodes, self.mocktime)

        self.nodes[0].generate(10)
        self.bump_mocktime(10)
        self.sync_blocks()
        assert_equal(self.nodes[0].getblockcount(), 220)
        assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["bip9"]["status"], "started")
        self.check_superblockbudget(False)

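        # Create three proposals. p2 is sized so that p1 + p2 consume the entire reduced
        # (v20) budget, which means the superblock can never pay all three proposals at once.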
        proposal_time = self.mocktime
        self.p0_payout_address = self.nodes[0].getnewaddress()
        self.p1_payout_address = self.nodes[0].getnewaddress()
        self.p2_payout_address = self.nodes[0].getnewaddress()
        self.p0_amount = satoshi_round("1.1")
        self.p1_amount = satoshi_round("3.3")
        self.p2_amount = self.expected_v20_budget - self.p1_amount

        p0_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_0", self.p0_amount, self.p0_payout_address)
        p1_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_1", self.p1_amount, self.p1_payout_address)
        p2_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_2", self.p2_amount, self.p2_payout_address)

        self.nodes[0].generate(6)
        self.bump_mocktime(6)
        self.sync_blocks()

        assert_equal(len(self.nodes[0].gobject("list-prepared")), 3)
        assert_equal(len(self.nodes[0].gobject("list")), 0)

        self.p0_hash = self.nodes[0].gobject("submit", "0", 1, proposal_time, p0_collateral_prepare["hex"], p0_collateral_prepare["collateralHash"])
        self.p1_hash = self.nodes[0].gobject("submit", "0", 1, proposal_time, p1_collateral_prepare["hex"], p1_collateral_prepare["collateralHash"])
        self.p2_hash = self.nodes[0].gobject("submit", "0", 1, proposal_time, p2_collateral_prepare["hex"], p2_collateral_prepare["collateralHash"])

        assert_equal(len(self.nodes[0].gobject("list")), 3)

        assert_equal(self.nodes[0].gobject("get", self.p0_hash)["FundingResult"]["YesCount"], 0)
        assert_equal(self.nodes[0].gobject("get", self.p0_hash)["FundingResult"]["NoCount"], 0)

        assert_equal(self.nodes[0].gobject("get", self.p1_hash)["FundingResult"]["YesCount"], 0)
        assert_equal(self.nodes[0].gobject("get", self.p1_hash)["FundingResult"]["NoCount"], 0)

        assert_equal(self.nodes[0].gobject("get", self.p2_hash)["FundingResult"]["YesCount"], 0)
        assert_equal(self.nodes[0].gobject("get", self.p2_hash)["FundingResult"]["NoCount"], 0)

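        # Vote on the proposals: "vote-alias" casts a vote from one specific masternode,
        # "vote-many" from every masternode controlled by this wallet. The asserts below
        # pin the expected net tallies (e.g. mn_count - 1 YES / 1 NO for p0).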
        self.nodes[0].gobject("vote-alias", self.p0_hash, map_vote_signals[1], map_vote_outcomes[2], self.mninfo[0].proTxHash)
        self.nodes[0].gobject("vote-many", self.p0_hash, map_vote_signals[1], map_vote_outcomes[1])
        assert_equal(self.nodes[0].gobject("get", self.p0_hash)["FundingResult"]["YesCount"], self.mn_count - 1)
        assert_equal(self.nodes[0].gobject("get", self.p0_hash)["FundingResult"]["NoCount"], 1)

        self.nodes[0].gobject("vote-alias", self.p1_hash, map_vote_signals[1], map_vote_outcomes[2], self.mninfo[0].proTxHash)
        self.nodes[0].gobject("vote-alias", self.p1_hash, map_vote_signals[1], map_vote_outcomes[2], self.mninfo[1].proTxHash)
        self.nodes[0].gobject("vote-many", self.p1_hash, map_vote_signals[1], map_vote_outcomes[1])
        assert_equal(self.nodes[0].gobject("get", self.p1_hash)["FundingResult"]["YesCount"], self.mn_count - 2)
        assert_equal(self.nodes[0].gobject("get", self.p1_hash)["FundingResult"]["NoCount"], 2)

        self.nodes[0].gobject("vote-alias", self.p2_hash, map_vote_signals[1], map_vote_outcomes[2], self.mninfo[0].proTxHash)
        self.nodes[0].gobject("vote-alias", self.p2_hash, map_vote_signals[1], map_vote_outcomes[2], self.mninfo[1].proTxHash)
        self.nodes[0].gobject("vote-many", self.p2_hash, map_vote_signals[1], map_vote_outcomes[1])
        assert_equal(self.nodes[0].gobject("get", self.p2_hash)["FundingResult"]["YesCount"], self.mn_count - 2)
        assert_equal(self.nodes[0].gobject("get", self.p2_hash)["FundingResult"]["NoCount"], 2)

        assert_equal(len(self.nodes[0].gobject("list", "valid", "triggers")), 0)

        block_count = self.nodes[0].getblockcount()

        # Move until 1 block before the Superblock maturity window starts
        n = sb_immaturity_window - block_count % sb_cycle
        # v20 is expected to be active since block 240
        assert block_count + n < 240
        for _ in range(n - 1):
            self.nodes[0].generate(1)
            self.bump_mocktime(1)
            self.sync_blocks()
            self.check_superblockbudget(False)

        assert_equal(len(self.nodes[0].gobject("list", "valid", "triggers")), 0)

        # Detect payee node
        mn_list = self.nodes[0].protx("list", "registered", True)
        height_protx_list = []
        for mn in mn_list:
            height_protx_list.append((mn['state']['lastPaidHeight'], mn['proTxHash']))

        height_protx_list = sorted(height_protx_list)
        _, mn_payee_protx = height_protx_list[1]

        payee_idx = None
        for mn in self.mninfo:
            if mn.proTxHash == mn_payee_protx:
                payee_idx = mn.nodeIdx
                break
        assert payee_idx is not None

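        # Isolating the payee means the trigger it creates cannot propagate to the rest of
        # the network, which later yields two competing triggers once the node is reconnected.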
        # Isolate payee node and create a trigger
        self.isolate_node(payee_idx)
        isolated = self.nodes[payee_idx]

        # Move 1 block inside the Superblock maturity window on the isolated node
        isolated.generate(1)
        self.bump_mocktime(1)
        # The isolated "winner" should submit a new trigger and vote for it
        self.wait_until(lambda: len(isolated.gobject("list", "valid", "triggers")) == 1, timeout=5)
        isolated_trigger_hash = list(isolated.gobject("list", "valid", "triggers").keys())[0]
        self.wait_until(lambda: list(isolated.gobject("list", "valid", "triggers").values())[0]['YesCount'] == 1, timeout=5)
        more_votes = wait_until_helper(lambda: list(isolated.gobject("list", "valid", "triggers").values())[0]['YesCount'] > 1, timeout=5, do_assert=False)
        assert_equal(more_votes, False)

        # Move 1 block enabling the Superblock maturity window on non-isolated nodes
        self.nodes[0].generate(1)
        self.bump_mocktime(1)
        assert_equal(self.nodes[0].getblockcount(), 230)
        assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["bip9"]["status"], "locked_in")
        self.check_superblockbudget(False)

        # The "winner" should submit a new trigger and vote for it, but it's isolated so no triggers should be found
        has_trigger = wait_until_helper(lambda: len(self.nodes[0].gobject("list", "valid", "triggers")) >= 1, timeout=5, do_assert=False)
        assert_equal(has_trigger, False)

        # Move 1 block inside the Superblock maturity window on non-isolated nodes
        self.nodes[0].generate(1)
        self.bump_mocktime(1)

        # There is now a new "winner" who should submit a new trigger and vote for it
        self.wait_until(lambda: len(self.nodes[0].gobject("list", "valid", "triggers")) == 1, timeout=5)
        winning_trigger_hash = list(self.nodes[0].gobject("list", "valid", "triggers").keys())[0]
        self.wait_until(lambda: list(self.nodes[0].gobject("list", "valid", "triggers").values())[0]['YesCount'] == 1, timeout=5)
        more_votes = wait_until_helper(lambda: list(self.nodes[0].gobject("list", "valid", "triggers").values())[0]['YesCount'] > 1, timeout=5, do_assert=False)
        assert_equal(more_votes, False)

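        # The trigger's DataString carries "payment_amounts" as a "|"-separated list; the
        # check below ensures the amounts keep their full 8-decimal precision.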
        # Make sure amounts aren't trimmed
        payment_amounts_expected = [str(satoshi_round(str(self.p0_amount))), str(satoshi_round(str(self.p1_amount))), str(satoshi_round(str(self.p2_amount)))]
        data_string = list(self.nodes[0].gobject("list", "valid", "triggers").values())[0]["DataString"]
        payment_amounts_trigger = json.loads(data_string)["payment_amounts"].split("|")
        for amount_str in payment_amounts_trigger:
            assert amount_str in payment_amounts_expected

        # Move another block inside the Superblock maturity window on non-isolated nodes
        self.nodes[0].generate(1)
        self.bump_mocktime(1)

        # Every non-isolated MN should vote for the same trigger now, no new triggers should be created
        self.wait_until(lambda: list(self.nodes[0].gobject("list", "valid", "triggers").values())[0]['YesCount'] == self.mn_count - 1, timeout=5)
        more_triggers = wait_until_helper(lambda: len(self.nodes[0].gobject("list", "valid", "triggers")) > 1, timeout=5, do_assert=False)
        assert_equal(more_triggers, False)

        self.reconnect_isolated_node(payee_idx, 0)
        # self.connect_nodes(0, payee_idx)
        self.sync_blocks()

        # re-sync helper
        def sync_gov(node):
            self.bump_mocktime(1)
            return node.mnsync("status")["IsSynced"]

        # make sure isolated node is fully synced at this point
        self.wait_until(lambda: sync_gov(isolated))
        # let all fulfilled requests expire for re-sync to work correctly
        self.bump_mocktime(5 * 60)

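        # mnsync("reset") followed by mnsync("next") forces every node to redo its governance
        # sync, so the isolated trigger and the network's trigger end up known everywhere.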
        for node in self.nodes:
            # Force sync
            node.mnsync("reset")
            # fast-forward to governance sync
            node.mnsync("next")
            self.wait_until(lambda: sync_gov(node))

        # Should see two triggers now
        self.wait_until(lambda: len(isolated.gobject("list", "valid", "triggers")) == 2, timeout=5)
        self.wait_until(lambda: len(self.nodes[0].gobject("list", "valid", "triggers")) == 2, timeout=5)
        more_triggers = wait_until_helper(lambda: len(self.nodes[0].gobject("list", "valid", "triggers")) > 2, timeout=5, do_assert=False)
        assert_equal(more_triggers, False)

        # Move another block inside the Superblock maturity window
        self.nodes[0].generate(1)
        self.bump_mocktime(1)
        self.sync_blocks()

        # Should see NO votes on both triggers now
        self.wait_until(lambda: self.nodes[0].gobject("list", "valid", "triggers")[winning_trigger_hash]['NoCount'] == 1, timeout=5)
        self.wait_until(lambda: self.nodes[0].gobject("list", "valid", "triggers")[isolated_trigger_hash]['NoCount'] == self.mn_count - 1, timeout=5)

        block_count = self.nodes[0].getblockcount()
        n = sb_cycle - block_count % sb_cycle

        # Move remaining n blocks until the actual Superblock
        for i in range(n):
            self.nodes[0].generate(1)
            self.bump_mocktime(1)
            self.sync_blocks()
            # comparing to 239 because bip9 forks are active when the tip is one block behind the activation height
            self.check_superblockbudget(block_count + i + 1 >= 239)

        self.check_superblockbudget(True)
        self.check_superblock()

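        # Note: the assert_debug_log context below lists the NO-FUNDING message as a log line
        # that must NOT appear (assumption: its second argument is the unexpected-messages
        # list), matching the vote-count asserts that follow.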
        # Move a few blocks past the recent superblock height and make sure we have no new votes
        for _ in range(5):
            with self.nodes[1].assert_debug_log("", [f"Voting NO-FUNDING for trigger:{winning_trigger_hash} success"]):
                self.nodes[0].generate(1)
                self.bump_mocktime(1)
                self.sync_blocks()
                # Votes on both triggers should NOT change
                assert_equal(self.nodes[0].gobject("list", "valid", "triggers")[winning_trigger_hash]['NoCount'], 1)
                assert_equal(self.nodes[0].gobject("list", "valid", "triggers")[isolated_trigger_hash]['NoCount'], self.mn_count - 1)

        block_count = self.nodes[0].getblockcount()
        n = sb_cycle - block_count % sb_cycle

        # Move remaining n blocks until the next Superblock
        for _ in range(n - 1):
            self.nodes[0].generate(1)
            self.bump_mocktime(1)
            self.sync_blocks()
        # Wait for new trigger and votes
        self.wait_until(lambda: have_trigger_for_height(self.nodes, 260))
        # Mine superblock
        self.nodes[0].generate(1)
        self.bump_mocktime(1)
        self.sync_blocks()
        assert_equal(self.nodes[0].getblockcount(), 260)
        assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["bip9"]["status"], "active")

        # Mine and check a couple more superblocks
        for i in range(2):
            for _ in range(sb_cycle - 1):
                self.nodes[0].generate(1)
                self.bump_mocktime(1)
                self.sync_blocks()
            # Wait for new trigger and votes
            sb_block_height = 260 + (i + 1) * sb_cycle
            self.wait_until(lambda: have_trigger_for_height(self.nodes, sb_block_height))
            # Mine superblock
            self.nodes[0].generate(1)
            self.bump_mocktime(1)
            self.sync_blocks()
            assert_equal(self.nodes[0].getblockcount(), sb_block_height)
            assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["bip9"]["status"], "active")
            self.check_superblockbudget(True)
            self.check_superblock()


if __name__ == '__main__':
    DashGovernanceTest().main()