2019-01-23 07:08:13 +01:00
|
|
|
#!/usr/bin/env python3
|
2021-04-20 21:33:02 +02:00
|
|
|
# Copyright (c) 2015-2021 The Dash Core developers
|
2019-01-23 07:08:13 +01:00
|
|
|
# Distributed under the MIT software license, see the accompanying
|
|
|
|
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
|
|
|
|
|
|
|
'''
|
2020-07-17 01:44:20 +02:00
|
|
|
feature_llmq_chainlocks.py
|
2019-01-23 07:08:13 +01:00
|
|
|
|
|
|
|
Checks LLMQ-based ChainLocks
|
|
|
|
|
|
|
|
'''
|
|
|
|
|
Merge #13054: tests: Enable automatic detection of undefined names in Python tests scripts. Remove wildcard imports.
68400d8b96 tests: Use explicit imports (practicalswift)
Pull request description:
Enable automatic detection of undefined names in Python tests scripts. Remove wildcard imports.
Wildcard imports make it unclear which names are present in the namespace, confusing both readers and many automated tools.
An additional benefit of not using wildcard imports in tests scripts is that readers of a test script then can infer the rough testing scope just by looking at the imports.
Before this commit:
```
$ contrib/devtools/lint-python.sh | head -10
./test/functional/feature_rbf.py:8:1: F403 'from test_framework.util import *' used; unable to detect undefined names
./test/functional/feature_rbf.py:9:1: F403 'from test_framework.script import *' used; unable to detect undefined names
./test/functional/feature_rbf.py:10:1: F403 'from test_framework.mininode import *' used; unable to detect undefined names
./test/functional/feature_rbf.py:15:12: F405 bytes_to_hex_str may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:17:58: F405 CScript may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:25:13: F405 COIN may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:26:31: F405 satoshi_round may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:26:60: F405 COIN may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:30:41: F405 satoshi_round may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:30:68: F405 COIN may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
$
```
After this commit:
```
$ contrib/devtools/lint-python.sh | head -10
$
```
Tree-SHA512: 3f826d39cffb6438388e5efcb20a9622ff8238247e882d68f7b38609877421b2a8e10e9229575f8eb6a8fa42dec4256986692e92922c86171f750a0e887438d9
2018-08-13 14:24:43 +02:00
|
|
|
import time
|
|
|
|
|
|
|
|
from test_framework.test_framework import DashTestFramework
|
|
|
|
from test_framework.util import connect_nodes, isolate_node, reconnect_isolated_node
|
|
|
|
|
|
|
|
|
class LLMQChainLocksTest(DashTestFramework):
    def set_test_params(self):
        """Configure the test network: 4 nodes, 3 of which are masternodes.

        fast_dip3_enforcement=True activates DIP3 quickly in regtest so
        quorums can be mined without waiting for the normal activation window.
        """
        self.set_dash_test_params(4, 3, fast_dip3_enforcement=True)
|
    def run_test(self):
        """Exercise LLMQ-based ChainLocks: creation, enforcement and recovery.

        Scenario outline:
          1. Mine quorums, verify blocks get chainlocked.
          2. Isolate node0 in several ways and verify the chainlocked chain
             always wins over a (possibly longer) non-locked fork.
          3. Verify transactions without InstantSend locks are treated as
             "unsafe" and excluded from blocks until IS is bypassed.
        """
        # Connect all nodes to node1 so that we always have the whole network connected
        # Otherwise only masternode connections will be established between nodes, which won't propagate TXs/blocks
        # Usually node0 is the one that does this, but in this test we isolate it multiple times
        for i in range(len(self.nodes)):
            if i != 1:
                connect_nodes(self.nodes[i], 1)

        self.activate_dip8()

        # Spork value 0 enables the DKG spork network-wide
        self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
        self.wait_for_sporks_same()

        self.log.info("Mining 4 quorums")
        for i in range(4):
            self.mine_quorum()

        self.log.info("Mine single block, wait for chainlock")
        self.nodes[0].generate(1)
        self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())

        self.log.info("Mine many blocks, wait for chainlock")
        self.nodes[0].generate(20)
        # We need more time here due to 20 blocks being generated at once
        self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash(), timeout=30)

        self.log.info("Assert that all blocks up until the tip are chainlocked")
        for h in range(1, self.nodes[0].getblockcount()):
            block = self.nodes[0].getblock(self.nodes[0].getblockhash(h))
            assert(block['chainlock'])

        self.log.info("Isolate node, mine on another, and reconnect")
        isolate_node(self.nodes[0])
        # Mine to node0's address so node0 keeps its wallet balance for later txs
        node0_mining_addr = self.nodes[0].getnewaddress()
        node0_tip = self.nodes[0].getbestblockhash()
        self.nodes[1].generatetoaddress(5, node0_mining_addr)
        self.wait_for_chainlocked_block(self.nodes[1], self.nodes[1].getbestblockhash())
        # Isolated node must not have seen the new blocks
        assert(self.nodes[0].getbestblockhash() == node0_tip)
        reconnect_isolated_node(self.nodes[0], 1)
        self.nodes[1].generatetoaddress(1, node0_mining_addr)
        self.wait_for_chainlocked_block_all_nodes(self.nodes[1].getbestblockhash())

        self.log.info("Isolate node, mine on both parts of the network, and reconnect")
        isolate_node(self.nodes[0])
        bad_tip = self.nodes[0].generate(5)[-1]
        self.nodes[1].generatetoaddress(1, node0_mining_addr)
        good_tip = self.nodes[1].getbestblockhash()
        self.wait_for_chainlocked_block(self.nodes[1], good_tip)
        # node0's isolated fork can't be chainlocked
        assert(not self.nodes[0].getblock(self.nodes[0].getbestblockhash())["chainlock"])
        reconnect_isolated_node(self.nodes[0], 1)
        self.nodes[1].generatetoaddress(1, node0_mining_addr)
        self.wait_for_chainlocked_block_all_nodes(self.nodes[1].getbestblockhash())
        # Both nodes must have reorged onto the chainlocked chain despite node0's longer fork
        assert(self.nodes[0].getblock(self.nodes[0].getbestblockhash())["previousblockhash"] == good_tip)
        assert(self.nodes[1].getblock(self.nodes[1].getbestblockhash())["previousblockhash"] == good_tip)

        self.log.info("The tip mined while this node was isolated should be marked conflicting now")
        found = False
        for tip in self.nodes[0].getchaintips(2):
            if tip["hash"] == bad_tip:
                assert(tip["status"] == "conflicting")
                found = True
                break
        assert(found)

        self.log.info("Keep node connected and let it try to reorg the chain")
        good_tip = self.nodes[0].getbestblockhash()
        self.log.info("Restart it so that it forgets all the chainlock messages from the past")
        self.stop_node(0)
        self.start_node(0)
        connect_nodes(self.nodes[0], 1)
        assert(self.nodes[0].getbestblockhash() == good_tip)
        self.nodes[0].invalidateblock(good_tip)
        self.log.info("Now try to reorg the chain")
        self.nodes[0].generate(2)
        # Give the fork a chance to propagate; node1 must stick to the locked tip
        time.sleep(6)
        assert(self.nodes[1].getbestblockhash() == good_tip)
        bad_tip = self.nodes[0].generate(2)[-1]
        time.sleep(6)
        # node0 (which forgot the chainlocks) follows its own fork, node1 does not
        assert(self.nodes[0].getbestblockhash() == bad_tip)
        assert(self.nodes[1].getbestblockhash() == good_tip)

        self.log.info("Now let the node which is on the wrong chain reorg back to the locked chain")
        self.nodes[0].reconsiderblock(good_tip)
        assert(self.nodes[0].getbestblockhash() != good_tip)
        good_fork = good_tip
        good_tip = self.nodes[1].generatetoaddress(1, node0_mining_addr)[-1] # this should mark bad_tip as conflicting
        self.wait_for_chainlocked_block_all_nodes(good_tip)
        assert(self.nodes[0].getbestblockhash() == good_tip)
        found = False
        for tip in self.nodes[0].getchaintips(2):
            if tip["hash"] == bad_tip:
                assert(tip["status"] == "conflicting")
                found = True
                break
        assert(found)

        self.log.info("Should switch to the best non-conflicting tip (not to the most work chain) on restart")
        # Sanity check: the conflicting fork actually has more work than the locked chain
        assert(int(self.nodes[0].getblock(bad_tip)["chainwork"], 16) > int(self.nodes[1].getblock(good_tip)["chainwork"], 16))
        self.stop_node(0)
        self.start_node(0)
        self.nodes[0].invalidateblock(good_fork)
        self.stop_node(0)
        self.start_node(0)
        time.sleep(1)
        assert(self.nodes[0].getbestblockhash() == good_tip)

        self.log.info("Isolate a node and let it create some transactions which won't get IS locked")
        isolate_node(self.nodes[0])
        txs = []
        for i in range(3):
            txs.append(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1))
        txs += self.create_chained_txs(self.nodes[0], 1)
        self.log.info("Assert that after block generation these TXs are NOT included (as they are \"unsafe\")")
        node0_tip = self.nodes[0].generate(1)[-1]
        for txid in txs:
            tx = self.nodes[0].getrawtransaction(txid, 1)
            # No "confirmations" field means the tx is still in the mempool
            assert("confirmations" not in tx)
        time.sleep(1)
        node0_tip_block = self.nodes[0].getblock(node0_tip)
        assert(not node0_tip_block["chainlock"])
        assert(node0_tip_block["previousblockhash"] == good_tip)
        self.log.info("Disable LLMQ based InstantSend for a very short time (this never gets propagated to other nodes)")
        # NOTE(review): 4070908800 looks like a far-future activation timestamp,
        # i.e. the spork is effectively turned off — confirm against spork semantics
        self.nodes[0].spork("SPORK_2_INSTANTSEND_ENABLED", 4070908800)
        self.log.info("Now the TXs should be included")
        self.nodes[0].generate(1)
        self.nodes[0].spork("SPORK_2_INSTANTSEND_ENABLED", 0)
        self.log.info("Assert that TXs got included now")
        for txid in txs:
            tx = self.nodes[0].getrawtransaction(txid, 1)
            assert("confirmations" in tx and tx["confirmations"] > 0)
        # Enable network on first node again, which will cause the blocks to propagate and IS locks to happen retroactively
        # for the mined TXs, which will then allow the network to create a CLSIG
        self.log.info("Re-enable network on first node and wait for chainlock")
        reconnect_isolated_node(self.nodes[0], 1)
        self.wait_for_chainlocked_block(self.nodes[0], self.nodes[0].getbestblockhash(), timeout=30)
|
|
2019-03-19 11:55:51 +01:00
|
|
|
def create_chained_txs(self, node, amount):
|
|
|
|
txid = node.sendtoaddress(node.getnewaddress(), amount)
|
|
|
|
tx = node.getrawtransaction(txid, 1)
|
|
|
|
inputs = []
|
|
|
|
valueIn = 0
|
|
|
|
for txout in tx["vout"]:
|
|
|
|
inputs.append({"txid": txid, "vout": txout["n"]})
|
|
|
|
valueIn += txout["value"]
|
|
|
|
outputs = {
|
|
|
|
node.getnewaddress(): round(float(valueIn) - 0.0001, 6)
|
|
|
|
}
|
|
|
|
|
|
|
|
rawtx = node.createrawtransaction(inputs, outputs)
|
2020-12-11 03:25:55 +01:00
|
|
|
rawtx = node.signrawtransactionwithwallet(rawtx)
|
2019-03-19 11:55:51 +01:00
|
|
|
rawtxid = node.sendrawtransaction(rawtx["hex"])
|
|
|
|
|
|
|
|
return [txid, rawtxid]
|
|
|
|
|
# Standard functional-test entry point: instantiate the test and run it
if __name__ == '__main__':
    LLMQChainLocksTest().main()