#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test descendant package tracking code."""

from decimal import Decimal

from test_framework.messages import COIN
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, satoshi_round

# default limits
MAX_ANCESTORS = 25
MAX_DESCENDANTS = 25
# custom limits for node1
MAX_ANCESTORS_CUSTOM = 5
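# Note: 25 mirrors the node's default ancestor/descendant package limits, so
# node0 runs with stock policy while node1 (see extra_args below) lowers its
# ancestor limit to MAX_ANCESTORS_CUSTOM and the two mempools diverge.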


class MempoolPackagesTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.extra_args = [
            ["-maxorphantxsize=1000"],
            ["-maxorphantxsize=1000", "-limitancestorcount={}".format(MAX_ANCESTORS_CUSTOM)],
        ]

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    # Build a transaction that spends parent_txid:vout
    # Return amount sent
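    # The helper splits (value - fee) evenly across num_outputs fresh wallet
    # addresses, so callers can keep chaining by spending any of the new
    # outputs with the returned per-output amount.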
    def chain_transaction(self, node, parent_txid, vout, value, fee, num_outputs):
        send_value = satoshi_round((value - fee)/num_outputs)
        inputs = [ {'txid' : parent_txid, 'vout' : vout} ]
        outputs = {}
        for i in range(num_outputs):
            outputs[node.getnewaddress()] = send_value
        rawtx = node.createrawtransaction(inputs, outputs)
        signedtx = node.signrawtransactionwithwallet(rawtx)
        txid = node.sendrawtransaction(signedtx['hex'])
        fulltx = node.getrawtransaction(txid, 1)
        assert len(fulltx['vout']) == num_outputs  # make sure we didn't generate a change output
        return (txid, send_value)

    def run_test(self):
        # Mine some blocks and have them mature.
        self.nodes[0].generate(101)
        utxo = self.nodes[0].listunspent(10)
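        # 101 blocks make the first coinbase spendable (coinbase outputs mature
        # after 100 confirmations); listunspent(10) restricts the funding UTXOs
        # to ones with at least 10 confirmations.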
        txid = utxo[0]['txid']
        vout = utxo[0]['vout']
        value = utxo[0]['amount']

        fee = Decimal("0.0001")
        # MAX_ANCESTORS transactions off a confirmed tx should be fine
        chain = []
        for i in range(MAX_ANCESTORS):
            (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, 0, value, fee, 1)
            value = sent_value
            chain.append(txid)
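        # Each iteration spends output 0 of the previous transaction, so chain[]
        # ends up holding one linear chain of MAX_ANCESTORS unconfirmed txids,
        # ordered oldest to newest.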

        # Check mempool has MAX_ANCESTORS transactions in it, and descendant and ancestor
        # count and fees should look correct
        mempool = self.nodes[0].getrawmempool(True)
        assert_equal(len(mempool), MAX_ANCESTORS)
        descendant_count = 1
        descendant_fees = 0
        descendant_vsize = 0

        ancestor_vsize = sum([mempool[tx]['vsize'] for tx in mempool])
        ancestor_count = MAX_ANCESTORS
        ancestor_fees = sum([mempool[tx]['fee'] for tx in mempool])
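        # Walk the chain from the newest transaction back to the oldest: the
        # descendant totals grow as we go while the ancestor totals shrink.
        # Both counts include the transaction itself, which is why
        # descendant_count starts at 1 and ancestor_count at MAX_ANCESTORS.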

        descendants = []
        ancestors = list(chain)
        for x in reversed(chain):
            # Check that getmempoolentry is consistent with getrawmempool
            entry = self.nodes[0].getmempoolentry(x)
            assert_equal(entry, mempool[x])

            # Check that the descendant calculations are correct
            assert_equal(mempool[x]['descendantcount'], descendant_count)
            descendant_fees += mempool[x]['fee']
            assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee'])
            assert_equal(mempool[x]['fees']['base'], mempool[x]['fee'])
            assert_equal(mempool[x]['fees']['modified'], mempool[x]['modifiedfee'])
            assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN)
            assert_equal(mempool[x]['fees']['descendant'], descendant_fees)
            descendant_vsize += mempool[x]['vsize']
            assert_equal(mempool[x]['descendantsize'], descendant_vsize)
            descendant_count += 1
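            # The legacy 'descendantfees'/'ancestorfees' fields are denominated
            # in satoshis (hence the * COIN), whereas the 'fees' sub-object
            # reports BTC amounts.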

            # Check that ancestor calculations are correct
            assert_equal(mempool[x]['ancestorcount'], ancestor_count)
            assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN)
            assert_equal(mempool[x]['ancestorsize'], ancestor_vsize)
            ancestor_vsize -= mempool[x]['vsize']
            ancestor_fees -= mempool[x]['fee']
            ancestor_count -= 1

            # Check that parent/child list is correct
            assert_equal(mempool[x]['spentby'], descendants[-1:])
            assert_equal(mempool[x]['depends'], ancestors[-2:-1])
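            # In a linear chain each entry has at most one direct child (the
            # previously visited, newer tx) and one direct parent (the entry
            # just before it in 'ancestors'); both slices are empty at the
            # ends of the chain.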

            # Check that getmempooldescendants is correct
            assert_equal(sorted(descendants), sorted(self.nodes[0].getmempooldescendants(x)))

            # Check getmempooldescendants verbose output is correct
            for descendant, dinfo in self.nodes[0].getmempooldescendants(x, True).items():
                assert_equal(dinfo['depends'], [chain[chain.index(descendant)-1]])
                if dinfo['descendantcount'] > 1:
                    assert_equal(dinfo['spentby'], [chain[chain.index(descendant)+1]])
                else:
                    assert_equal(dinfo['spentby'], [])
            descendants.append(x)

            # Check that getmempoolancestors is correct
            ancestors.remove(x)
            assert_equal(sorted(ancestors), sorted(self.nodes[0].getmempoolancestors(x)))

            # Check that getmempoolancestors verbose output is correct
            for ancestor, ainfo in self.nodes[0].getmempoolancestors(x, True).items():
                assert_equal(ainfo['spentby'], [chain[chain.index(ancestor)+1]])
                if ainfo['ancestorcount'] > 1:
                    assert_equal(ainfo['depends'], [chain[chain.index(ancestor)-1]])
                else:
                    assert_equal(ainfo['depends'], [])

        # Check that getmempoolancestors/getmempooldescendants correctly handle verbose=true
        v_ancestors = self.nodes[0].getmempoolancestors(chain[-1], True)
        assert_equal(len(v_ancestors), len(chain)-1)
        for x in v_ancestors.keys():
            assert_equal(mempool[x], v_ancestors[x])
        assert chain[-1] not in v_ancestors.keys()

        v_descendants = self.nodes[0].getmempooldescendants(chain[0], True)
        assert_equal(len(v_descendants), len(chain)-1)
        for x in v_descendants.keys():
            assert_equal(mempool[x], v_descendants[x])
        assert chain[0] not in v_descendants.keys()
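        # With verbose=True both RPCs return a txid -> entry mapping whose
        # entries match getrawmempool(True); the queried transaction itself is
        # never part of its own ancestor or descendant listing.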

        # Check that ancestor modified fees includes fee deltas from
        # prioritisetransaction
        self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=1000)
        mempool = self.nodes[0].getrawmempool(True)
        ancestor_fees = 0
        for x in chain:
            ancestor_fees += mempool[x]['fee']
            assert_equal(mempool[x]['fees']['ancestor'], ancestor_fees + Decimal('0.00001'))
            assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN + 1000)
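        # The fee delta is given in satoshis; since chain[0] is an ancestor of
        # every transaction in the chain, each entry's ancestor aggregates grow
        # by 1000 satoshis (0.00001 BTC) while the base 'fee' field is untouched.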

        # Undo the prioritisetransaction for later tests
        self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=-1000)

        # Check that descendant modified fees includes fee deltas from
        # prioritisetransaction
        self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=1000)
        mempool = self.nodes[0].getrawmempool(True)

        descendant_fees = 0
        for x in reversed(chain):
            descendant_fees += mempool[x]['fee']
            assert_equal(mempool[x]['fees']['descendant'], descendant_fees + Decimal('0.00001'))
            assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 1000)

        # Adding one more transaction on to the chain should fail.
        assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], txid, vout, value, fee, 1)
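        # -26 is the RPC code for a transaction rejected by mempool policy; the
        # "too-long-mempool-chain" reject reason is expected because one more
        # child would push the package past the MAX_ANCESTORS limit.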

        # Check that prioritising a tx before it's added to the mempool works
        # First clear the mempool by mining a block.
        self.nodes[0].generate(1)
        self.sync_blocks()
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        # Prioritise a transaction that has been mined, then add it back to the
        # mempool by using invalidateblock.
        self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=2000)
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        # Keep node1's tip synced with node0
        self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
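        # invalidateblock disconnects the tip and returns its non-coinbase
        # transactions to the mempool, so the mined chain re-enters node0's
        # mempool carrying the fee delta registered while it was not there.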

        # Now check that the transaction is in the mempool, with the right modified fee
        mempool = self.nodes[0].getrawmempool(True)

        descendant_fees = 0
        for x in reversed(chain):
            descendant_fees += mempool[x]['fee']
            if (x == chain[-1]):
                assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee']+satoshi_round(0.00002))
                assert_equal(mempool[x]['fees']['modified'], mempool[x]['fee']+satoshi_round(0.00002))
            assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 2000)
            assert_equal(mempool[x]['fees']['descendant'], descendant_fees+satoshi_round(0.00002))

        # Check that node1's mempool is as expected (-> custom ancestor limit)
        mempool0 = self.nodes[0].getrawmempool(False)
        mempool1 = self.nodes[1].getrawmempool(False)
        assert_equal(len(mempool1), MAX_ANCESTORS_CUSTOM)
        assert set(mempool1).issubset(set(mempool0))
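        # node1 runs with -limitancestorcount=5, so only the first
        # MAX_ANCESTORS_CUSTOM transactions of the chain were accepted there,
        # and everything node1 kept must also be in node0's larger mempool: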
        for tx in chain[:MAX_ANCESTORS_CUSTOM]:
            assert tx in mempool1
        # TODO: more detailed check of node1's mempool (fees etc.)

        # TODO: test ancestor size limits

        # Now test descendant chain limits
        txid = utxo[1]['txid']
        value = utxo[1]['amount']
        vout = utxo[1]['vout']

        transaction_package = []
|
2018-03-06 22:14:44 +01:00
|
|
|
tx_children = []
|
2015-07-15 20:47:45 +02:00
|
|
|
# First create one parent tx with 10 children
|
2015-09-23 17:46:36 +02:00
|
|
|
(txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 10)
|
2015-07-15 20:47:45 +02:00
|
|
|
parent_transaction = txid
|
2016-05-06 11:23:48 +02:00
|
|
|
for i in range(10):
|
2015-07-15 20:47:45 +02:00
|
|
|
transaction_package.append({'txid': txid, 'vout': i, 'amount': sent_value})
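        # transaction_package is a FIFO of spendable outputs: each entry is one
        # unspent output of a transaction already in the descendant package, and
        # every new spend removes one entry and adds ten more.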

        # Sign and send up to MAX_DESCENDANTS transactions chained off the parent tx
        for i in range(MAX_DESCENDANTS - 1):
            utxo = transaction_package.pop(0)
            (txid, sent_value) = self.chain_transaction(self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
            if utxo['txid'] == parent_transaction:
                tx_children.append(txid)
            for j in range(10):
                transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value})

        mempool = self.nodes[0].getrawmempool(True)
        assert_equal(mempool[parent_transaction]['descendantcount'], MAX_DESCENDANTS)
        assert_equal(sorted(mempool[parent_transaction]['spentby']), sorted(tx_children))

        for child in tx_children:
            assert_equal(mempool[child]['depends'], [parent_transaction])
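        # descendantcount includes the parent itself, so the parent plus the
        # (MAX_DESCENDANTS - 1) chained spends above fill the package exactly;
        # only direct spends of the parent's outputs appear in its 'spentby' list.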

        # Sending one more chained transaction will fail
        utxo = transaction_package.pop(0)
        assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)

        # TODO: check that node1's mempool is as expected

        # TODO: test descendant size limits

        # Test reorg handling
        # First, the basics:
        self.nodes[0].generate(1)
        self.sync_blocks()
        self.nodes[1].invalidateblock(self.nodes[0].getbestblockhash())
        self.nodes[1].reconsiderblock(self.nodes[0].getbestblockhash())
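        # Disconnecting and immediately reconnecting the same tip exercises the
        # mempool's reorg handling without changing the final chain.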

        # Now test the case where node1 has a transaction T in its mempool that
        # depends on transactions A and B which are in a mined block, and the
        # block containing A and B is disconnected, AND B is not accepted back
        # into node1's mempool because its ancestor count is too high.

        # Create 8 transactions, like so:
        # Tx0 -> Tx1 (vout0)
        #   \--> Tx2 (vout1) -> Tx3 -> Tx4 -> Tx5 -> Tx6 -> Tx7
        #
        # Mine them in the next block, then generate a new tx8 that spends
        # Tx1 and Tx7, and add to node1's mempool, then disconnect the
        # last block.

        # Create tx0 with 2 outputs
        utxo = self.nodes[0].listunspent()
        txid = utxo[0]['txid']
        value = utxo[0]['amount']
        vout = utxo[0]['vout']

        send_value = satoshi_round((value - fee)/2)
        inputs = [ {'txid' : txid, 'vout' : vout} ]
        outputs = {}
        for i in range(2):
            outputs[self.nodes[0].getnewaddress()] = send_value
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
        txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
        tx0_id = txid
        value = send_value

        # Create tx1
        tx1_id, _ = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1)

        # Create tx2-7
        vout = 1
        txid = tx0_id
        for i in range(6):
            (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 1)
            vout = 0
            value = sent_value
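        # The first iteration spends tx0's second output (vout 1) to create Tx2;
        # every later iteration spends output 0 of the previous transaction,
        # producing the Tx2 -> ... -> Tx7 branch from the diagram above.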

        # Mine these in a block
        self.nodes[0].generate(1)
        self.sync_all()

        # Now generate tx8, with a big fee
        inputs = [ {'txid' : tx1_id, 'vout': 0}, {'txid' : txid, 'vout': 0} ]
        outputs = { self.nodes[0].getnewaddress() : send_value + value - 4*fee }
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
        txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
        self.sync_mempools()
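        # tx8 is the transaction "T" from the comment above: it spends Tx1 and
        # Tx7, which are confirmed at this point, and is now in both mempools.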

        # Now try to disconnect the tip on each node...
        self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        self.sync_blocks()
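        # After the disconnect, node1's lower ancestor limit keeps it from
        # re-accepting the whole Tx0..Tx7 chain, which is the "B is not accepted
        # back" scenario described above; the test mainly checks that both nodes
        # handle this without failing and stay in sync.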


if __name__ == '__main__':
    MempoolPackagesTest().main()