merge bitcoin#22788: Use generate* from TestFramework

Author: Kittywhiskers Van Gogh
Date:   2024-10-01 19:25:52 +00:00
commit 7d3c3b4b64 (parent c17fd8bc59)
GPG Key ID: 30CD0C065E5C4AAD (no known key found for this signature in database)

143 changed files with 590 additions and 590 deletions
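
Every hunk below applies the same mechanical rewrite: direct RPC calls such as node.generate(n), node.generatetoaddress(n, addr) and wallet.generate(n) are routed through the test framework's generate* helpers instead, i.e. self.generate(node, n) and self.generatetoaddress(node, n, addr). As a rough sketch (not the verbatim upstream implementation, which may carry extra bookkeeping), the helpers can be thought of as thin forwarders on the shared framework class that dispatch to whatever object is passed as the generator:

# Illustrative sketch only -- assumes the helpers simply forward to the
# generator object's own method, so that all block generation funnels
# through one place on the framework class.
class BitcoinTestFrameworkSketch:
    def generate(self, generator, *args, **kwargs):
        # 'generator' is duck-typed: a TestNode, a wallet RPC wrapper,
        # or a MiniWallet -- anything exposing generate(...).
        return generator.generate(*args, **kwargs)

    def generatetoaddress(self, generator, *args, **kwargs):
        # e.g. self.generatetoaddress(self.nodes[0], 105, mining_address)
        return generator.generatetoaddress(*args, **kwargs)

Because the helper returns whatever the underlying call returns, call sites that index into the result, such as self.generate(self.nodes[0], 1)[0] to grab the new tip hash, keep working unchanged.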


@ -148,7 +148,7 @@ class ExampleTest(BitcoinTestFramework):
peer_messaging = self.nodes[0].add_p2p_connection(BaseNode())
# Generating a block on one of the nodes will get us out of IBD
blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
blocks = [int(self.generate(self.nodes[0], nblocks=1)[0], 16)]
self.sync_all(self.nodes[0:2])
# Notice above how we called an RPC by calling a method with the same


@ -26,7 +26,7 @@ class AbortNodeTest(BitcoinTestFramework):
# We'll connect the nodes later
def run_test(self):
self.nodes[0].generate(3)
self.generate(self.nodes[0], 3)
datadir = get_datadir_path(self.options.tmpdir, 0)
# Deleting the undo file will result in reorg failure
@ -34,10 +34,10 @@ class AbortNodeTest(BitcoinTestFramework):
# Connecting to a node with a more work chain will trigger a reorg
# attempt.
self.nodes[1].generate(3)
self.generate(self.nodes[1], 3)
with self.nodes[0].assert_debug_log(["Failed to disconnect block"]):
self.connect_nodes(0, 1)
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
# Check that node0 aborted
self.log.info("Waiting for crash")


@ -52,7 +52,7 @@ class AddressIndexTest(BitcoinTestFramework):
self.log.info("Mining blocks...")
mining_address = self.nodes[0].getnewaddress()
self.nodes[0].generatetoaddress(105, mining_address)
self.generatetoaddress(self.nodes[0], 105, mining_address)
self.sync_all()
chain_height = self.nodes[1].getblockcount()
@ -72,22 +72,22 @@ class AddressIndexTest(BitcoinTestFramework):
self.log.info("Testing p2pkh and p2sh address index...")
txid0 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 10)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
txidb0 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 10)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
txid1 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 15)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
txidb1 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 15)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
txid2 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 20)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
txidb2 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 20)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
@ -141,7 +141,7 @@ class AddressIndexTest(BitcoinTestFramework):
signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex())
sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
txidsmany = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
@ -170,7 +170,7 @@ class AddressIndexTest(BitcoinTestFramework):
tx.rehash()
signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex())
spending_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
balance1 = self.nodes[1].getaddressbalance(address2)
assert_equal(balance1["balance"], amount)
@ -184,7 +184,7 @@ class AddressIndexTest(BitcoinTestFramework):
signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex())
sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
balance2 = self.nodes[1].getaddressbalance(address2)
@ -233,12 +233,12 @@ class AddressIndexTest(BitcoinTestFramework):
assert_equal(utxos2[0]["satoshis"], amount)
# Check sorting of utxos
self.nodes[2].generate(150)
self.generate(self.nodes[2], 150)
self.nodes[2].sendtoaddress(address2, 50)
self.nodes[2].generate(1)
self.generate(self.nodes[2], 1)
self.nodes[2].sendtoaddress(address2, 50)
self.nodes[2].generate(1)
self.generate(self.nodes[2], 1)
self.sync_all()
utxos3 = self.nodes[1].getaddressutxos({"addresses": [address2]})
@ -291,7 +291,7 @@ class AddressIndexTest(BitcoinTestFramework):
assert_equal(mempool[2]["txid"], memtxid2)
assert_equal(mempool[2]["index"], 1)
self.nodes[2].generate(1)
self.generate(self.nodes[2], 1)
self.sync_all()
mempool2 = self.nodes[2].getaddressmempool({"addresses": [address3]})
assert_equal(len(mempool2), 0)
@ -322,7 +322,7 @@ class AddressIndexTest(BitcoinTestFramework):
address1script = CScript([OP_DUP, OP_HASH160, address1hash, OP_EQUALVERIFY, OP_CHECKSIG])
self.nodes[0].sendtoaddress(address1, 10)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
utxos = self.nodes[1].getaddressutxos({"addresses": [address1]})


@ -230,7 +230,7 @@ class AssetLocksTest(DashTestFramework):
batch = min(50, count)
count -= batch
self.bump_mocktime(batch)
self.nodes[1].generate(batch)
self.generate(self.nodes[1], batch)
self.sync_all()
# This functional test intentionally setup only 2 MN and only 2 Evo nodes
@ -256,11 +256,11 @@ class AssetLocksTest(DashTestFramework):
for _ in range(2):
self.dynamically_add_masternode(evo=True)
node.generate(8)
self.generate(node, 8)
self.sync_blocks()
self.set_sporks()
node.generate(1)
self.generate(node, 1)
self.sync_all()
self.mempool_size = 0
@ -295,10 +295,10 @@ class AssetLocksTest(DashTestFramework):
assert_equal(rpc_tx["assetLockTx"]["creditOutputs"][0]["scriptPubKey"]["hex"], key_to_p2pkh_script(pubkey).hex())
assert_equal(rpc_tx["assetLockTx"]["creditOutputs"][1]["scriptPubKey"]["hex"], key_to_p2pkh_script(pubkey).hex())
self.validate_credit_pool_balance(0)
node.generate(1)
self.generate(node, 1)
assert_equal(self.get_credit_pool_balance(node=node), locked_1)
self.log.info("Generate a number of blocks to ensure this is the longest chain for later in the test when we reconsiderblock")
node.generate(12)
self.generate(node, 12)
self.sync_all()
self.validate_credit_pool_balance(locked_1)
@ -309,14 +309,14 @@ class AssetLocksTest(DashTestFramework):
for inode in self.nodes:
inode.invalidateblock(self.block_hash_1)
assert_equal(self.get_credit_pool_balance(node=inode), 0)
node.generate(3)
self.generate(node, 3)
self.sync_all()
self.validate_credit_pool_balance(0)
self.log.info("Resubmit asset lock tx to new chain...")
# NEW tx appears
asset_lock_tx_2 = self.create_assetlock(coin, locked_2, pubkey)
txid_in_block = self.send_tx(asset_lock_tx_2)
node.generate(1)
self.generate(node, 1)
self.sync_all()
self.validate_credit_pool_balance(locked_2)
self.log.info("Reconsider old blocks...")
@ -401,7 +401,7 @@ class AssetLocksTest(DashTestFramework):
self.mempool_size += 2
self.check_mempool_size()
self.validate_credit_pool_balance(locked)
node.generate(1)
self.generate(node, 1)
self.sync_all()
assert_equal(rawtx["instantlock"], False)
assert_equal(rawtx["chainlock"], False)
@ -424,14 +424,14 @@ class AssetLocksTest(DashTestFramework):
self.log.info("Checking credit pool amount still is same...")
self.validate_credit_pool_balance(locked - 1 * COIN)
self.send_tx(asset_unlock_tx_late)
node.generate(1)
self.generate(node, 1)
self.sync_all()
self.validate_credit_pool_balance(locked - 2 * COIN)
self.log.info("Generating many blocks to make quorum far behind (even still active)...")
self.generate_batch(too_late_height - node.getblockcount() - 1)
self.check_mempool_result(tx=asset_unlock_tx_too_late, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}})
node.generate(1)
self.generate(node, 1)
self.sync_all()
self.check_mempool_result(tx=asset_unlock_tx_too_late,
result_expected={'allowed': False, 'reject-reason' : 'bad-assetunlock-too-late'})
@ -456,7 +456,7 @@ class AssetLocksTest(DashTestFramework):
self.log.info("Forcibly mining asset_unlock_tx_too_late and ensure block is invalid")
self.create_and_check_block([asset_unlock_tx_too_late], expected_error = "bad-assetunlock-not-active-quorum")
node.generate(1)
self.generate(node, 1)
self.sync_all()
self.validate_credit_pool_balance(locked - 2 * COIN)
@ -476,7 +476,7 @@ class AssetLocksTest(DashTestFramework):
self.check_mempool_result(tx=asset_unlock_tx_full, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}})
txid_in_block = self.send_tx(asset_unlock_tx_full)
node.generate(1)
self.generate(node, 1)
self.sync_all()
self.ensure_tx_is_not_mined(txid_in_block)
@ -490,7 +490,7 @@ class AssetLocksTest(DashTestFramework):
txid_in_block = self.send_tx(asset_unlock_tx_full)
expected_balance = (Decimal(self.get_credit_pool_balance()) - Decimal(tiny_amount))
node.generate(1)
self.generate(node, 1)
self.sync_all()
self.log.info("Check txid_in_block was mined")
block = node.getblock(node.getbestblockhash())
@ -508,7 +508,7 @@ class AssetLocksTest(DashTestFramework):
self.check_mempool_result(tx=spend_withdrawal, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}})
spend_txid_in_block = self.send_tx(spend_withdrawal)
node.generate(1)
self.generate(node, 1)
block = node.getblock(node.getbestblockhash())
assert spend_txid_in_block in block['tx']
@ -528,7 +528,7 @@ class AssetLocksTest(DashTestFramework):
self.send_tx_simple(tx)
self.log.info(f"Collecting coins in pool... Collected {total}/{10_901 * COIN}")
self.sync_mempools()
node.generate(1)
self.generate(node, 1)
self.sync_all()
credit_pool_balance_1 = self.get_credit_pool_balance()
assert_greater_than(credit_pool_balance_1, 10_901 * COIN)
@ -548,7 +548,7 @@ class AssetLocksTest(DashTestFramework):
node.prioritisetransaction(last_txid, next_amount // 10000)
self.sync_mempools()
node.generate(1)
self.generate(node, 1)
self.sync_all()
new_total = self.get_credit_pool_balance()
@ -561,7 +561,7 @@ class AssetLocksTest(DashTestFramework):
assert_greater_than_or_equal(limit_amount_1, amount_actually_withdrawn)
assert_equal(amount_actually_withdrawn, 900 * COIN + 10001)
node.generate(1)
self.generate(node, 1)
self.sync_all()
self.log.info("Checking that exactly 1 tx stayed in mempool...")
self.mempool_size = 1
@ -575,7 +575,7 @@ class AssetLocksTest(DashTestFramework):
asset_unlock_tx = self.create_assetunlock(index, amount_to_withdraw_2, pubkey)
self.send_tx_simple(asset_unlock_tx)
self.sync_mempools()
node.generate(1)
self.generate(node, 1)
self.sync_all()
new_total = self.get_credit_pool_balance()
amount_actually_withdrawn = total - new_total
@ -599,10 +599,10 @@ class AssetLocksTest(DashTestFramework):
index += 1
asset_unlock_tx = self.create_assetunlock(index, limit_amount_2, pubkey)
self.send_tx(asset_unlock_tx)
node.generate(1)
self.generate(node, 1)
self.sync_all()
assert_equal(new_total, self.get_credit_pool_balance())
node.generate(1)
self.generate(node, 1)
self.sync_all()
new_total -= limit_amount_2
assert_equal(new_total, self.get_credit_pool_balance())
@ -610,7 +610,7 @@ class AssetLocksTest(DashTestFramework):
index += 1
asset_unlock_tx = self.create_assetunlock(index, COIN, pubkey)
self.send_tx(asset_unlock_tx)
node.generate(1)
self.generate(node, 1)
self.sync_all()
tip = self.nodes[0].getblockcount()
@ -644,7 +644,7 @@ class AssetLocksTest(DashTestFramework):
assert_equal(platform_reward, all_mn_rewards * 375 // 1000) # 0.375 platform share
assert_equal(platform_reward, 34371430)
assert_equal(locked, self.get_credit_pool_balance())
node.generate(1)
self.generate(node, 1)
self.sync_all()
locked += platform_reward
assert_equal(locked, self.get_credit_pool_balance())
@ -653,7 +653,7 @@ class AssetLocksTest(DashTestFramework):
coin = coins.pop()
self.send_tx(self.create_assetlock(coin, COIN, pubkey))
locked += platform_reward + COIN
node.generate(1)
self.generate(node, 1)
self.sync_all()
assert_equal(locked, self.get_credit_pool_balance())


@ -63,7 +63,7 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
self.import_deterministic_coinbase_privkeys()
def run_test(self):
self.nodes[0].generatetoaddress(101, self.nodes[0].getnewaddress())
self.generatetoaddress(self.nodes[0], 101, self.nodes[0].getnewaddress())
self.sync_blocks()
@ -92,7 +92,7 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
address = wallet.getnewaddress()
self.nodes[0].sendtoaddress(address, 1)
self.sync_mempools()
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks()
# w1_v19: regular wallet, created with v0.19


@ -54,7 +54,7 @@ class BIP68Test(BitcoinTestFramework):
self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
# Generate some coins
self.nodes[0].generate(110)
self.generate(self.nodes[0], 110)
self.log.info("Running test disable flag")
self.test_disable_flag()
@ -142,7 +142,7 @@ class BIP68Test(BitcoinTestFramework):
for i in range(num_outputs):
outputs[addresses[i]] = random.randint(1, 20)*0.01
self.nodes[0].sendmany("", outputs)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
utxos = self.nodes[0].listunspent()
@ -272,7 +272,7 @@ class BIP68Test(BitcoinTestFramework):
cur_time = self.mocktime
for _ in range(10):
self.nodes[0].setmocktime(cur_time + 600)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
cur_time += 600
assert tx2.hash in self.nodes[0].getrawmempool()
@ -287,7 +287,7 @@ class BIP68Test(BitcoinTestFramework):
self.nodes[0].setmocktime(cur_time+600)
# Save block template now to use for the reorg later
tmpl = self.nodes[0].getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
assert tx2.hash not in self.nodes[0].getrawmempool()
# Now that tx2 is not in the mempool, a sequence locked spend should
@ -295,7 +295,7 @@ class BIP68Test(BitcoinTestFramework):
tx3 = test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
assert tx3.hash in self.nodes[0].getrawmempool()
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
assert tx3.hash not in self.nodes[0].getrawmempool()
# One more test, this time using height locks
@ -348,7 +348,7 @@ class BIP68Test(BitcoinTestFramework):
# Reset the chain and get rid of the mocktimed-blocks
self.nodes[0].setmocktime(self.mocktime)
self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height+1))
self.nodes[0].generate(10)
self.generate(self.nodes[0], 10)
# Make sure that BIP68 isn't being used to validate blocks prior to
# activation height. If more blocks are mined prior to this test
@ -401,9 +401,9 @@ class BIP68Test(BitcoinTestFramework):
min_activation_height = 432
height = self.nodes[0].getblockcount()
assert_greater_than(min_activation_height - height, 2)
self.nodes[0].generate(min_activation_height - height - 2)
self.generate(self.nodes[0], min_activation_height - height - 2)
assert not softfork_active(self.nodes[0], 'csv')
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
assert softfork_active(self.nodes[0], 'csv')
self.sync_blocks()


@ -26,9 +26,9 @@ class FeatureBlockfilterindexPruneTest(BitcoinTestFramework):
self.sync_index(height=200)
assert_greater_than(len(self.nodes[0].getblockfilter(self.nodes[0].getbestblockhash())['filter']), 0)
# Mine two batches of blocks to avoid hitting NODE_NETWORK_LIMITED_MIN_BLOCKS disconnection
self.nodes[0].generate(250)
self.generate(self.nodes[0], 250)
self.sync_all()
self.nodes[0].generate(250)
self.generate(self.nodes[0], 250)
self.sync_all()
self.sync_index(height=700)
@ -47,7 +47,7 @@ class FeatureBlockfilterindexPruneTest(BitcoinTestFramework):
self.log.info("make sure accessing the blockfilters throws an error")
assert_raises_rpc_error(-1, "Index is not enabled for filtertype basic", self.nodes[0].getblockfilter, self.nodes[0].getblockhash(2))
self.nodes[0].generate(1000)
self.generate(self.nodes[0], 1000)
self.log.info("prune below the blockfilterindexes best block while blockfilters are disabled")
pruneheight_new = self.nodes[0].pruneblockchain(1000)


@ -30,7 +30,7 @@ class BlocksdirTest(BitcoinTestFramework):
self.log.info("Starting with existing blocksdir ...")
self.start_node(0, ["-blocksdir=" + blocksdir_path])
self.log.info("mining blocks..")
self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address)
assert os.path.isfile(os.path.join(blocksdir_path, self.chain, "blocks", "blk00000.dat"))
assert os.path.isdir(os.path.join(self.nodes[0].datadir, self.chain, "blocks", "index"))


@ -110,8 +110,8 @@ class BIP65Test(BitcoinTestFramework):
self.test_cltv_info(is_active=False)
self.log.info("Mining %d blocks", CLTV_HEIGHT - 2)
wallet.generate(10)
self.nodes[0].generate(CLTV_HEIGHT - 2 - 10)
self.generate(wallet, 10)
self.generate(self.nodes[0], CLTV_HEIGHT - 2 - 10)
assert_equal(self.nodes[0].getblockcount(), CLTV_HEIGHT - 2)
self.log.info("Test that invalid-according-to-CLTV transactions can still appear in a block")


@ -81,10 +81,10 @@ class CoinStatsIndexTest(BitcoinTestFramework):
index_hash_options = ['none', 'muhash']
# Generate a normal transaction and mine it
node.generate(101)
self.generate(node, 101)
address = self.nodes[0].get_deterministic_priv_key().address
node.sendtoaddress(address=address, amount=10, subtractfeefromamount=True)
node.generate(1)
self.generate(node, 1)
self.sync_blocks(timeout=120)
@ -106,7 +106,7 @@ class CoinStatsIndexTest(BitcoinTestFramework):
self.log.info("Test that gettxoutsetinfo() can get fetch data on specific heights with index")
# Generate a new tip
node.generate(5)
self.generate(node, 5)
for hash_option in index_hash_options:
# Fetch old stats by height
@ -183,7 +183,7 @@ class CoinStatsIndexTest(BitcoinTestFramework):
self.nodes[0].sendrawtransaction(tx2_hex)
# Include both txs in a block
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
for hash_option in index_hash_options:
@ -242,7 +242,7 @@ class CoinStatsIndexTest(BitcoinTestFramework):
res9 = index_node.gettxoutsetinfo('muhash')
assert_equal(res8, res9)
index_node.generate(1)
self.generate(index_node, 1)
res10 = index_node.gettxoutsetinfo('muhash')
assert(res8['txouts'] < res10['txouts'])
@ -261,14 +261,14 @@ class CoinStatsIndexTest(BitcoinTestFramework):
# Generate two block, let the index catch up, then invalidate the blocks
index_node = self.nodes[1]
reorg_blocks = index_node.generatetoaddress(2, index_node.getnewaddress())
reorg_blocks = self.generatetoaddress(index_node, 2, index_node.getnewaddress())
reorg_block = reorg_blocks[1]
res_invalid = index_node.gettxoutsetinfo('muhash')
index_node.invalidateblock(reorg_blocks[0])
assert_equal(index_node.gettxoutsetinfo('muhash')['height'], 110)
# Add two new blocks
block = index_node.generate(2)[1]
block = self.generate(index_node, 2)[1]
res = index_node.gettxoutsetinfo(hash_type='muhash', hash_or_height=None, use_index=False)
# Test that the result of the reorged block is not returned for its old block height
@ -284,7 +284,7 @@ class CoinStatsIndexTest(BitcoinTestFramework):
# Add another block, so we don't depend on reconsiderblock remembering which
# blocks were touched by invalidateblock
index_node.generate(1)
self.generate(index_node, 1)
self.sync_all()
# Ensure that removing and re-adding blocks yields consistent results


@ -198,7 +198,7 @@ class BIP68_112_113Test(BitcoinTestFramework):
self.miniwallet = MiniWallet(self.nodes[0], mode=MiniWalletMode.RAW_P2PK)
self.log.info("Generate blocks in the past for coinbase outputs.")
self.coinbase_blocks = self.miniwallet.generate(COINBASE_BLOCK_COUNT) # blocks generated for inputs
self.coinbase_blocks = self.generate(self.miniwallet, COINBASE_BLOCK_COUNT) # blocks generated for inputs
# set time so that there was enough time to build up to 1000 blocks 10 minutes apart on top of the last one
# without worrying about getting into the future
self.nodes[0].setmocktime(TIME_GENESIS_BLOCK + 600 * 1000 + 100)
@ -246,7 +246,7 @@ class BIP68_112_113Test(BitcoinTestFramework):
bip113input = self.send_generic_input_tx(self.coinbase_blocks)
self.nodes[0].setmocktime(self.last_block_time + 600)
inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 431
inputblockhash = self.generate(self.nodes[0], 1)[0] # 1 block generated for inputs to be in chain at height 431
self.nodes[0].setmocktime(TIME_GENESIS_BLOCK + 600 * 1000 + 100)
self.tip = int(inputblockhash, 16)
self.tipheight += 1


@ -78,7 +78,7 @@ class BIP66Test(BitcoinTestFramework):
self.test_dersig_info(is_active=False)
self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2)
self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.miniwallet.generate(DERSIG_HEIGHT - 2)]
self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.generate(self.miniwallet, DERSIG_HEIGHT - 2)]
self.log.info("Test that a transaction with non-DER signature can still appear in a block")


@ -58,7 +58,7 @@ class DIP0020ActivationTest(BitcoinTestFramework):
# This tx should be completely valid, should be included in mempool and mined in the next block
assert txid in set(node.getrawmempool())
node.generate(1)
self.generate(node, 1)
assert txid not in set(node.getrawmempool())
# Create spending tx
@ -83,9 +83,9 @@ class DIP0020ActivationTest(BitcoinTestFramework):
helper_peer.send_blocks_and_test([test_block], node, success=False, reject_reason='block-validation-failed', expect_disconnect=True)
self.log.info("Generate enough blocks to activate DIP0020 opcodes")
node.generate(97)
self.generate(node, 97)
assert not softfork_active(node, 'dip0020')
node.generate(1)
self.generate(node, 1)
assert softfork_active(node, 'dip0020')
# flush state to disk before potential crashes below
@ -103,7 +103,7 @@ class DIP0020ActivationTest(BitcoinTestFramework):
# txes spending new opcodes still won't be accepted into mempool if we roll back to the previous tip
node.invalidateblock(node.getbestblockhash())
assert tx0id not in set(node.getrawmempool())
node.generate(1)
self.generate(node, 1)
self.log.info("Transactions spending coins with new opcodes are accepted one block after DIP0020 activation block")
node.sendrawtransaction(tx0_hex)


@ -49,7 +49,7 @@ class DIP3Test(BitcoinTestFramework):
def run_test(self):
self.log.info("funding controller node")
while self.nodes[0].getbalance() < (self.num_initial_mn + 3) * 1000:
self.nodes[0].generate(10) # generate enough for collaterals
self.generate(self.nodes[0], 10) # generate enough for collaterals
self.log.info("controller node has {} dash".format(self.nodes[0].getbalance()))
# Make sure we're below block 135 (which activates dip3)
@ -65,11 +65,11 @@ class DIP3Test(BitcoinTestFramework):
mns.append(before_dip3_mn)
# block 150 starts enforcing DIP3 MN payments
self.nodes[0].generate(150 - self.nodes[0].getblockcount())
self.generate(self.nodes[0], 150 - self.nodes[0].getblockcount())
assert self.nodes[0].getblockcount() == 150
self.log.info("mining final block for DIP3 activation")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
# We have hundreds of blocks to sync here, give it more time
self.log.info("syncing blocks for all nodes")
@ -101,7 +101,7 @@ class DIP3Test(BitcoinTestFramework):
self.log.info("register %s" % mn.alias)
self.register_mn(self.nodes[0], mn)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
if not start:
self.start_mn(mn)
@ -125,7 +125,7 @@ class DIP3Test(BitcoinTestFramework):
assert_equal(rpc_collateral_address, old_collateral_address)
dummy_txin = self.spend_mn_collateral(mns[i], with_dummy_input_output=True)
dummy_txins.append(dummy_txin)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
mns_tmp.remove(mns[i])
self.assert_mnlists(mns_tmp)
@ -144,7 +144,7 @@ class DIP3Test(BitcoinTestFramework):
self.log.info("cause a reorg with a double spend and check that mnlists are still correct on all nodes")
self.mine_double_spend(mns, self.nodes[0], dummy_txins, self.nodes[0].getnewaddress())
self.nodes[0].generate(spend_mns_count)
self.generate(self.nodes[0], spend_mns_count)
self.sync_all()
self.assert_mnlists(mns_tmp)
@ -152,7 +152,7 @@ class DIP3Test(BitcoinTestFramework):
for i in range(20):
node = self.nodes[i % len(self.nodes)]
self.test_invalid_mn_payment(mns, node)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
self.log.info("testing ProUpServTx")
@ -175,7 +175,7 @@ class DIP3Test(BitcoinTestFramework):
bt = self.nodes[0].getblocktemplate()
expected_payee = bt['masternode'][0]['payee']
expected_amount = bt['masternode'][0]['amount']
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
if expected_payee == multisig:
block = self.nodes[0].getblock(self.nodes[0].getbestblockhash())
@ -198,7 +198,7 @@ class DIP3Test(BitcoinTestFramework):
self.register_mn(self.nodes[0], new_mn)
mns[i] = new_mn
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
self.assert_mnlists(mns)
self.log.info("restarting MN %s" % new_mn.alias)
@ -217,7 +217,7 @@ class DIP3Test(BitcoinTestFramework):
# also check if funds from payout address are used when no fee source address is specified
node.sendtoaddress(mn.rewards_address, 0.001)
node.protx('update_registrar', mn.protx_hash, "", new_voting_address, "")
node.generate(1)
self.generate(node, 1)
self.sync_all()
new_dmnState = mn.node.masternode("status")["dmnState"]
new_voting_address_from_rpc = new_dmnState["votingAddress"]
@ -245,7 +245,7 @@ class DIP3Test(BitcoinTestFramework):
mn.collateral_address = node.getnewaddress()
mn.collateral_txid = node.sendtoaddress(mn.collateral_address, 1000)
mn.collateral_vout = None
node.generate(1)
self.generate(node, 1)
rawtx = node.getrawtransaction(mn.collateral_txid, 1)
for txout in rawtx['vout']:
@ -277,7 +277,7 @@ class DIP3Test(BitcoinTestFramework):
mn.rewards_address = node.getnewaddress()
mn.protx_hash = node.protx('register', mn.collateral_txid, mn.collateral_vout, '127.0.0.1:%d' % mn.p2p_port, mn.ownerAddr, mn.operatorAddr, mn.votingAddr, mn.operator_reward, mn.rewards_address, mn.fundsAddr)
node.generate(1)
self.generate(node, 1)
def start_mn(self, mn):
if len(self.nodes) <= mn.idx:
@ -295,7 +295,7 @@ class DIP3Test(BitcoinTestFramework):
def update_mn_payee(self, mn, payee):
self.nodes[0].sendtoaddress(mn.fundsAddr, 0.001)
self.nodes[0].protx('update_registrar', mn.protx_hash, '', '', payee, mn.fundsAddr)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
info = self.nodes[0].protx('info', mn.protx_hash)
assert info['state']['payoutAddress'] == payee
@ -303,7 +303,7 @@ class DIP3Test(BitcoinTestFramework):
def test_protx_update_service(self, mn):
self.nodes[0].sendtoaddress(mn.fundsAddr, 0.001)
self.nodes[0].protx('update_service', mn.protx_hash, '127.0.0.2:%d' % mn.p2p_port, mn.blsMnkey, "", mn.fundsAddr)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
for node in self.nodes:
protx_info = node.protx('info', mn.protx_hash)
@ -313,7 +313,7 @@ class DIP3Test(BitcoinTestFramework):
# undo
self.nodes[0].protx('update_service', mn.protx_hash, '127.0.0.1:%d' % mn.p2p_port, mn.blsMnkey, "", mn.fundsAddr)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
def assert_mnlists(self, mns):
for node in self.nodes:


@ -86,7 +86,7 @@ class DIP3V19Test(DashTestFramework):
evo_info_0 = self.dynamically_add_masternode(evo=True, rnd=7)
assert evo_info_0 is not None
self.nodes[0].generate(8)
self.generate(self.nodes[0], 8)
self.sync_blocks(self.nodes)
self.log.info("Checking that protxs with duplicate EvoNodes fields are rejected")
@ -97,7 +97,7 @@ class DIP3V19Test(DashTestFramework):
assert evo_info_2 is None
evo_info_3 = self.dynamically_add_masternode(evo=True, rnd=9)
assert evo_info_3 is not None
self.nodes[0].generate(8)
self.generate(self.nodes[0], 8)
self.sync_blocks(self.nodes)
self.dynamically_evo_update_service(evo_info_0, 9, should_be_rejected=True)
@ -126,13 +126,13 @@ class DIP3V19Test(DashTestFramework):
funds_address = self.nodes[0].getnewaddress()
fund_txid = self.nodes[0].sendtoaddress(funds_address, 1)
self.wait_for_instantlock(fund_txid, self.nodes[0])
tip = self.nodes[0].generate(1)[0]
tip = self.generate(self.nodes[0], 1)[0]
assert_equal(self.nodes[0].getrawtransaction(fund_txid, 1, tip)['confirmations'], 1)
self.sync_all(self.nodes)
protx_result = self.nodes[0].protx('revoke', revoke_protx, revoke_keyoperator, 1, funds_address)
self.wait_for_instantlock(protx_result, self.nodes[0])
tip = self.nodes[0].generate(1)[0]
tip = self.generate(self.nodes[0], 1)[0]
assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1)
# Revoking a MN results in disconnects. Wait for disconnects to actually happen
# and then reconnect the corresponding node back to let sync_blocks finish correctly.


@ -91,7 +91,7 @@ class LLMQCoinbaseCommitmentsTest(DashTestFramework):
#############################
# Now start testing quorum commitment merkle roots
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
oldhash = self.nodes[0].getbestblockhash()
# Test DIP8 activation once with a pre-existing quorum and once without (we don't know in which order it will activate on mainnet)
@ -248,7 +248,7 @@ class LLMQCoinbaseCommitmentsTest(DashTestFramework):
self.log.info("Wait for dip0008 activation")
while self.nodes[0].getblockcount() < DIP0008_HEIGHT:
self.bump_mocktime(10)
self.nodes[0].generate(10)
self.generate(self.nodes[0], 10)
if slow_mode:
self.sync_blocks()
self.sync_blocks()
@ -301,7 +301,7 @@ class LLMQCoinbaseCommitmentsTest(DashTestFramework):
break
if not found_unconfirmed:
break
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks()
if __name__ == '__main__':


@ -207,7 +207,7 @@ class EstimateFeeTest(BitcoinTestFramework):
tx_kbytes = (len(txhex) // 2) / 1000.0
self.fees_per_kb.append(float(fee) / tx_kbytes)
self.sync_mempools(wait=.1)
mined = mining_node.getblock(mining_node.generate(1)[0], True)["tx"]
mined = mining_node.getblock(self.generate(mining_node, 1)[0], True)["tx"]
self.sync_blocks(wait=.1)
# update which txouts are confirmed
newmem = []
@ -231,7 +231,7 @@ class EstimateFeeTest(BitcoinTestFramework):
# Mine
while len(self.nodes[0].getrawmempool()) > 0:
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
# Repeatedly split those 2 outputs, doubling twice for each rep
# Use txouts to monitor the available utxo, since these won't be tracked in wallet
@ -241,12 +241,12 @@ class EstimateFeeTest(BitcoinTestFramework):
while len(self.txouts) > 0:
split_inputs(self.nodes[0], self.txouts, self.txouts2)
while len(self.nodes[0].getrawmempool()) > 0:
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
# Double txouts2 to txouts
while len(self.txouts2) > 0:
split_inputs(self.nodes[0], self.txouts2, self.txouts)
while len(self.nodes[0].getrawmempool()) > 0:
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
reps += 1
self.log.info("Finished splitting")
@ -279,7 +279,7 @@ class EstimateFeeTest(BitcoinTestFramework):
# Finish by mining a normal-sized block:
while len(self.nodes[1].getrawmempool()) > 0:
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
self.sync_blocks(self.nodes[0:3], wait=.1)
self.log.info("Final estimates after emptying mempools")


@ -89,7 +89,7 @@ class DashGovernanceTest (DashTestFramework):
assert_equal(len(self.nodes[0].gobject("list-prepared")), 0)
self.log.info("Check 1st superblock before v20")
self.nodes[0].generate(3)
self.generate(self.nodes[0], 3)
self.bump_mocktime(3)
self.sync_blocks()
assert_equal(self.nodes[0].getblockcount(), 130)
@ -97,7 +97,7 @@ class DashGovernanceTest (DashTestFramework):
self.check_superblockbudget(False)
self.log.info("Check 2nd superblock before v20")
self.nodes[0].generate(10)
self.generate(self.nodes[0], 10)
self.bump_mocktime(10)
self.sync_blocks()
assert_equal(self.nodes[0].getblockcount(), 140)
@ -117,7 +117,7 @@ class DashGovernanceTest (DashTestFramework):
p1_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_1", self.p1_amount, self.p1_payout_address)
p2_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_2", self.p2_amount, self.p2_payout_address)
self.nodes[0].generate(6)
self.generate(self.nodes[0], 6)
self.bump_mocktime(6)
self.sync_blocks()
@ -165,7 +165,7 @@ class DashGovernanceTest (DashTestFramework):
self.log.info("v20 is expected to be activate since block 160")
assert block_count + n < 160
for _ in range(n - 1):
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.sync_blocks()
self.check_superblockbudget(False)
@ -203,7 +203,7 @@ class DashGovernanceTest (DashTestFramework):
assert_equal(more_votes, False)
self.log.info("Move 1 block enabling the Superblock maturity window on non-isolated nodes")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
assert_equal(self.nodes[0].getblockcount(), 150)
assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["active"], False)
@ -214,7 +214,7 @@ class DashGovernanceTest (DashTestFramework):
assert_equal(has_trigger, False)
self.log.info("Move 1 block inside the Superblock maturity window on non-isolated nodes")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.log.info("There is now new 'winner' who should submit new trigger and vote for it")
@ -232,7 +232,7 @@ class DashGovernanceTest (DashTestFramework):
assert(amount_str in payment_amounts_expected)
self.log.info("Move another block inside the Superblock maturity window on non-isolated nodes")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.log.info("Every non-isolated MN should vote for the same trigger now, no new triggers should be created")
@ -268,7 +268,7 @@ class DashGovernanceTest (DashTestFramework):
assert_equal(more_triggers, False)
self.log.info("Move another block inside the Superblock maturity window")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.sync_blocks()
@ -284,7 +284,7 @@ class DashGovernanceTest (DashTestFramework):
self.log.info("Move another block inside the Superblock maturity window")
with self.nodes[1].assert_debug_log(["CGovernanceManager::VoteGovernanceTriggers"]):
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.sync_blocks()
@ -298,7 +298,7 @@ class DashGovernanceTest (DashTestFramework):
self.log.info("Move remaining n blocks until actual Superblock")
for i in range(n):
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.sync_blocks()
# comparing to 159 because bip9 forks are active when the tip is one block behind the activation height
@ -310,7 +310,7 @@ class DashGovernanceTest (DashTestFramework):
self.log.info("Move a few block past the recent superblock height and make sure we have no new votes")
for _ in range(5):
with self.nodes[1].assert_debug_log("", [f"Voting NO-FUNDING for trigger:{winning_trigger_hash} success"]):
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.sync_blocks()
# Votes on both triggers should NOT change
@ -322,13 +322,13 @@ class DashGovernanceTest (DashTestFramework):
self.log.info("Move remaining n blocks until the next Superblock")
for _ in range(n - 1):
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.sync_blocks()
self.log.info("Wait for new trigger and votes")
self.wait_until(lambda: have_trigger_for_height(self.nodes, 180))
self.log.info("Mine superblock")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.sync_blocks()
assert_equal(self.nodes[0].getblockcount(), 180)
@ -337,14 +337,14 @@ class DashGovernanceTest (DashTestFramework):
self.log.info("Mine and check a couple more superblocks")
for i in range(2):
for _ in range(sb_cycle - 1):
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.sync_blocks()
# Wait for new trigger and votes
sb_block_height = 180 + (i + 1) * sb_cycle
self.wait_until(lambda: have_trigger_for_height(self.nodes, sb_block_height))
# Mine superblock
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(1)
self.sync_blocks()
assert_equal(self.nodes[0].getblockcount(), sb_block_height)


@ -69,7 +69,7 @@ class DashGovernanceTest (DashTestFramework):
n = sb_cycle - self.nodes[0].getblockcount() % sb_cycle
for _ in range(n):
self.bump_mocktime(156)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks()
self.log.info("Prepare proposals")
@ -84,7 +84,7 @@ class DashGovernanceTest (DashTestFramework):
p1_collateral_prepare = self.prepare_object(1, uint256_to_string(0), proposal_time, 1, "Proposal_1", self.p1_amount, self.p1_payout_address)
self.bump_mocktime(60 * 10 + 1)
self.nodes[0].generate(6)
self.generate(self.nodes[0], 6)
self.bump_mocktime(6 * 156)
self.sync_blocks()
@ -116,7 +116,7 @@ class DashGovernanceTest (DashTestFramework):
assert n >= 0
for _ in range(n + 1):
self.bump_mocktime(156)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks(self.nodes[0:5])
self.log.info("Wait for new trigger and votes on non-isolated nodes")
@ -130,7 +130,7 @@ class DashGovernanceTest (DashTestFramework):
self.log.info("Move remaining n blocks until the next Superblock")
for _ in range(n - 1):
self.bump_mocktime(156)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks(self.nodes[0:5])
# Confirm all is good
@ -138,20 +138,20 @@ class DashGovernanceTest (DashTestFramework):
self.log.info("Mine superblock")
self.bump_mocktime(156)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks(self.nodes[0:5])
self.wait_for_chainlocked_block(self.nodes[0], self.nodes[0].getbestblockhash())
self.log.info("Mine (superblock cycle + 1) blocks on non-isolated nodes to forget about this trigger")
for _ in range(sb_cycle):
self.bump_mocktime(156)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks(self.nodes[0:5])
# Should still have at least 1 trigger for the old sb cycle and 0 for the current one
assert len(self.nodes[0].gobject("list", "valid", "triggers")) >= 1
assert not have_trigger_for_height(self.nodes[0:5], sb_block_height + sb_cycle)
self.bump_mocktime(156)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks(self.nodes[0:5])
# Trigger scheduler to mark old triggers for deletion
self.bump_mocktime(5 * 60)
@ -164,7 +164,7 @@ class DashGovernanceTest (DashTestFramework):
self.log.info("Reconnect isolated node and confirm the next ChainLock will let it sync")
self.reconnect_isolated_node(5, 0)
assert_equal(self.nodes[5].mnsync("status")["IsSynced"], False)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
# NOTE: bumping mocktime too much after recent reconnect can result in "timeout downloading block"
self.bump_mocktime(1)
self.sync_blocks()


@ -38,7 +38,7 @@ class LLMQChainLocksTest(DashTestFramework):
self.test_coinbase_best_cl(self.nodes[0], expected_cl_in_cb=False)
# v20 is active, no quorums, no CLs - null CL in CbTx
nocl_block_hash = self.nodes[0].generate(1)[0]
nocl_block_hash = self.generate(self.nodes[0], 1)[0]
self.test_coinbase_best_cl(self.nodes[0], expected_cl_in_cb=True, expected_null_cl=True)
cbtx = self.nodes[0].getspecialtxes(nocl_block_hash, 5, 1, 0, 2)[0]
assert_equal(cbtx["instantlock"], False)
@ -59,7 +59,7 @@ class LLMQChainLocksTest(DashTestFramework):
self.log.info("Mine single block, wait for chainlock")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
self.test_coinbase_best_cl(self.nodes[0])
@ -70,7 +70,7 @@ class LLMQChainLocksTest(DashTestFramework):
assert_equal(cbtx["chainlock"], True)
self.log.info("Mine many blocks, wait for chainlock")
self.nodes[0].generate(20)
self.generate(self.nodes[0], 20)
# We need more time here due to 20 blocks being generated at once
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash(), timeout=30)
self.test_coinbase_best_cl(self.nodes[0])
@ -90,7 +90,7 @@ class LLMQChainLocksTest(DashTestFramework):
self.log.info("Generate new blocks and verify that they are not chainlocked")
previous_block_hash = self.nodes[0].getbestblockhash()
for _ in range(2):
block_hash = self.nodes[0].generate(1)[0]
block_hash = self.generate(self.nodes[0], 1)[0]
self.wait_for_chainlocked_block_all_nodes(block_hash, expected=False)
assert self.nodes[0].getblock(previous_block_hash)["chainlock"]
@ -101,18 +101,18 @@ class LLMQChainLocksTest(DashTestFramework):
self.isolate_node(0)
node0_mining_addr = self.nodes[0].getnewaddress()
node0_tip = self.nodes[0].getbestblockhash()
self.nodes[1].generatetoaddress(5, node0_mining_addr)
self.generatetoaddress(self.nodes[1], 5, node0_mining_addr)
self.wait_for_chainlocked_block(self.nodes[1], self.nodes[1].getbestblockhash())
self.test_coinbase_best_cl(self.nodes[0])
assert self.nodes[0].getbestblockhash() == node0_tip
self.reconnect_isolated_node(0, 1)
self.nodes[1].generatetoaddress(1, node0_mining_addr)
self.generatetoaddress(self.nodes[1], 1, node0_mining_addr)
self.wait_for_chainlocked_block_all_nodes(self.nodes[1].getbestblockhash())
self.test_coinbase_best_cl(self.nodes[0])
self.log.info("Isolate node, mine on another, reconnect and submit CL via RPC")
self.isolate_node(0)
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
self.wait_for_chainlocked_block(self.nodes[1], self.nodes[1].getbestblockhash())
best_0 = self.nodes[0].getbestchainlock()
best_1 = self.nodes[1].getbestchainlock()
@ -134,13 +134,13 @@ class LLMQChainLocksTest(DashTestFramework):
self.log.info("Isolate node, mine on both parts of the network, and reconnect")
self.isolate_node(0)
bad_tip = self.nodes[0].generate(5)[-1]
self.nodes[1].generatetoaddress(1, node0_mining_addr)
bad_tip = self.generate(self.nodes[0], 5)[-1]
self.generatetoaddress(self.nodes[1], 1, node0_mining_addr)
good_tip = self.nodes[1].getbestblockhash()
self.wait_for_chainlocked_block(self.nodes[1], good_tip)
assert not self.nodes[0].getblock(self.nodes[0].getbestblockhash())["chainlock"]
self.reconnect_isolated_node(0, 1)
self.nodes[1].generatetoaddress(1, node0_mining_addr)
self.generatetoaddress(self.nodes[1], 1, node0_mining_addr)
self.wait_for_chainlocked_block_all_nodes(self.nodes[1].getbestblockhash())
self.test_coinbase_best_cl(self.nodes[0])
assert self.nodes[0].getblock(self.nodes[0].getbestblockhash())["previousblockhash"] == good_tip
@ -163,10 +163,10 @@ class LLMQChainLocksTest(DashTestFramework):
assert self.nodes[0].getbestblockhash() == good_tip
self.nodes[0].invalidateblock(good_tip)
self.log.info("Now try to reorg the chain")
self.nodes[0].generate(2)
self.generate(self.nodes[0], 2)
time.sleep(6)
assert self.nodes[1].getbestblockhash() == good_tip
bad_tip = self.nodes[0].generate(2)[-1]
bad_tip = self.generate(self.nodes[0], 2)[-1]
time.sleep(6)
assert self.nodes[0].getbestblockhash() == bad_tip
assert self.nodes[1].getbestblockhash() == good_tip
@ -175,7 +175,7 @@ class LLMQChainLocksTest(DashTestFramework):
self.nodes[0].reconsiderblock(good_tip)
assert self.nodes[0].getbestblockhash() != good_tip
good_fork = good_tip
good_tip = self.nodes[1].generatetoaddress(1, node0_mining_addr)[-1] # this should mark bad_tip as conflicting
good_tip = self.generatetoaddress(self.nodes[1], 1, node0_mining_addr)[-1] # this should mark bad_tip as conflicting
self.wait_for_chainlocked_block_all_nodes(good_tip)
self.test_coinbase_best_cl(self.nodes[0])
assert self.nodes[0].getbestblockhash() == good_tip
@ -203,7 +203,7 @@ class LLMQChainLocksTest(DashTestFramework):
txs.append(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1))
txs += self.create_chained_txs(self.nodes[0], 1)
self.log.info("Assert that after block generation these TXs are NOT included (as they are \"unsafe\")")
node0_tip = self.nodes[0].generate(1)[-1]
node0_tip = self.generate(self.nodes[0], 1)[-1]
for txid in txs:
tx = self.nodes[0].getrawtransaction(txid, 1)
assert "confirmations" not in tx
@ -214,7 +214,7 @@ class LLMQChainLocksTest(DashTestFramework):
self.log.info("Disable LLMQ based InstantSend for a very short time (this never gets propagated to other nodes)")
self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 4070908800)
self.log.info("Now the TXs should be included")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 0)
self.log.info("Assert that TXs got included now")
for txid in txs:
@ -238,7 +238,7 @@ class LLMQChainLocksTest(DashTestFramework):
self.log.info("Test that new node can mine without Chainlock info")
tip_0 = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2)
self.nodes[added_idx].generate(1)
self.generate(self.nodes[added_idx], 1)
self.sync_blocks(self.nodes)
tip_1 = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2)
assert_equal(tip_1['cbTx']['bestCLSignature'], tip_0['cbTx']['bestCLSignature'])
@ -297,12 +297,12 @@ class LLMQChainLocksTest(DashTestFramework):
def test_bestCLHeightDiff(self, mn_rr_active):
# We need 2 blocks we can grab clsigs from
for _ in range(2):
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].generate(1)[0])
self.wait_for_chainlocked_block_all_nodes(self.generate(self.nodes[0], 1)[0])
assert_equal(softfork_active(self.nodes[1], "mn_rr"), mn_rr_active)
tip1_hash = self.nodes[1].getbestblockhash()
self.isolate_node(1)
tip0_hash = self.nodes[0].generate(1)[0]
tip0_hash = self.generate(self.nodes[0], 1)[0]
block_hex = self.nodes[0].getblock(tip0_hash, 0)
mal_block = CBlock()
mal_block.deserialize(BytesIO(bytes.fromhex(block_hex)))


@ -44,7 +44,7 @@ class LLMQConnections(DashTestFramework):
self.wait_for_sporks_same()
self.log.info("mining one block and waiting for all members to connect to each other")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
for mn in self.get_quorum_masternodes(q):
self.wait_for_mnauth(mn.node, 4)


@ -46,7 +46,7 @@ class QuorumDataRecoveryTest(DashTestFramework):
self.connect_nodes(mn.node.index, 0)
if qdata_recovery_enabled:
# trigger recovery threads and wait for them to start
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.bump_mocktime(self.quorum_data_thread_request_timeout_seconds + 1)
time.sleep(1)
@ -177,14 +177,14 @@ class QuorumDataRecoveryTest(DashTestFramework):
self.test_mns(llmq_test_v17, quorum_hash_recover, valid_mns=[last_resort_v17], all_mns=member_mns_recover_v17)
# If recovery would be enabled it would trigger after the mocktime bump / mined block
self.bump_mocktime(self.quorum_data_request_expiration_timeout + 1)
node.generate(1)
self.generate(node, 1)
time.sleep(10)
# Make sure they are still invalid
self.test_mns(llmq_test, quorum_hash_recover, valid_mns=[last_resort_test], all_mns=member_mns_recover_test)
self.test_mns(llmq_test_v17, quorum_hash_recover, valid_mns=[last_resort_v17], all_mns=member_mns_recover_v17)
# Mining a block should not result in a chainlock now because the responsible quorum shouldn't have enough
# valid members.
self.wait_for_chainlocked_block(node, node.generate(1)[0], False, 5)
self.wait_for_chainlocked_block(node, self.generate(node, 1)[0], False, 5)
# Now restart with recovery enabled
self.restart_mns(mns=recover_members, exclude=exclude_members, reindex=True, qdata_recovery_enabled=True)
# Validate that all invalid members recover. Note: recover=True leads to mocktime bumps and mining while waiting


@ -85,7 +85,7 @@ class LLMQDKGErrors(DashTestFramework):
self.wait_for_sporks_same()
for _ in range(blockCount):
self.bump_mocktime(1)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()


@ -89,7 +89,7 @@ class LLMQEvoNodesTest(DashTestFramework):
for i in range(self.evo_count):
evo_info = self.dynamically_add_masternode(evo=True)
evo_protxhash_list.append(evo_info.proTxHash)
self.nodes[0].generate(8)
self.generate(self.nodes[0], 8)
self.sync_blocks(self.nodes)
expectedUpdated.append(evo_info.proTxHash)
@ -116,7 +116,7 @@ class LLMQEvoNodesTest(DashTestFramework):
# Generate a few blocks to make EvoNode/MN analysis on a pure MN RewardReallocation window
self.bump_mocktime(1)
self.nodes[0].generate(4)
self.generate(self.nodes[0], 4)
self.sync_blocks()
self.log.info("Test that EvoNodes are paid 1 block in a row after MN RewardReallocation activation")
@ -167,7 +167,7 @@ class LLMQEvoNodesTest(DashTestFramework):
current_evo = None
consecutive_payments = 0
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
if i % 8 == 0:
self.sync_blocks()
@ -215,7 +215,7 @@ class LLMQEvoNodesTest(DashTestFramework):
collateral_amount = 4000
outputs = {collateral_address: collateral_amount, funds_address: 1}
collateral_txid = self.nodes[0].sendmany("", outputs)
self.nodes[0].generate(8)
self.generate(self.nodes[0], 8)
self.sync_all(self.nodes)
rawtx = self.nodes[0].getrawtransaction(collateral_txid, 1)


@ -71,7 +71,7 @@ class LLMQ_IS_CL_Conflicts(DashTestFramework):
self.mine_cycle_quorum(llmq_type_name='llmq_test_dip0024', llmq_type=103)
# mine single block, wait for chainlock
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
self.test_chainlock_overrides_islock(False)
@ -118,7 +118,7 @@ class LLMQ_IS_CL_Conflicts(DashTestFramework):
cl = self.create_chainlock(self.nodes[0].getblockcount() + 1, block)
if mine_confllicting:
islock_tip = self.nodes[0].generate(1)[-1]
islock_tip = self.generate(self.nodes[0], 1)[-1]
# Make sure we won't sent clsig too early
self.sync_blocks()
@ -223,7 +223,7 @@ class LLMQ_IS_CL_Conflicts(DashTestFramework):
# Mine the conflicting TX into a block
good_tip = self.nodes[0].getbestblockhash()
self.nodes[0].generate(2)
self.generate(self.nodes[0], 2)
self.sync_all()
# Assert that the conflicting tx got mined and the locked TX is not valid
@ -255,7 +255,7 @@ class LLMQ_IS_CL_Conflicts(DashTestFramework):
# Should not allow competing txes now
assert_raises_rpc_error(-26, "tx-txlock-conflict", self.nodes[0].sendrawtransaction, rawtx1)
islock_tip = self.nodes[0].generate(1)[0]
islock_tip = self.generate(self.nodes[0], 1)[0]
self.sync_all()
for node in self.nodes:


@ -54,7 +54,7 @@ class LLMQ_IS_RetroactiveSigning(DashTestFramework):
self.wait_for_sporks_same()
# We have to wait in order to include tx in block
self.bump_mocktime(10 * 60 + 1)
block = self.nodes[0].generate(1)[0]
block = self.generate(self.nodes[0], 1)[0]
self.wait_for_instantlock(txid, self.nodes[0])
self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.wait_for_sporks_same()
@ -70,7 +70,7 @@ class LLMQ_IS_RetroactiveSigning(DashTestFramework):
# are the only "neighbours" in intra-quorum connections for one of them.
self.wait_for_instantlock(txid, self.nodes[0])
self.bump_mocktime(1)
block = self.nodes[0].generate(1)[0]
block = self.generate(self.nodes[0], 1)[0]
self.wait_for_chainlocked_block_all_nodes(block)
self.log.info("testing normal signing with partially known TX")
@ -100,7 +100,7 @@ class LLMQ_IS_RetroactiveSigning(DashTestFramework):
txid = self.nodes[3].sendrawtransaction(rawtx)
# Make node 3 consider the TX as safe
self.bump_mocktime(10 * 60 + 1)
block = self.nodes[3].generatetoaddress(1, self.nodes[0].getnewaddress())[0]
block = self.generatetoaddress(self.nodes[3], 1, self.nodes[0].getnewaddress())[0]
self.reconnect_isolated_node(3, 0)
self.wait_for_chainlocked_block_all_nodes(block)
self.nodes[0].setmocktime(self.mocktime)
@ -120,7 +120,7 @@ class LLMQ_IS_RetroactiveSigning(DashTestFramework):
self.wait_for_instantlock(txid, self.nodes[0], False, 5)
# Make node0 consider the TX as safe
self.bump_mocktime(10 * 60 + 1)
block = self.nodes[0].generate(1)[0]
block = self.generate(self.nodes[0], 1)[0]
assert txid in self.nodes[0].getblock(block, 1)['tx']
self.wait_for_chainlocked_block_all_nodes(block)
@ -166,7 +166,7 @@ class LLMQ_IS_RetroactiveSigning(DashTestFramework):
self.wait_for_instantlock(txid, self.nodes[0], False, 5)
# Make node 0 consider the TX as safe
self.bump_mocktime(10 * 60 + 1)
block = self.nodes[0].generate(1)[0]
block = self.generate(self.nodes[0], 1)[0]
assert txid in self.nodes[0].getblock(block, 1)['tx']
self.wait_for_chainlocked_block_all_nodes(block)
@ -198,7 +198,7 @@ class LLMQ_IS_RetroactiveSigning(DashTestFramework):
self.wait_for_instantlock(txid, self.nodes[0], False, 5)
# Make node 0 consider the TX as safe
self.bump_mocktime(10 * 60 + 1)
block = self.nodes[0].generate(1)[0]
block = self.generate(self.nodes[0], 1)[0]
assert txid in self.nodes[0].getblock(block, 1)['tx']
self.wait_for_chainlocked_block_all_nodes(block)


@ -88,7 +88,7 @@ class LLMQQuorumRotationTest(DashTestFramework):
h_104_1 = QuorumId(104, int(h_1, 16))
self.log.info("Mine single block, wait for chainlock")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
b_h_1 = self.nodes[0].getbestblockhash()
@ -119,7 +119,7 @@ class LLMQQuorumRotationTest(DashTestFramework):
assert_equal(projected_activation_height, softfork_info['height'])
# v20 is active for the next block, not for the tip
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.log.info("Wait for chainlock")
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
@ -144,14 +144,14 @@ class LLMQQuorumRotationTest(DashTestFramework):
# At this point, we want to wait for CLs just before the self.mine_cycle_quorum to diversify the CLs in CbTx.
# Although because here a new quorum cycle is starting, and we don't want to mine them now, mine 8 blocks (to skip all DKG phases)
nodes = [self.nodes[0]] + [mn.node for mn in self.mninfo.copy()]
self.nodes[0].generate(8)
self.generate(self.nodes[0], 8)
self.sync_blocks(nodes)
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
# And for the remaining blocks, enforce new CL in CbTx
skip_count = 23 - (self.nodes[0].getblockcount() % 24)
for _ in range(skip_count):
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks(nodes)
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
@ -202,7 +202,7 @@ class LLMQQuorumRotationTest(DashTestFramework):
self.sync_blocks(nodes)
quorum_list = self.nodes[0].quorum("list", llmq_type)
quorum_blockhash = self.nodes[0].getbestblockhash()
fallback_blockhash = self.nodes[0].generate(1)[0]
fallback_blockhash = self.generate(self.nodes[0], 1)[0]
self.log.info("h("+str(self.nodes[0].getblockcount())+") quorum_list:"+str(quorum_list))
assert_greater_than_or_equal(len(intersection(quorum_members_0_0, quorum_members_1_0)), 3)


@ -107,7 +107,7 @@ class LLMQSimplePoSeTest(DashTestFramework):
skip_count = 24 - (self.nodes[0].getblockcount() % 24)
if skip_count != 0:
self.bump_mocktime(skip_count, nodes=nodes)
self.nodes[0].generate(skip_count)
self.generate(self.nodes[0], skip_count)
self.sync_blocks(nodes)
q = self.nodes[0].getbestblockhash()
@ -141,7 +141,7 @@ class LLMQSimplePoSeTest(DashTestFramework):
self.log.info("Mining final commitment")
self.bump_mocktime(1, nodes=nodes)
self.nodes[0].getblocktemplate() # this calls CreateNewBlock
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks(nodes)
self.log.info("Waiting for quorum to appear in the list")
@ -153,7 +153,7 @@ class LLMQSimplePoSeTest(DashTestFramework):
# Mine 8 (SIGN_HEIGHT_OFFSET) more blocks to make sure that the new quorum gets eligible for signing sessions
self.bump_mocktime(8)
self.nodes[0].generate(8)
self.generate(self.nodes[0], 8)
self.sync_blocks(nodes)
self.log.info("New quorum: height=%d, quorumHash=%s, quorumIndex=%d, minedBlock=%s" % (quorum_info["height"], new_quorum, quorum_info["quorumIndex"], quorum_info["minedBlock"]))
@ -213,7 +213,7 @@ class LLMQSimplePoSeTest(DashTestFramework):
# Make sure protxes are "safe" to mine even when InstantSend and ChainLocks are no longer functional
self.bump_mocktime(60 * 10 + 1)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
# Isolate and re-connect all MNs (otherwise there might be open connections with no MNAUTH for MNs which were banned before)


@ -29,7 +29,7 @@ class LoadblockTest(BitcoinTestFramework):
def run_test(self):
self.nodes[1].setnetworkactive(state=False)
self.nodes[0].generate(COINBASE_MATURITY)
self.generate(self.nodes[0], COINBASE_MATURITY)
# Parsing the url of our node to get settings for config file
data_dir = self.nodes[0].datadir


@ -60,7 +60,7 @@ class MaxUploadTest(BitcoinTestFramework):
self.nodes[0].setmocktime(old_mocktime)
# Generate some old blocks
self.nodes[0].generate(130)
self.generate(self.nodes[0], 130)
# p2p_conns[0] will only request old blocks
# p2p_conns[1] will only request new blocks

View File

@ -57,7 +57,7 @@ class MinimumChainWorkTest(BitcoinTestFramework):
num_blocks_to_generate = int((self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK)
self.log.info("Generating %d blocks on node0", num_blocks_to_generate)
hashes = self.nodes[0].generatetoaddress(num_blocks_to_generate,
hashes = self.generatetoaddress(self.nodes[0], num_blocks_to_generate,
self.nodes[0].get_deterministic_priv_key().address)
self.log.info("Node0 current chain work: %s", self.nodes[0].getblockheader(hashes[-1])['chainwork'])
@ -88,7 +88,7 @@ class MinimumChainWorkTest(BitcoinTestFramework):
assert ("headers" not in peer.last_message or len(peer.last_message["headers"].headers) == 0)
self.log.info("Generating one more block")
self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[0], 1, self.nodes[0].get_deterministic_priv_key().address)
self.log.info("Verifying nodes are all synced")

View File

@ -148,7 +148,7 @@ class MnehfTest(DashTestFramework):
ehf_unknown_tx_sent = self.send_tx(ehf_unknown_tx)
self.log.info(f"unknown ehf tx: {ehf_unknown_tx_sent}")
self.sync_all()
ehf_blockhash = self.nodes[1].generate(1)[0]
ehf_blockhash = self.generate(self.nodes[1], 1)[0]
self.sync_blocks()
self.sync_all()
@ -163,7 +163,7 @@ class MnehfTest(DashTestFramework):
while (node.getblockcount() + 1) % 4 != 0:
self.check_fork('defined')
node.generate(1)
self.generate(node, 1)
self.sync_all()
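A worked example for the boundary loop above, assuming the 4-block signalling window this test mines in:

def blocks_to_window_boundary(height, window=4):
    # Mirrors the loop above: mine single blocks until the next block
    # height is a multiple of `window`.
    mined = 0
    while (height + 1) % window != 0:
        height += 1
        mined += 1
    return mined

assert blocks_to_window_boundary(13) == 2  # mine heights 14 and 15; height 16 starts the next window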
@ -171,13 +171,13 @@ class MnehfTest(DashTestFramework):
for _ in range(4):
self.check_fork('started')
node.generate(1)
self.generate(node, 1)
self.sync_all()
for i in range(4):
self.check_fork('locked_in')
node.generate(1)
self.generate(node, 1)
self.sync_all()
if i == 7:
self.restart_all_nodes()
@ -192,13 +192,13 @@ class MnehfTest(DashTestFramework):
self.log.info("Expecting for fork to be defined in next blocks because no MnEHF tx here")
for _ in range(4):
self.check_fork('defined')
node.generate(1)
self.generate(node, 1)
self.sync_all()
self.log.info("Re-sending MnEHF for new fork")
tx_sent_2 = self.send_tx(ehf_tx)
ehf_blockhash_2 = node.generate(1)[0]
ehf_blockhash_2 = self.generate(node, 1)[0]
self.sync_all()
self.log.info(f"Check MnEhfTx again {tx_sent_2} was mined in {ehf_blockhash_2}")
@ -206,7 +206,7 @@ class MnehfTest(DashTestFramework):
self.log.info(f"Generate some more block to jump to `started` status")
for _ in range(4):
node.generate(1)
self.generate(node, 1)
self.check_fork('started')
self.restart_all_nodes()
self.check_fork('started')
@ -223,14 +223,14 @@ class MnehfTest(DashTestFramework):
self.log.info("Testing duplicate EHF signal with same bit")
ehf_tx_duplicate = self.send_tx(self.create_mnehf(28, pubkey))
tip_blockhash = node.generate(1)[0]
tip_blockhash = self.generate(node, 1)[0]
self.sync_blocks()
block = node.getblock(tip_blockhash)
assert ehf_tx_duplicate in node.getrawmempool() and ehf_tx_duplicate not in block['tx']
self.log.info("Testing EHF signal with same bit but with newer start time")
self.bump_mocktime(int(60 * 60 * 24 * 14), update_schedulers=False)
node.generate(1)
self.generate(node, 1)
self.sync_blocks()
self.restart_all_nodes(params=[self.mocktime, self.mocktime + 1000000])
self.check_fork('defined')
@ -243,7 +243,7 @@ class MnehfTest(DashTestFramework):
for _ in range(4 * 4):
time.sleep(1)
self.bump_mocktime(1)
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
self.sync_all()
self.check_fork('active')

View File

@ -22,17 +22,17 @@ class NewQuorumTypeActivationTest(BitcoinTestFramework):
def run_test(self):
self.log.info(get_bip9_details(self.nodes[0], 'testdummy'))
assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'defined')
self.nodes[0].generate(9)
self.generate(self.nodes[0], 9)
assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'started')
ql = self.nodes[0].quorum("list")
assert_equal(len(ql), 3)
assert "llmq_test_v17" not in ql
self.nodes[0].generate(10)
self.generate(self.nodes[0], 10)
assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'locked_in')
ql = self.nodes[0].quorum("list")
assert_equal(len(ql), 3)
assert "llmq_test_v17" not in ql
self.nodes[0].generate(10)
self.generate(self.nodes[0], 10)
assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'active')
ql = self.nodes[0].quorum("list")
assert_equal(len(ql), 4)
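The test above walks 'testdummy' through the BIP9 states by mining 10-block windows. A hypothetical helper, not part of the framework, that mines until a target status is reached; it assumes the same get_bip9_details helper the test imports is in scope:

def mine_until_bip9_status(framework, node, name, target, max_blocks=100):
    # Mines one block at a time and polls the softfork status until it matches `target`.
    for _ in range(max_blocks):
        if get_bip9_details(node, name)['status'] == target:
            return
        framework.generate(node, 1)
    raise AssertionError(f"{name} never reached status {target!r}")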

View File

@ -64,7 +64,7 @@ class NotificationsTest(DashTestFramework):
self.log.info("test -blocknotify")
block_count = 10
blocks = self.nodes[1].generatetoaddress(block_count, self.nodes[1].getnewaddress() if self.is_wallet_compiled() else ADDRESS_BCRT1_UNSPENDABLE)
blocks = self.generatetoaddress(self.nodes[1], block_count, self.nodes[1].getnewaddress() if self.is_wallet_compiled() else ADDRESS_BCRT1_UNSPENDABLE)
# wait at most 10 seconds for expected number of files before reading the content
self.wait_until(lambda: len(os.listdir(self.blocknotify_dir)) == block_count, timeout=10)
@ -116,7 +116,7 @@ class NotificationsTest(DashTestFramework):
self.log.info("Mine single block, wait for chainlock")
self.bump_mocktime(1)
tip = self.nodes[0].generate(1)[-1]
tip = self.generate(self.nodes[0], 1)[-1]
self.wait_for_chainlocked_block_all_nodes(tip)
# directory content should equal the chainlocked block hash
assert_equal([tip], sorted(os.listdir(self.chainlocknotify_dir)))

View File

@ -67,11 +67,11 @@ class NULLDUMMYTest(BitcoinTestFramework):
# Legacy wallets need to import these so that they are watched by the wallet. This is unnecessary (and does not need to be tested) for descriptor wallets
wmulti.importaddress(self.ms_address)
self.coinbase_blocks = self.nodes[0].generate(2) # block height = 2
self.coinbase_blocks = self.generate(self.nodes[0], 2) # block height = 2
coinbase_txid = []
for i in self.coinbase_blocks:
coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
self.nodes[0].generate(COINBASE_MATURITY) # block height = COINBASE_MATURITY + 2
self.generate(self.nodes[0], COINBASE_MATURITY) # block height = COINBASE_MATURITY + 2
self.lastblockhash = self.nodes[0].getbestblockhash()
self.lastblockheight = COINBASE_MATURITY + 2
self.lastblocktime = self.mocktime + self.lastblockheight

View File

@ -120,9 +120,9 @@ class PruneTest(BitcoinTestFramework):
def create_big_chain(self):
# Start by creating some coinbases we can spend later
self.nodes[1].generate(200)
self.generate(self.nodes[1], 200)
self.sync_blocks(self.nodes[0:2])
self.nodes[0].generate(150)
self.generate(self.nodes[0], 150)
# Then mine enough full blocks to create more than 550MiB of data
mine_large_blocks(self.nodes[0], 645)
@ -214,13 +214,13 @@ class PruneTest(BitcoinTestFramework):
self.log.info("New best height: %d" % self.nodes[1].getblockcount())
# Mine one block to avoid automatic recovery from forks on restart
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
# Disconnect node1 and generate the new chain
self.disconnect_nodes(0, 1)
self.disconnect_nodes(1, 2)
self.log.info("Generating new longer chain of 300 more blocks")
self.nodes[1].generate(299)
self.generate(self.nodes[1], 299)
self.log.info("Reconnect nodes")
self.connect_nodes(0, 1)
@ -272,7 +272,7 @@ class PruneTest(BitcoinTestFramework):
self.nodes[0].invalidateblock(curchainhash)
assert_equal(self.nodes[0].getblockcount(), self.mainchainheight)
assert_equal(self.nodes[0].getbestblockhash(), self.mainchainhash2)
goalbesthash = self.nodes[0].generate(blocks_to_mine)[-1]
goalbesthash = self.generate(self.nodes[0], blocks_to_mine)[-1]
goalbestheight = first_reorg_height + 1
self.log.info("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
@ -315,7 +315,7 @@ class PruneTest(BitcoinTestFramework):
assert_equal(block1_details["nTx"], len(block1_details["tx"]))
# mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight)
node.generate(6)
self.generate(node, 6)
assert_equal(node.getblockchaininfo()["blocks"], 1001)
# prune parameter in the future (block or timestamp) should raise an exception
@ -353,7 +353,7 @@ class PruneTest(BitcoinTestFramework):
assert has_block(2), "blk00002.dat is still there, should be pruned by now"
# advance the tip so blk00002.dat and blk00003.dat can be pruned (the last 288 blocks should now be in blk00004.dat)
node.generate(288)
self.generate(node, 288)
prune(1000)
assert not has_block(2), "blk00002.dat is still there, should be pruned by now"
assert not has_block(3), "blk00003.dat is still there, should be pruned by now"
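The 288 in the comment above is the assumed minimum number of most-recent blocks a pruned node keeps, which is why advancing the tip by that amount releases the older block files; a quick check of the arithmetic:

MIN_BLOCKS_TO_KEEP = 288  # assumed retention window for pruned nodes

def highest_prunable_height(tip_height):
    # Blocks at or below this height may be removed from disk.
    return tip_height - MIN_BLOCKS_TO_KEEP

assert highest_prunable_height(1289) == 1001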

View File

@ -19,7 +19,7 @@ class ReindexTest(BitcoinTestFramework):
self.num_nodes = 1
def reindex(self, justchainstate=False):
self.nodes[0].generatetoaddress(3, self.nodes[0].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[0], 3, self.nodes[0].get_deterministic_priv_key().address)
blockcount = self.nodes[0].getblockcount()
self.stop_nodes()
extra_args = [["-reindex-chainstate" if justchainstate else "-reindex"]]

View File

@ -53,7 +53,7 @@ class SpentIndexTest(BitcoinTestFramework):
self.sync_all()
self.log.info("Mining blocks...")
self.nodes[0].generate(105)
self.generate(self.nodes[0], 105)
self.sync_all()
chain_height = self.nodes[1].getblockcount()
@ -76,7 +76,7 @@ class SpentIndexTest(BitcoinTestFramework):
signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex())
txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
self.log.info("Testing getspentinfo method...")
@ -120,7 +120,7 @@ class SpentIndexTest(BitcoinTestFramework):
assert_equal(txVerbose3["vin"][0]["valueSat"], amount)
# Check the database index
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
txVerbose4 = self.nodes[3].getrawtransaction(txid2, 1)

View File

@ -53,7 +53,7 @@ class SporkTest(BitcoinTestFramework):
assert self.get_test_spork_state(self.nodes[1]) == spork_new_state
# Generate one block to kick off masternode sync, which also starts sporks syncing for node2
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
# connect new node and check spork propagation after restoring from cache
self.connect_nodes(1, 2)

View File

@ -46,7 +46,7 @@ class TimestampIndexTest(BitcoinTestFramework):
self.sync_all()
self.log.info("Mining 5 blocks...")
blockhashes = self.nodes[0].generate(5)
blockhashes = self.generate(self.nodes[0], 5)
low = self.nodes[0].getblock(blockhashes[0])["time"]
high = self.nodes[0].getblock(blockhashes[4])["time"]
self.sync_all()

View File

@ -38,7 +38,7 @@ class TxIndexTest(BitcoinTestFramework):
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(105)
self.generate(self.nodes[0], 105)
self.sync_all()
chain_height = self.nodes[1].getblockcount()
@ -58,7 +58,7 @@ class TxIndexTest(BitcoinTestFramework):
signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex())
txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
# Check verbose raw transaction results

View File

@ -31,13 +31,13 @@ class UTXOSetHashTest(BitcoinTestFramework):
# Generate 100 blocks and remove the first since we plan to spend its
# coinbase
block_hashes = wallet.generate(1) + node.generate(99)
block_hashes = self.generate(wallet, 1) + self.generate(node, 99)
blocks = list(map(lambda block: from_hex(CBlock(), node.getblock(block, False)), block_hashes))
blocks.pop(0)
# Create a spending transaction and mine a block which includes it
txid = wallet.send_self_transfer(from_node=node)['txid']
tx_block = node.generateblock(output=wallet.get_address(), transactions=[txid])['hash']
tx_block = self.generateblock(node, output=wallet.get_address(), transactions=[txid])['hash']
blocks.append(from_hex(CBlock(), node.getblock(tx_block, False)))
# Serialize the outputs that should be in the UTXO set and add them to

View File

@ -65,12 +65,12 @@ class VersionBitsWarningTest(BitcoinTestFramework):
node_deterministic_address = node.get_deterministic_priv_key().address
# Mine one period worth of blocks
node.generatetoaddress(VB_PERIOD, node_deterministic_address)
self.generatetoaddress(node, VB_PERIOD, node_deterministic_address)
self.log.info("Check that there is no warning if previous VB_BLOCKS have <VB_THRESHOLD blocks with unknown versionbits version.")
# Build one period of blocks with < VB_THRESHOLD blocks signaling some unknown bit
self.send_blocks_with_version(peer, VB_THRESHOLD - 1, VB_UNKNOWN_VERSION)
node.generatetoaddress(VB_PERIOD - VB_THRESHOLD + 1, node_deterministic_address)
self.generatetoaddress(node, VB_PERIOD - VB_THRESHOLD + 1, node_deterministic_address)
# Check that we're not getting any versionbit-related errors in get*info()
assert not VB_PATTERN.match(node.getmininginfo()["warnings"])
@ -78,21 +78,21 @@ class VersionBitsWarningTest(BitcoinTestFramework):
# Build one period of blocks with VB_THRESHOLD blocks signaling some unknown bit
self.send_blocks_with_version(peer, VB_THRESHOLD, VB_UNKNOWN_VERSION)
node.generatetoaddress(VB_PERIOD - VB_THRESHOLD, node_deterministic_address)
self.generatetoaddress(node, VB_PERIOD - VB_THRESHOLD, node_deterministic_address)
self.log.info("Check that there is a warning if previous VB_BLOCKS have >=VB_THRESHOLD blocks with unknown versionbits version.")
# Mine a period worth of expected blocks so the generic block-version warning
# is cleared. This will move the versionbit state to ACTIVE.
node.generatetoaddress(VB_PERIOD, node_deterministic_address)
self.generatetoaddress(node, VB_PERIOD, node_deterministic_address)
# Stop-start the node. This is required because dashd will only warn once about unknown versions or unknown rules activating.
self.restart_node(0)
# Generating one block guarantees that we'll get out of IBD
node.generatetoaddress(1, node_deterministic_address)
self.generatetoaddress(node, 1, node_deterministic_address)
self.wait_until(lambda: not node.getblockchaininfo()['initialblockdownload'])
# Generating one more block will be enough to generate an error.
node.generatetoaddress(1, node_deterministic_address)
self.generatetoaddress(node, 1, node_deterministic_address)
# Check that get*info() shows the versionbits unknown rules warning
assert WARN_UNKNOWN_RULES_ACTIVE in node.getmininginfo()["warnings"]
assert WARN_UNKNOWN_RULES_ACTIVE in node.getnetworkinfo()["warnings"]
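The two batches mined above straddle the signalling threshold; the expectation being tested reduces to a count over the last VB_PERIOD blocks (the constant below is illustrative, the test imports its own):

VB_THRESHOLD = 108  # illustrative only

def unknown_version_warning_expected(num_signalling_blocks, threshold=VB_THRESHOLD):
    # A warning fires once at least `threshold` blocks in the window signal an
    # unknown version, which is why the first batch stops one block short.
    return num_signalling_blocks >= threshold

assert not unknown_version_warning_expected(VB_THRESHOLD - 1)
assert unknown_version_warning_expected(VB_THRESHOLD)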

View File

@ -77,7 +77,7 @@ class TestBitcoinCli(BitcoinTestFramework):
def run_test(self):
"""Main test logic"""
self.nodes[0].generate(BLOCKS)
self.generate(self.nodes[0], BLOCKS)
self.log.info("Compare responses from getblockchaininfo RPC and `dash-cli getblockchaininfo`")
cli_response = self.nodes[0].cli.getblockchaininfo()
@ -175,7 +175,7 @@ class TestBitcoinCli(BitcoinTestFramework):
w1.sendtoaddress(w3.getnewaddress(), amounts[2])
# Mine a block to confirm; adds a block reward (500 DASH) to the default wallet.
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.log.info("Test -getinfo with multiple wallets and -rpcwallet returns specified wallet balance")
for i in range(len(wallets)):
@ -308,7 +308,7 @@ class TestBitcoinCli(BitcoinTestFramework):
assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 1, 2, 3).echo)
else:
self.log.info("*** Wallet not compiled; cli getwalletinfo and -getinfo wallet tests skipped")
self.nodes[0].generate(25) # maintain block parity with the wallet_compiled conditional branch
self.generate(self.nodes[0], 25) # maintain block parity with the wallet_compiled conditional branch
self.log.info("Test -version with node stopped")
self.stop_node(0)

View File

@ -84,9 +84,9 @@ class RESTTest (BitcoinTestFramework):
# Random address so node1's balance doesn't increase
not_related_address = "yj949n1UH6fDhw6HtVE5VMj2iSTaSWBMcW"
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
self.nodes[1].generatetoaddress(100, not_related_address)
self.generatetoaddress(self.nodes[1], 100, not_related_address)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 500)
@ -117,7 +117,7 @@ class RESTTest (BitcoinTestFramework):
self.log.info("Query an unspent TXO using the /getutxos URI")
self.nodes[1].generatetoaddress(1, not_related_address)
self.generatetoaddress(self.nodes[1], 1, not_related_address)
self.sync_all()
bb_hash = self.nodes[0].getbestblockhash()
@ -192,7 +192,7 @@ class RESTTest (BitcoinTestFramework):
json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spent))
assert_equal(len(json_obj['utxos']), 0)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending))
@ -213,7 +213,7 @@ class RESTTest (BitcoinTestFramework):
long_uri = '/'.join(['{}-{}'.format(txid, n) for n in range(15)])
self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=200)
self.nodes[0].generate(1) # generate block to not affect upcoming tests
self.generate(self.nodes[0], 1) # generate block to not affect upcoming tests
self.sync_all()
self.log.info("Test the /block, /blockhashbyheight and /headers URIs")
@ -284,7 +284,7 @@ class RESTTest (BitcoinTestFramework):
assert_equal(json_obj[0][key], rpc_block_json[key])
# See if we can get 5 headers in one response
self.nodes[1].generate(5)
self.generate(self.nodes[1], 5)
self.sync_all()
json_obj = self.test_rest_request("/headers/5/{}".format(bb_hash))
assert_equal(len(json_obj), 5) # now we should have 5 header objects
@ -319,7 +319,7 @@ class RESTTest (BitcoinTestFramework):
assert_equal(json_obj[tx]['depends'], txs[i - 1:i])
# Now mine the transactions
newblockhash = self.nodes[1].generate(1)
newblockhash = self.generate(self.nodes[1], 1)
self.sync_all()
# Check if the 3 tx show up in the new block

View File

@ -145,7 +145,7 @@ class UTXOCacheTracepointTest(BitcoinTestFramework):
def run_test(self):
self.wallet = MiniWallet(self.nodes[0])
self.wallet.generate(101)
self.generate(self.wallet, 101)
self.test_uncache()
self.test_add_spent()
@ -235,7 +235,7 @@ class UTXOCacheTracepointTest(BitcoinTestFramework):
# mining, we invalidate the block, start the tracing, and then trace the cache
# changes to the active utxo cache.
self.log.info("mine and invalidate a block that is later reconsidered")
block_hash = self.wallet.generate(1)[0]
block_hash = self.generate(self.wallet, 1)[0]
self.nodes[0].invalidateblock(block_hash)
self.log.info(
@ -385,7 +385,7 @@ class UTXOCacheTracepointTest(BitcoinTestFramework):
BLOCKS_TO_MINE = 450
self.log.info(f"mine {BLOCKS_TO_MINE} blocks to be able to prune")
self.wallet.generate(BLOCKS_TO_MINE)
self.generate(self.wallet, BLOCKS_TO_MINE)
# we added BLOCKS_TO_MINE coinbase UTXOs to the cache
possible_cache_sizes = {BLOCKS_TO_MINE}
expected_flushes.append(

View File

@ -120,7 +120,7 @@ class ValidationTracepointTest(BitcoinTestFramework):
handle_blockconnected)
self.log.info(f"mine {BLOCKS_EXPECTED} blocks")
block_hashes = self.nodes[0].generatetoaddress(
block_hashes = self.generatetoaddress(self.nodes[0],
BLOCKS_EXPECTED, ADDRESS_BCRT1_UNSPENDABLE)
for block_hash in block_hashes:
expected_blocks[block_hash] = self.nodes[0].getblock(block_hash, 2)

View File

@ -188,7 +188,7 @@ class ZMQTest (BitcoinTestFramework):
num_blocks = 5
self.log.info("Generate %(n)d blocks (and %(n)d coinbase txes)" % {"n": num_blocks})
genhashes = self.nodes[0].generatetoaddress(num_blocks, ADDRESS_BCRT1_UNSPENDABLE)
genhashes = self.generatetoaddress(self.nodes[0], num_blocks, ADDRESS_BCRT1_UNSPENDABLE)
self.sync_all()
@ -229,7 +229,7 @@ class ZMQTest (BitcoinTestFramework):
# Mining the block with this tx should result in second notification
# after coinbase tx notification
self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
hashtx.receive()
txid = hashtx.receive()
assert_equal(payment_txid, txid.hex())
@ -261,14 +261,14 @@ class ZMQTest (BitcoinTestFramework):
# Generate 1 block in nodes[0] with 1 mempool tx and receive all notifications
payment_txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
disconnect_block = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
disconnect_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)[0]
disconnect_cb = self.nodes[0].getblock(disconnect_block)["tx"][0]
assert_equal(self.nodes[0].getbestblockhash(), hashblock.receive().hex())
assert_equal(hashtx.receive().hex(), payment_txid)
assert_equal(hashtx.receive().hex(), disconnect_cb)
# Generate 2 blocks in nodes[1] to a different address to ensure split
connect_blocks = self.nodes[1].generatetoaddress(2, ADDRESS_BCRT1_P2SH_OP_TRUE)
connect_blocks = self.generatetoaddress(self.nodes[1], 2, ADDRESS_BCRT1_P2SH_OP_TRUE)
# nodes[0] will reorg chain after connecting back nodes[1]
self.connect_nodes(0, 1)
@ -312,13 +312,13 @@ class ZMQTest (BitcoinTestFramework):
seq_num = 1
# Generate 1 block in nodes[0] and receive all notifications
dc_block = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
dc_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)[0]
# Note: We are not notified of any block transactions, coinbase or mined
assert_equal((self.nodes[0].getbestblockhash(), "C", None), seq.receive_sequence())
# Generate 2 blocks in nodes[1] to a different address to ensure a chain split
self.nodes[1].generatetoaddress(2, ADDRESS_BCRT1_P2SH_OP_TRUE)
self.generatetoaddress(self.nodes[1], 2, ADDRESS_BCRT1_P2SH_OP_TRUE)
# nodes[0] will reorg chain after connecting back nodes[1]
self.connect_nodes(0, 1)
@ -344,7 +344,7 @@ class ZMQTest (BitcoinTestFramework):
# though the mempool sequence number does go up by the number of transactions
# removed from the mempool by the block mining it.
mempool_size = len(self.nodes[0].getrawmempool())
c_block = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
c_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)[0]
self.sync_all()
# Make sure the number of mined transactions matches the number of txs out of mempool
mempool_size_delta = mempool_size - len(self.nodes[0].getrawmempool())
@ -384,7 +384,7 @@ class ZMQTest (BitcoinTestFramework):
# Other things may happen but aren't wallet-deterministic so we don't test for them currently
self.nodes[0].reconsiderblock(best_hash)
self.nodes[1].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
self.generatetoaddress(self.nodes[1], 1, ADDRESS_BCRT1_UNSPENDABLE)
self.sync_all()
self.log.info("Evict mempool transaction by block conflict")
@ -429,7 +429,7 @@ class ZMQTest (BitcoinTestFramework):
# Last tx
assert_equal((orig_txid_2, "A", mempool_seq), seq.receive_sequence())
mempool_seq += 1
self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
self.sync_all() # want to make sure we didn't break "consensus" for other tests
def test_mempool_sync(self):
@ -479,7 +479,7 @@ class ZMQTest (BitcoinTestFramework):
for _ in range(num_txs):
txids.append(self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=0.1))
self.sync_all()
self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
final_txid = self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=0.1)
# 3) Consume ZMQ backlog until we get to "now" for the mempool snapshot
@ -535,7 +535,7 @@ class ZMQTest (BitcoinTestFramework):
# 5) If you miss a zmq/mempool sequence number, go back to step (2)
self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
def test_multiple_interfaces(self):
# Set up two subscribers with different addresses
@ -548,7 +548,7 @@ class ZMQTest (BitcoinTestFramework):
], sync_blocks=False)
# Generate 1 block in nodes[0] and receive all notifications
self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
# Should receive the same block hash on both subscribers
assert_equal(self.nodes[0].getbestblockhash(), subscribers[0].receive().hex())
@ -565,7 +565,7 @@ class ZMQTest (BitcoinTestFramework):
], ipv6=True)
# Generate 1 block in nodes[0]
self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
# Should receive the same block hash
assert_equal(self.nodes[0].getbestblockhash(), subscribers[0].receive().hex())

View File

@ -170,7 +170,7 @@ class DashZMQTest (DashTestFramework):
def generate_blocks(self, num_blocks):
mninfos_online = self.mninfo.copy()
nodes = [self.nodes[0]] + [mn.node for mn in mninfos_online]
self.nodes[0].generate(num_blocks)
self.generate(self.nodes[0], num_blocks)
self.sync_blocks(nodes)
def subscribe(self, publishers):
@ -214,7 +214,7 @@ class DashZMQTest (DashTestFramework):
# Subscribe to recovered signature messages
self.subscribe(recovered_sig_publishers)
# Generate a ChainLock and make sure this leads to valid recovered sig ZMQ messages
rpc_last_block_hash = self.nodes[0].generate(1)[0]
rpc_last_block_hash = self.generate(self.nodes[0], 1)[0]
self.wait_for_chainlocked_block_all_nodes(rpc_last_block_hash)
height = self.nodes[0].getblockcount()
rpc_request_id = hash256(ser_string(b"clsig") + struct.pack("<I", height))[::-1].hex()
@ -238,7 +238,7 @@ class DashZMQTest (DashTestFramework):
# Subscribe to ChainLock messages
self.subscribe(chain_lock_publishers)
# Generate ChainLock
generated_hash = self.nodes[0].generate(1)[0]
generated_hash = self.generate(self.nodes[0], 1)[0]
self.wait_for_chainlocked_block_all_nodes(generated_hash)
rpc_best_chain_lock = self.nodes[0].getbestchainlock()
rpc_best_chain_lock_hash = rpc_best_chain_lock["blockhash"]
@ -327,7 +327,7 @@ class DashZMQTest (DashTestFramework):
assert zmq_double_spend_tx_1.is_valid()
assert_equal(zmq_double_spend_tx_1.hash, rpc_raw_tx_1['txid'])
# No islock notifications when tx is not received yet
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
rpc_raw_tx_3 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)
isdlock = self.create_isdlock(rpc_raw_tx_3['hex'])
self.test_node.send_islock(isdlock)
@ -375,7 +375,7 @@ class DashZMQTest (DashTestFramework):
proposal_hex = ''.join(format(x, '02x') for x in json.dumps(proposal_data).encode())
collateral = self.nodes[0].gobject("prepare", "0", proposal_rev, proposal_time, proposal_hex)
self.wait_for_instantlock(collateral, self.nodes[0])
self.nodes[0].generate(6)
self.generate(self.nodes[0], 6)
self.sync_blocks()
rpc_proposal_hash = self.nodes[0].gobject("submit", "0", proposal_rev, proposal_time, proposal_hex, collateral)
# Validate hashgovernanceobject

View File

@ -76,7 +76,7 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
outputs=[{node.getnewaddress(): 0.3}, {node.getnewaddress(): 49}],
))['hex']
txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, maxfeerate=0)
node.generate(1)
self.generate(node, 1)
self.mempool_size = 0
self.check_mempool_result(
result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': 'txn-already-known'}],
@ -171,7 +171,7 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
outputs=[{node.getnewaddress(): 0.1}]
))['hex']
txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both, maxfeerate=0)
node.generate(1)
self.generate(node, 1)
self.mempool_size = 0
# Now see if we can add the coins back to the utxo set by sending the exact txs again
self.check_mempool_result(

View File

@ -41,8 +41,8 @@ class MempoolCompatibilityTest(BitcoinTestFramework):
old_node, new_node = self.nodes
new_wallet = MiniWallet(new_node)
new_wallet.generate(1)
new_node.generate(100)
self.generate(new_wallet, 1)
self.generate(new_node, 100)
# Sync the nodes to ensure old_node has the block that contains the coinbase that new_wallet will spend.
# Otherwise, because coinbases are only valid in a block and not as loose txns, if the nodes aren't synced
# unbroadcasted_tx won't pass old_node's `MemPoolAccept::PreChecks`.

View File

@ -35,8 +35,8 @@ class MempoolExpiryTest(BitcoinTestFramework):
self.wallet = MiniWallet(node)
# Add enough mature utxos to the wallet so that all txs spend confirmed coins.
self.wallet.generate(4)
node.generate(100)
self.generate(self.wallet, 4)
self.generate(node, 100)
# Send a parent transaction that will expire.
parent_txid = self.wallet.send_self_transfer(from_node=node)['txid']
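The 4 + 100 split above follows the regtest coinbase maturity rule: a coinbase can only be spent once 100 confirmations have passed. A quick check of that arithmetic:

COINBASE_MATURITY = 100

def coinbase_spendable(coinbase_height, next_block_height):
    # Consensus rule: spendable once next_block_height - coinbase_height >= COINBASE_MATURITY.
    return next_block_height - coinbase_height >= COINBASE_MATURITY

# 4 wallet blocks + 100 more puts the tip at 104, so the next block is 105 and
# even the newest wallet coinbase (height 4) is mature: 105 - 4 >= 100.
assert all(coinbase_spendable(h, next_block_height=105) for h in range(1, 5))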

View File

@ -38,7 +38,7 @@ class MempoolPackageLimitsTest(BitcoinTestFramework):
self.address = node.get_deterministic_priv_key().address
self.coins = []
# The last 100 coinbase transactions are premature
for b in node.generatetoaddress(200, self.address)[:100]:
for b in self.generatetoaddress(node, 200, self.address)[:100]:
coinbase = node.getblock(blockhash=b, verbosity=2)["tx"][0]
self.coins.append({
"txid": coinbase["txid"],
@ -81,7 +81,7 @@ class MempoolPackageLimitsTest(BitcoinTestFramework):
assert_equal(txres["package-error"], "package-mempool-limits")
# Clear mempool and check that the package passes now
node.generate(1)
self.generate(node, 1)
assert all([res["allowed"] for res in node.testmempoolaccept(rawtxs=chain_hex)])
def test_chain_limits(self):
@ -171,7 +171,7 @@ class MempoolPackageLimitsTest(BitcoinTestFramework):
assert_equal(txres["package-error"], "package-mempool-limits")
# Clear mempool and check that the package passes now
node.generate(1)
self.generate(node, 1)
assert all([res["allowed"] for res in node.testmempoolaccept(rawtxs=package_hex)])
def test_anc_count_limits(self):
@ -227,7 +227,7 @@ class MempoolPackageLimitsTest(BitcoinTestFramework):
assert_equal(txres["package-error"], "package-mempool-limits")
# Clear mempool and check that the package passes now
node.generate(1)
self.generate(node, 1)
assert all([res["allowed"] for res in node.testmempoolaccept(rawtxs=package_hex)])
def test_anc_count_limits_2(self):
@ -285,7 +285,7 @@ class MempoolPackageLimitsTest(BitcoinTestFramework):
assert_equal(txres["package-error"], "package-mempool-limits")
# Clear mempool and check that the package passes now
node.generate(1)
self.generate(node, 1)
assert all([res["allowed"] for res in node.testmempoolaccept(rawtxs=[pc_hex, pd_hex])])
def test_anc_count_limits_bushy(self):
@ -335,7 +335,7 @@ class MempoolPackageLimitsTest(BitcoinTestFramework):
assert_equal(txres["package-error"], "package-mempool-limits")
# Clear mempool and check that the package passes now
node.generate(1)
self.generate(node, 1)
assert all([res["allowed"] for res in node.testmempoolaccept(rawtxs=package_hex)])
def test_anc_size_limits(self):
@ -394,7 +394,7 @@ class MempoolPackageLimitsTest(BitcoinTestFramework):
assert_equal(txres["package-error"], "package-mempool-limits")
# Clear mempool and check that the package passes now
node.generate(1)
self.generate(node, 1)
assert all([res["allowed"] for res in node.testmempoolaccept(rawtxs=[pc_hex, pd_hex])])
def test_desc_size_limits(self):
@ -465,7 +465,7 @@ class MempoolPackageLimitsTest(BitcoinTestFramework):
assert_equal(txres["package-error"], "package-mempool-limits")
# Clear mempool and check that the package passes now
node.generate(1)
self.generate(node, 1)
assert all([res["allowed"] for res in node.testmempoolaccept(rawtxs=package_hex)])
if __name__ == "__main__":

View File

@ -30,7 +30,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
def run_test(self):
# Mine some blocks and have them mature.
self.nodes[0].generate(COINBASE_MATURITY + 1)
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
utxo = self.nodes[0].listunspent(10)
txid = utxo[0]['txid']
vout = utxo[0]['vout']

View File

@ -45,7 +45,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
def run_test(self):
# Mine some blocks and have them mature.
peer_inv_store = self.nodes[0].add_p2p_connection(P2PTxInvStore()) # keep track of invs
self.nodes[0].generate(COINBASE_MATURITY + 1)
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
utxo = self.nodes[0].listunspent(10)
txid = utxo[0]['txid']
vout = utxo[0]['vout']
@ -173,7 +173,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
# Check that prioritising a tx before it's added to the mempool works
# First clear the mempool by mining a block.
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks()
assert_equal(len(self.nodes[0].getrawmempool()), 0)
# Prioritise a transaction that has been mined, then add it back to the
@ -264,7 +264,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
# Test reorg handling
# First, the basics:
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks()
self.nodes[1].invalidateblock(self.nodes[0].getbestblockhash())
self.nodes[1].reconsiderblock(self.nodes[0].getbestblockhash())
@ -311,7 +311,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
value = sent_value
# Mine these in a block
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
# Now generate tx8, with a big fee

View File

@ -173,7 +173,7 @@ class MempoolPersistTest(BitcoinTestFramework):
self.start_node(0)
# clear out mempool
node0.generate(1)
self.generate(node0, 1)
# ensure node0 doesn't have any connections
# make a transaction that will remain in the unbroadcast set

View File

@ -65,7 +65,7 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
wallet.sendrawtransaction(from_node=self.nodes[0], tx_hex=spend_2['hex'])
wallet.sendrawtransaction(from_node=self.nodes[0], tx_hex=spend_3['hex'])
self.log.info("Generate a block")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.log.info("Check that time-locked transaction is still too immature to spend")
assert_raises_rpc_error(-26, 'non-final', self.nodes[0].sendrawtransaction, timelock_tx)
@ -78,7 +78,7 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
self.log.info("Broadcast and mine spend_3_1")
spend_3_1_id = self.nodes[0].sendrawtransaction(spend_3_1['hex'])
self.log.info("Generate a block")
last_block = self.nodes[0].generate(1)
last_block = self.generate(self.nodes[0], 1)
# Sync blocks, so that peer 1 gets the block before timelock_tx
# Otherwise, peer 1 would put the timelock_tx in m_recent_rejects
self.sync_all()

View File

@ -19,8 +19,8 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
wallet = MiniWallet(node)
# Add enough mature utxos to the wallet so that all txs spend confirmed coins
wallet.generate(3)
node.generate(100)
self.generate(wallet, 3)
self.generate(node, 100)
# Spend block 1/2/3's coinbase transactions
# Mine a block
@ -33,9 +33,9 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
# ... make sure all the transactions are confirmed again
blocks = []
spends1_ids = [wallet.send_self_transfer(from_node=node)['txid'] for _ in range(3)]
blocks.extend(node.generate(1))
blocks.extend(self.generate(node, 1))
spends2_ids = [wallet.send_self_transfer(from_node=node)['txid'] for _ in range(3)]
blocks.extend(node.generate(1))
blocks.extend(self.generate(node, 1))
spends_ids = set(spends1_ids + spends2_ids)
@ -52,7 +52,7 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
assert_equal(set(node.getrawmempool()), spends_ids)
# Generate another block, they should all get mined
blocks = node.generate(1)
blocks = self.generate(node, 1)
# mempool should be empty, all txns confirmed
assert_equal(set(node.getrawmempool()), set())
confirmed_txns = set(node.getblock(blocks[0])['tx'])

View File

@ -49,7 +49,7 @@ class MempoolSpendCoinbaseTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getrawmempool(), [spend_mature_id])
# mine a block, mature one should get confirmed
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
assert_equal(set(self.nodes[0].getrawmempool()), set())
# ... and now previously immature can be spent:

View File

@ -111,7 +111,7 @@ class MempoolUnbroadcastTest(BitcoinTestFramework):
# a block
removal_reason = "Removed {} from set of unbroadcast txns before confirmation that txn was sent out".format(txhsh)
with node.assert_debug_log([removal_reason]):
node.generate(1)
self.generate(node, 1)
if __name__ == "__main__":
MempoolUnbroadcastTest().main()

View File

@ -91,7 +91,7 @@ class MempoolUpdateFromBlockTest(BitcoinTestFramework):
if tx_count in n_tx_to_mine:
# The created transactions are mined into blocks in batches.
self.log.info('The batch of {} transactions has been accepted into the mempool.'.format(len(self.nodes[0].getrawmempool())))
block_hash = self.nodes[0].generate(1)[0]
block_hash = self.generate(self.nodes[0], 1)[0]
if not first_block_hash:
first_block_hash = block_hash
assert_equal(len(self.nodes[0].getrawmempool()), 0)

View File

@ -52,7 +52,7 @@ class MiningTest(BitcoinTestFramework):
self.log.info('Create some old blocks')
for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 156, 156):
self.bump_mocktime(156)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
mining_info = self.nodes[0].getmininginfo()
assert_equal(mining_info['blocks'], 200)
assert_equal(mining_info['currentblocktx'], 0)
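The loop above advances mocktime in 156-second steps across a 200-step range, which is what makes the 200-block assertion hold:

start = 1_417_713_337  # placeholder timestamp; the test uses its own TIME_GENESIS_BLOCK
steps = range(start, start + 200 * 156, 156)
assert len(steps) == 200  # one generated block per step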
@ -90,7 +90,7 @@ class MiningTest(BitcoinTestFramework):
assert_equal(mining_info['pooledtx'], 0)
# Mine a block to leave initial block download
node.generatetoaddress(1, node.get_deterministic_priv_key().address)
self.generatetoaddress(node, 1, node.get_deterministic_priv_key().address)
tmpl = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
self.log.info("getblocktemplate: Test capability advertised")
assert 'proposal' in tmpl['capabilities']
@ -245,7 +245,7 @@ class MiningTest(BitcoinTestFramework):
assert chain_tip(block.hash, status='active', branchlen=0) in filter_tip_keys(node.getchaintips())
# Building a few blocks should give the same results
node.generatetoaddress(10, node.get_deterministic_priv_key().address)
self.generatetoaddress(node, 10, node.get_deterministic_priv_key().address)
assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader(hexdata=CBlockHeader(bad_block_time).serialize().hex()))
assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader(hexdata=CBlockHeader(bad_block2).serialize().hex()))
node.submitheader(hexdata=CBlockHeader(block).serialize().hex())

View File

@ -34,7 +34,7 @@ class GetBlockTemplateLPTest(BitcoinTestFramework):
def run_test(self):
self.log.info("Warning: this test will take about 70 seconds in the best case. Be patient.")
self.log.info("Test that longpollid doesn't change between successive getblocktemplate() invocations if nothing else happens")
self.nodes[0].generate(10)
self.generate(self.nodes[0], 10)
template = self.nodes[0].getblocktemplate()
longpollid = template['longpollid']
template2 = self.nodes[0].getblocktemplate()
@ -62,7 +62,7 @@ class GetBlockTemplateLPTest(BitcoinTestFramework):
assert not thr.is_alive()
# Add enough mature utxos to the wallets, so that all txs spend confirmed coins
self.nodes[0].generate(100)
self.generate(self.nodes[0], 100)
self.sync_blocks()
self.log.info("Test that introducing a new transaction into the mempool will terminate the longpoll")

View File

@ -69,7 +69,7 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
# also check that a different entry in the cheapest bucket is NOT mined
self.nodes[0].prioritisetransaction(txids[0][0], int(3*base_fee*COIN))
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
mempool = self.nodes[0].getrawmempool()
self.log.info("Assert that prioritised transaction was mined")
@ -99,7 +99,7 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
# the other high fee transactions. Keep mining until our mempool has
# decreased by the total size of the high-fee transactions that we calculated above.
while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]):
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
# High fee transaction should not have been mined, but other high fee rate
# transactions should have been.

View File

@ -29,7 +29,7 @@ class BlockSyncTest(BitcoinTestFramework):
def run_test(self):
self.log.info("Setup network: node0->node1->node2")
self.log.info("Mining one block on node0 and verify all nodes sync")
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.log.info("Success!")

View File

@ -56,17 +56,17 @@ class CompactFiltersTest(BitcoinTestFramework):
peer_1 = self.nodes[1].add_p2p_connection(FiltersClient())
# Nodes 0 & 1 share the same first 999 blocks in the chain.
self.nodes[0].generate(999)
self.generate(self.nodes[0], 999)
self.sync_blocks(timeout=600)
# Create stale blocks by disconnecting nodes 0 & 1, mining, then reconnecting
self.disconnect_nodes(0, 1)
stale_block_hash = self.nodes[0].generate(1)[0]
stale_block_hash = self.generate(self.nodes[0], 1)[0]
self.nodes[0].syncwithvalidationinterfacequeue()
assert_equal(self.nodes[0].getblockcount(), 1000)
self.nodes[1].generate(1001)
self.generate(self.nodes[1], 1001)
assert_equal(self.nodes[1].getblockcount(), 2000)
# Check that nodes have signalled NODE_COMPACT_FILTERS correctly.

View File

@ -78,7 +78,7 @@ class P2PBlocksOnly(BitcoinTestFramework):
self.log.info("Relay-permission peer's transaction is accepted and relayed")
self.nodes[0].disconnect_p2ps()
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
def blocks_relay_conn_tests(self):
self.log.info('Tests with node in normal mode with block-relay-only connections')

View File

@ -155,7 +155,7 @@ class CompactBlocksTest(BitcoinTestFramework):
block = self.build_block_on_tip(self.nodes[0])
self.test_node.send_and_ping(msg_block(block))
assert int(self.nodes[0].getbestblockhash(), 16) == block.sha256
self.nodes[0].generatetoaddress(COINBASE_MATURITY, self.nodes[0].getnewaddress())
self.generatetoaddress(self.nodes[0], COINBASE_MATURITY, self.nodes[0].getnewaddress())
total_value = block.vtx[0].vout[0].nValue
out_value = total_value // 10
@ -198,7 +198,7 @@ class CompactBlocksTest(BitcoinTestFramework):
def check_announcement_of_new_block(node, peer, predicate):
peer.clear_block_announcement()
block_hash = int(node.generate(1)[0], 16)
block_hash = int(self.generate(node, 1)[0], 16)
peer.wait_for_block_announcement(block_hash, timeout=30)
assert peer.block_announced
@ -261,7 +261,7 @@ class CompactBlocksTest(BitcoinTestFramework):
# This test actually causes dashd to (reasonably!) disconnect us, so do this last.
def test_invalid_cmpctblock_message(self):
self.nodes[0].generate(COINBASE_MATURITY + 1)
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
block = self.build_block_on_tip(self.nodes[0])
cmpct_block = P2PHeaderAndShortIDs()
@ -278,7 +278,7 @@ class CompactBlocksTest(BitcoinTestFramework):
def test_compactblock_construction(self, test_node):
node = self.nodes[0]
# Generate a bunch of transactions.
node.generate(COINBASE_MATURITY + 1)
self.generate(node, COINBASE_MATURITY + 1)
num_transactions = 25
address = node.getnewaddress()
@ -296,7 +296,7 @@ class CompactBlocksTest(BitcoinTestFramework):
# Now mine a block, and look at the resulting compact block.
test_node.clear_block_announcement()
block_hash = int(node.generate(1)[0], 16)
block_hash = int(self.generate(node, 1)[0], 16)
# Store the raw block in our internal format.
block = from_hex(CBlock(), node.getblock("%064x" % block_hash, False))
@ -613,7 +613,7 @@ class CompactBlocksTest(BitcoinTestFramework):
new_blocks = []
for _ in range(MAX_CMPCTBLOCK_DEPTH + 1):
test_node.clear_block_announcement()
new_blocks.append(node.generate(1)[0])
new_blocks.append(self.generate(node, 1)[0])
test_node.wait_until(test_node.received_block_announcement, timeout=30)
test_node.clear_block_announcement()
@ -621,7 +621,7 @@ class CompactBlocksTest(BitcoinTestFramework):
test_node.wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30)
test_node.clear_block_announcement()
node.generate(1)
self.generate(node, 1)
test_node.wait_until(test_node.received_block_announcement, timeout=30)
test_node.clear_block_announcement()
with p2p_lock:
@ -789,7 +789,7 @@ class CompactBlocksTest(BitcoinTestFramework):
def run_test(self):
# Get the nodes out of IBD
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
# Setup the p2p connections
self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn())

View File

@ -32,7 +32,7 @@ class P2PCompactBlocksBlocksOnly(BitcoinTestFramework):
self.sync_all()
def build_block_on_tip(self):
blockhash = self.nodes[2].generate(1)[0]
blockhash = self.generate(self.nodes[2], 1)[0]
block_hex = self.nodes[2].getblock(blockhash=blockhash, verbosity=0)
block = from_hex(CBlock(), block_hex)
block.rehash()

View File

@ -30,7 +30,7 @@ class CompactBlocksConnectionTest(BitcoinTestFramework):
def relay_block_through(self, peer):
"""Relay a new block through peer peer, and return HB status between 1 and [2,3,4,5]."""
self.connect_nodes(peer, 0)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_blocks()
self.disconnect_nodes(peer, 0)
status_to = [self.peer_info(1, i)['bip152_hb_to'] for i in range(2, 6)]
@ -44,7 +44,7 @@ class CompactBlocksConnectionTest(BitcoinTestFramework):
# Connect everyone to node 0, and mine some blocks to get all nodes out of IBD.
for i in range(1, 6):
self.connect_nodes(i, 0)
self.nodes[0].generate(2)
self.generate(self.nodes[0], 2)
self.sync_blocks()
for i in range(1, 6):
self.disconnect_nodes(i, 0)

View File

@ -53,7 +53,7 @@ class P2PEvict(BitcoinTestFramework):
protected_peers = set() # peers that we expect to be protected from eviction
current_peer = -1
node = self.nodes[0]
node.generatetoaddress(COINBASE_MATURITY + 1, node.get_deterministic_priv_key().address)
self.generatetoaddress(node, COINBASE_MATURITY + 1, node.get_deterministic_priv_key().address)
self.log.info("Create 4 peers and protect them from eviction by sending us a block")
for _ in range(4):

View File

@ -149,7 +149,7 @@ class FilterTest(BitcoinTestFramework):
assert not filter_peer.tx_received
# Clear the mempool so that this transaction does not impact subsequent tests
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
def test_filter(self, filter_peer):
# Set the bloomfilter using filterload
@ -159,14 +159,14 @@ class FilterTest(BitcoinTestFramework):
filter_address = self.nodes[0].decodescript(filter_peer.watch_script_pubkey)['address']
self.log.info('Check that we receive merkleblock and tx if the filter matches a tx in a block')
block_hash = self.nodes[0].generatetoaddress(1, filter_address)[0]
block_hash = self.generatetoaddress(self.nodes[0], 1, filter_address)[0]
txid = self.nodes[0].getblock(block_hash)['tx'][0]
filter_peer.wait_for_merkleblock(block_hash)
filter_peer.wait_for_tx(txid)
self.log.info('Check that we only receive a merkleblock if the filter does not match a tx in a block')
filter_peer.tx_received = False
block_hash = self.nodes[0].generatetoaddress(1, self.nodes[0].getnewaddress())[0]
block_hash = self.generatetoaddress(self.nodes[0], 1, self.nodes[0].getnewaddress())[0]
filter_peer.wait_for_merkleblock(block_hash)
assert not filter_peer.tx_received
@ -194,7 +194,7 @@ class FilterTest(BitcoinTestFramework):
filter_peer.merkleblock_received = False
filter_peer.tx_received = False
with self.nodes[0].assert_debug_log(expected_msgs=['received getdata']):
block_hash = self.nodes[0].generatetoaddress(1, self.nodes[0].getnewaddress())[0]
block_hash = self.generatetoaddress(self.nodes[0], 1, self.nodes[0].getnewaddress())[0]
filter_peer.wait_for_inv([CInv(MSG_BLOCK, int(block_hash, 16))])
filter_peer.sync_with_ping()
assert not filter_peer.merkleblock_received

View File

@ -69,7 +69,7 @@ class P2PFingerprintTest(BitcoinTestFramework):
self.nodes[0].setmocktime(int(time.time()) - 60 * 24 * 60 * 60)
# Generating a chain of 10 blocks
block_hashes = self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address)
block_hashes = self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address)
# Create longer chain starting 2 blocks before current tip
height = len(block_hashes) - 2
@ -98,7 +98,7 @@ class P2PFingerprintTest(BitcoinTestFramework):
# Longest chain is extended so stale is much older than chain tip
self.nodes[0].setmocktime(0)
block_hash = int(self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)[-1], 16)
block_hash = int(self.generatetoaddress(self.nodes[0], 1, self.nodes[0].get_deterministic_priv_key().address)[-1], 16)
assert_equal(self.nodes[0].getblockcount(), 14)
node0.wait_for_block(block_hash, timeout=3)

View File

@ -66,7 +66,7 @@ class P2PIBDTxRelayTest(BitcoinTestFramework):
self.nodes[0].disconnect_p2ps()
# Come out of IBD by generating a block
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
self.log.info("Check that nodes process the same transaction, even when unsolicited, when no longer in IBD")

View File

@ -47,7 +47,7 @@ class InstantSendTest(DashTestFramework):
for node in self.nodes:
self.wait_for_instantlock(is_id, node)
self.bump_mocktime(1)
self.nodes[0].generate(2)
self.generate(self.nodes[0], 2)
self.sync_all()
# create doublespending transaction, but don't relay it
@ -92,7 +92,7 @@ class InstantSendTest(DashTestFramework):
self.bump_mocktime(1)
# make sure the above TX is on node0
self.sync_mempools([n for n in self.nodes if n is not isolated])
self.nodes[0].generate(2)
self.generate(self.nodes[0], 2)
self.sync_all()
def test_mempool_doublespend(self):
@ -108,7 +108,7 @@ class InstantSendTest(DashTestFramework):
for node in self.nodes:
self.wait_for_instantlock(is_id, node)
self.bump_mocktime(1)
self.nodes[0].generate(2)
self.generate(self.nodes[0], 2)
self.sync_all()
# create doublespending transaction, but don't relay it
@ -141,7 +141,7 @@ class InstantSendTest(DashTestFramework):
assert_equal(receiver.getwalletinfo()["balance"], 0)
# mine more blocks
self.bump_mocktime(1)
self.nodes[0].generate(2)
self.generate(self.nodes[0], 2)
self.sync_all()
if __name__ == '__main__':

View File

@ -51,7 +51,7 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
peer.send_blocks_and_test([block1], node, success=True)
self.log.info("Mature the block.")
node.generatetoaddress(100, node.get_deterministic_priv_key().address)
self.generatetoaddress(node, 100, node.get_deterministic_priv_key().address)
best_block = node.getblock(node.getbestblockhash())
tip = int(node.getbestblockhash(), 16)

View File

@ -16,7 +16,7 @@ class InvalidLocatorTest(BitcoinTestFramework):
def run_test(self):
node = self.nodes[0] # convenience reference to the node
node.generatetoaddress(1, node.get_deterministic_priv_key().address) # Get node out of IBD
self.generatetoaddress(node, 1, node.get_deterministic_priv_key().address) # Get node out of IBD
self.log.info('Test max locator size')
block_count = node.getblockcount()

View File

@ -76,7 +76,7 @@ class InvalidTxRequestTest(BitcoinTestFramework):
node.p2ps[0].send_blocks_and_test([block1, block2], node, success=True)
self.log.info("Mature the block.")
self.nodes[0].generatetoaddress(100, self.nodes[0].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[0], 100, self.nodes[0].get_deterministic_priv_key().address)
# Iterate through a list of known invalid transaction types, ensuring each is
# rejected. Some are consensus invalid and some just violate policy.

View File

@ -111,7 +111,7 @@ class P2PLeakTest(BitcoinTestFramework):
no_verack_idle_peer.wait_until(lambda: no_verack_idle_peer.version_received)
# Mine a block and make sure that it's not sent to the connected peers
self.nodes[0].generate(nblocks=1)
self.generate(self.nodes[0], nblocks=1)
# Give the node enough time to possibly leak out a message
time.sleep(5)

View File

@ -26,8 +26,8 @@ class P2PLeakTxTest(BitcoinTestFramework):
gen_node = self.nodes[0] # The block and tx generating node
miniwallet = MiniWallet(gen_node)
# Add enough mature utxos to the wallet, so that all txs spend confirmed coins
miniwallet.generate(1)
gen_node.generate(100)
self.generate(miniwallet, 1)
self.generate(gen_node, 100)
inbound_peer = self.nodes[0].add_p2p_connection(P2PNode()) # An "attacking" inbound peer

View File

@ -57,7 +57,7 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
self.log.info("Mine enough blocks to reach the NODE_NETWORK_LIMITED range.")
self.connect_nodes(0, 1)
blocks = self.nodes[1].generatetoaddress(292, self.nodes[1].get_deterministic_priv_key().address)
blocks = self.generatetoaddress(self.nodes[1], 292, self.nodes[1].get_deterministic_priv_key().address)
self.sync_blocks([self.nodes[0], self.nodes[1]])
self.log.info("Make sure we can max retrieve block at tip-288.")
@ -89,7 +89,7 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
self.disconnect_all()
# mine 10 blocks on node 0 (pruned node)
self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address)
# connect node1 (non pruned) with node0 (pruned) and check if they can sync
self.connect_nodes(0, 1)

View File

@ -115,7 +115,7 @@ class P2PPermissionsTests(BitcoinTestFramework):
self.nodes[1].assert_start_raises_init_error(["-whitebind=noban@127.0.0.1", "-bind=127.0.0.1", "-listen=0"], "Cannot set -bind or -whitebind together with -listen=0", match=ErrorMatch.PARTIAL_REGEX)
def check_tx_relay(self):
block_op_true = self.nodes[0].getblock(self.nodes[0].generatetoaddress(100, ADDRESS_BCRT1_P2SH_OP_TRUE)[0])
block_op_true = self.nodes[0].getblock(self.generatetoaddress(self.nodes[0], 100, ADDRESS_BCRT1_P2SH_OP_TRUE)[0])
self.sync_all()
self.log.debug("Create a connection from a forcerelay peer that rebroadcasts raw txs")

View File

@ -135,7 +135,7 @@ class QuorumDataMessagesTest(DashTestFramework):
self.bump_mocktime(bump_seconds)
# Test with/without expired request cleanup
if self.cleanup:
node0.generate(1)
self.generate(node0, 1)
self.sync_blocks()
def test_basics():

View File

@ -204,7 +204,7 @@ class SendHeadersTest(BitcoinTestFramework):
# Clear out block announcements from each p2p listener
[x.clear_block_announcements() for x in self.nodes[0].p2ps]
self.nodes[0].generatetoaddress(count, self.nodes[0].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[0], count, self.nodes[0].get_deterministic_priv_key().address)
return int(self.nodes[0].getbestblockhash(), 16)
def mine_reorg(self, length):
@ -215,7 +215,7 @@ class SendHeadersTest(BitcoinTestFramework):
return the list of block hashes newly mined."""
# make sure all invalidated blocks are node0's
self.nodes[0].generatetoaddress(length, self.nodes[0].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[0], length, self.nodes[0].get_deterministic_priv_key().address)
self.sync_blocks(self.nodes, wait=0.1)
for x in self.nodes[0].p2ps:
x.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16))
@ -224,7 +224,7 @@ class SendHeadersTest(BitcoinTestFramework):
tip_height = self.nodes[1].getblockcount()
hash_to_invalidate = self.nodes[1].getblockhash(tip_height - (length - 1))
self.nodes[1].invalidateblock(hash_to_invalidate)
all_hashes = self.nodes[1].generatetoaddress(length + 1, self.nodes[1].get_deterministic_priv_key().address) # Must be longer than the orig chain
all_hashes = self.generatetoaddress(self.nodes[1], length + 1, self.nodes[1].get_deterministic_priv_key().address) # Must be longer than the orig chain
self.sync_blocks(self.nodes, wait=0.1)
return [int(x, 16) for x in all_hashes]
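A worked example of the mine_reorg() arithmetic above: invalidating the block at tip_height - (length - 1) removes the last `length` blocks, and mining length + 1 blocks on the surviving branch makes the new chain strictly longer.

def reorg_heights(tip_height, length):
    first_invalidated = tip_height - (length - 1)
    new_tip = (first_invalidated - 1) + (length + 1)
    return first_invalidated, new_tip

assert reorg_heights(tip_height=100, length=7) == (94, 101)  # new tip beats the old one by exactly 1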
@ -239,7 +239,7 @@ class SendHeadersTest(BitcoinTestFramework):
self.test_nonnull_locators(test_node, inv_node)
def test_null_locators(self, test_node, inv_node):
tip = self.nodes[0].getblockheader(self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)[0])
tip = self.nodes[0].getblockheader(self.generatetoaddress(self.nodes[0], 1, self.nodes[0].get_deterministic_priv_key().address)[0])
tip_hash = int(tip["hash"], 16)
inv_node.check_last_inv_announcement(inv=[tip_hash])

View File

@ -139,7 +139,7 @@ class SendHeadersTest(BitcoinTestFramework):
# Clear out block announcements from each p2p listener
[p2p.clear_block_announcements() for p2p in self.nodes[0].p2ps]
self.nodes[0].generate(count)
self.generate(self.nodes[0], count)
return int(self.nodes[0].getbestblockhash(), 16)
def mine_reorg(self, length):
@ -149,7 +149,7 @@ class SendHeadersTest(BitcoinTestFramework):
to-be-reorged-out blocks are mined, so that we don't break later tests.
return the list of block hashes newly mined."""
self.nodes[0].generate(length) # make sure all invalidated blocks are node0's
self.generate(self.nodes[0], length) # make sure all invalidated blocks are node0's
self.sync_blocks(self.nodes, wait=0.1)
for p2p in self.nodes[0].p2ps:
p2p.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16))
@ -158,7 +158,7 @@ class SendHeadersTest(BitcoinTestFramework):
tip_height = self.nodes[1].getblockcount()
hash_to_invalidate = self.nodes[1].getblockhash(tip_height - (length - 1))
self.nodes[1].invalidateblock(hash_to_invalidate)
all_hashes = self.nodes[1].generate(length + 1) # Must be longer than the orig chain
all_hashes = self.generate(self.nodes[1], length + 1) # Must be longer than the orig chain
self.sync_blocks(self.nodes, wait=0.1)
return [int(hash_value, 16) for hash_value in all_hashes]
@ -183,7 +183,7 @@ class SendHeadersTest(BitcoinTestFramework):
a block which hasn't been validated. Verifies only the first request returns
headers.
"""
tip = self.nodes[0].getblockheader(self.nodes[0].generate(1)[0])
tip = self.nodes[0].getblockheader(self.generate(self.nodes[0], 1)[0])
tip_hash = int(tip["hash"], 16)
inv_node.check_last_inv_announcement(inv=[tip_hash])

View File

@ -41,7 +41,7 @@ class AddressesDeprecationTest(BitcoinTestFramework):
txid = node.sendrawtransaction(hexstring=tx_signed, maxfeerate=0)
self.log.info("Test RPCResult scriptPubKey no longer returns the fields addresses or reqSigs by default")
hash = node.generateblock(output=node.getnewaddress(), transactions=[txid])['hash']
hash = self.generateblock(node, output=node.getnewaddress(), transactions=[txid])['hash']
# Ensure both nodes have the newly generated block on disk.
self.sync_blocks()
script_pub_key = node.getblock(blockhash=hash, verbose=2)['tx'][-1]['vout'][0]['scriptPubKey']

View File

@ -86,7 +86,7 @@ class BlockchainTest(BitcoinTestFramework):
for t in range(TIME_GENESIS_BLOCK, TIME_RANGE_END, TIME_RANGE_STEP):
# 156 sec steps from genesis block time
set_node_times(self.nodes, t)
self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_P2SH_OP_TRUE)
self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_P2SH_OP_TRUE)
assert_equal(self.nodes[0].getblockchaininfo()['blocks'], 200)
def _test_getblockchaininfo(self):
@ -347,12 +347,12 @@ class BlockchainTest(BitcoinTestFramework):
def _test_stopatheight(self):
assert_equal(self.nodes[0].getblockcount(), HEIGHT)
self.nodes[0].generatetoaddress(6, ADDRESS_BCRT1_P2SH_OP_TRUE)
self.generatetoaddress(self.nodes[0], 6, ADDRESS_BCRT1_P2SH_OP_TRUE)
assert_equal(self.nodes[0].getblockcount(), HEIGHT + 6)
self.log.debug('Node should not stop at this height')
assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3))
try:
self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_P2SH_OP_TRUE)
self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_P2SH_OP_TRUE)
except (ConnectionError, http.client.BadStatusLine):
pass # The node already shut down before response
self.log.debug('Node should stop at this height...')
@ -408,7 +408,7 @@ class BlockchainTest(BitcoinTestFramework):
fee_per_kb = 1000 * fee_per_byte
miniwallet.send_self_transfer(fee_rate=fee_per_kb, from_node=node)
blockhash = node.generate(1)[0]
blockhash = self.generate(node, 1)[0]
self.log.info("Test getblock with verbosity 1 doesn't include fee")
block = node.getblock(blockhash, 1)

View File

@ -44,7 +44,7 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
self.check_addmultisigaddress_errors()
self.log.info('Generating blocks ...')
node0.generate(149)
self.generate(node0, 149)
self.sync_all()
self.moved = 0
@ -101,7 +101,7 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
def checkbalances(self):
node0, node1, node2 = self.nodes
node0.generate(1)
self.generate(node0, 1)
self.sync_all()
bal0 = node0.getbalance()
@ -158,7 +158,7 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
value = tx["vout"][vout]["value"]
prevtxs = [{"txid": txid, "vout": vout, "scriptPubKey": scriptPubKey, "redeemScript": mredeem, "amount": value}]
node0.generate(1)
self.generate(node0, 1)
outval = value - decimal.Decimal("0.00001000")
rawtx = node2.createrawtransaction([{"txid": txid, "vout": vout}], [{self.final: outval}])
@ -181,7 +181,7 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
self.moved += outval
tx = node0.sendrawtransaction(rawtx3["hex"], 0)
blk = node0.generate(1)[0]
blk = self.generate(node0, 1)[0]
assert tx in node0.getblock(blk)["tx"]
txinfo = node0.getrawtransaction(tx, True, blk)

View File

@ -22,7 +22,7 @@ class DeprecatedRpcTest(BitcoinTestFramework):
# In run_test:
# self.log.info("Test generate RPC")
# assert_raises_rpc_error(-32, 'The wallet generate rpc method is deprecated', self.nodes[0].rpc.generate, 1)
# self.nodes[1].generate(1)
# self.generate(self.nodes[1], 1)
self.log.info("No tested deprecated RPC methods")
if __name__ == '__main__':

View File

@ -23,7 +23,7 @@ class DumptxoutsetTest(BitcoinTestFramework):
node = self.nodes[0]
mocktime = node.getblockheader(node.getblockhash(0))['time'] + 1
node.setmocktime(mocktime)
node.generate(COINBASE_MATURITY)
self.generate(node, COINBASE_MATURITY)
FILENAME = 'txoutset.dat'
out = node.dumptxoutset(FILENAME)

View File

@ -67,9 +67,9 @@ class RawTransactionsTest(BitcoinTestFramework):
# = 2 bytes * minRelayTxFeePerByte
self.fee_tolerance = 2 * self.min_relay_tx_fee / 1000
self.nodes[2].generate(1)
self.generate(self.nodes[2], 1)
self.sync_all()
self.nodes[0].generate(121)
self.generate(self.nodes[0], 121)
self.sync_all()
self.test_change_position()
@ -127,7 +127,7 @@ class RawTransactionsTest(BitcoinTestFramework):
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 50)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
wwatch.unloadwallet()
@ -491,7 +491,7 @@ class RawTransactionsTest(BitcoinTestFramework):
# send 12 DASH to msig addr
self.nodes[0].sendtoaddress(mSigObj, 12)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
@ -502,7 +502,7 @@ class RawTransactionsTest(BitcoinTestFramework):
signed_psbt = w2.walletprocesspsbt(funded_psbt)
final_psbt = w2.finalizepsbt(signed_psbt['psbt'])
self.nodes[2].sendrawtransaction(final_psbt['hex'])
self.nodes[2].generate(1)
self.generate(self.nodes[2], 1)
self.sync_all()
# Make sure funds are received at node1.
@ -564,7 +564,7 @@ class RawTransactionsTest(BitcoinTestFramework):
self.nodes[1].walletpassphrase("test", 600)
signedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex'])
self.nodes[1].sendrawtransaction(signedTx['hex'])
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
self.sync_all()
# Make sure funds are received at node1.
@ -576,12 +576,12 @@ class RawTransactionsTest(BitcoinTestFramework):
# Empty node1, send some small coins from node0 to node1.
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
self.sync_all()
for _ in range(20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
# Fund a tx with ~20 small inputs.
@ -604,12 +604,12 @@ class RawTransactionsTest(BitcoinTestFramework):
# Again, empty node1, send some small coins from node0 to node1.
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
self.sync_all()
for _ in range(20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
# Fund a tx with ~20 small inputs.
@ -621,7 +621,7 @@ class RawTransactionsTest(BitcoinTestFramework):
fundedTx = self.nodes[1].fundrawtransaction(rawtx)
fundedAndSignedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex'])
self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
self.sync_all()
assert_equal(oldBalance+Decimal('500.19000000'), self.nodes[0].getbalance()) #0.19+block reward
@ -699,7 +699,7 @@ class RawTransactionsTest(BitcoinTestFramework):
signedtx = self.nodes[0].signrawtransactionwithwallet(signedtx["hex"])
assert signedtx["complete"]
self.nodes[0].sendrawtransaction(signedtx["hex"])
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
wwatch.unloadwallet()
@ -936,7 +936,7 @@ class RawTransactionsTest(BitcoinTestFramework):
wallet.sendrawtransaction(signedtx['hex'])
# And we can also use them once they're confirmed.
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
rawtx = wallet.createrawtransaction([], [{self.nodes[2].getnewaddress(): 3}])
fundedtx = wallet.fundrawtransaction(rawtx, {"include_unsafe": True})
tx_dec = wallet.decoderawtransaction(fundedtx['hex'])

View File

@ -23,13 +23,13 @@ class GenerateBlockTest(BitcoinTestFramework):
self.log.info('Generate an empty block to address')
address = node.getnewaddress()
hash = node.generateblock(address, [])['hash']
hash = self.generateblock(node, address, [])['hash']
block = node.getblock(hash, 2)
assert_equal(len(block['tx']), 1)
assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'], address)
self.log.info('Generate an empty block to a descriptor')
hash = node.generateblock('addr(' + address + ')', [])['hash']
hash = self.generateblock(node, 'addr(' + address + ')', [])['hash']
block = node.getblock(hash, 2)
assert_equal(len(block['tx']), 1)
assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'], address)
@ -37,13 +37,13 @@ class GenerateBlockTest(BitcoinTestFramework):
self.log.info('Generate an empty block to a combo descriptor with compressed pubkey')
combo_key = '0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798'
combo_address = 'yWziQMcwmKjRdzi7eWjwiQX8EjWcd6dSg6'
hash = node.generateblock('combo(' + combo_key + ')', [])['hash']
hash = self.generateblock(node, 'combo(' + combo_key + ')', [])['hash']
block = node.getblock(hash, 2)
assert_equal(len(block['tx']), 1)
assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'], combo_address)
# Generate 110 blocks to spend
node.generatetoaddress(110, address)
self.generatetoaddress(node, 110, address)
# Generate some extra mempool transactions to verify they don't get mined
for _ in range(10):
@ -51,7 +51,7 @@ class GenerateBlockTest(BitcoinTestFramework):
self.log.info('Generate block with txid')
txid = node.sendtoaddress(address, 1)
hash = node.generateblock(address, [txid])['hash']
hash = self.generateblock(node, address, [txid])['hash']
block = node.getblock(hash, 1)
assert_equal(len(block['tx']), 2)
assert_equal(block['tx'][1], txid)
@ -60,7 +60,7 @@ class GenerateBlockTest(BitcoinTestFramework):
utxos = node.listunspent(addresses=[address])
raw = node.createrawtransaction([{'txid':utxos[0]['txid'], 'vout':utxos[0]['vout']}],[{address:1}])
signed_raw = node.signrawtransactionwithwallet(raw)['hex']
hash = node.generateblock(address, [signed_raw])['hash']
hash = self.generateblock(node, address, [signed_raw])['hash']
block = node.getblock(hash, 1)
assert_equal(len(block['tx']), 2)
txid = block['tx'][1]
@ -72,26 +72,26 @@ class GenerateBlockTest(BitcoinTestFramework):
txid1 = node.sendrawtransaction(signed_raw1)
raw2 = node.createrawtransaction([{'txid':txid1, 'vout':0}],[{address:0.999}])
signed_raw2 = node.signrawtransactionwithwallet(raw2)['hex']
assert_raises_rpc_error(-25, 'TestBlockValidity failed: bad-txns-inputs-missingorspent', node.generateblock, address, [signed_raw2, txid1])
assert_raises_rpc_error(-25, 'TestBlockValidity failed: bad-txns-inputs-missingorspent', self.generateblock, node, address, [signed_raw2, txid1])
self.log.info('Fail to generate block with txid not in mempool')
missing_txid = '0000000000000000000000000000000000000000000000000000000000000000'
assert_raises_rpc_error(-5, 'Transaction ' + missing_txid + ' not in mempool.', node.generateblock, address, [missing_txid])
assert_raises_rpc_error(-5, 'Transaction ' + missing_txid + ' not in mempool.', self.generateblock, node, address, [missing_txid])
self.log.info('Fail to generate block with invalid raw tx')
invalid_raw_tx = '0000'
assert_raises_rpc_error(-22, 'Transaction decode failed for ' + invalid_raw_tx, node.generateblock, address, [invalid_raw_tx])
assert_raises_rpc_error(-22, 'Transaction decode failed for ' + invalid_raw_tx, self.generateblock, node, address, [invalid_raw_tx])
self.log.info('Fail to generate block with invalid address/descriptor')
assert_raises_rpc_error(-5, 'Invalid address or descriptor', node.generateblock, '1234', [])
assert_raises_rpc_error(-5, 'Invalid address or descriptor', self.generateblock, node, '1234', [])
self.log.info('Fail to generate block with a ranged descriptor')
ranged_descriptor = 'pkh(tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp/0/*)'
assert_raises_rpc_error(-8, 'Ranged descriptor not accepted. Maybe pass through deriveaddresses first?', node.generateblock, ranged_descriptor, [])
assert_raises_rpc_error(-8, 'Ranged descriptor not accepted. Maybe pass through deriveaddresses first?', self.generateblock, node, ranged_descriptor, [])
self.log.info('Fail to generate block with a descriptor missing a private key')
child_descriptor = 'pkh(tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp/0\'/0)'
assert_raises_rpc_error(-5, 'Cannot derive script without private keys', node.generateblock, child_descriptor, [])
assert_raises_rpc_error(-5, 'Cannot derive script without private keys', self.generateblock, node, child_descriptor, [])
if __name__ == '__main__':
GenerateBlockTest().main()
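Note how the error-path assertions in this file change shape: because the framework wrapper takes the node as its first positional argument, assert_raises_rpc_error is now handed self.generateblock followed by the node, where it previously received the node's bound RPC method. The helper below is a hypothetical stand-in (not the framework's assert_raises_rpc_error) that only assumes the assertion applies the callable to the arguments that follow it.

    def expect_rpc_error(substring, fun, *args, **kwargs):
        # Hypothetical stand-in: call `fun` with the remaining arguments
        # and check that the raised error mentions the expected text.
        try:
            fun(*args, **kwargs)
        except Exception as err:
            assert substring in str(err), f"unexpected error: {err}"
        else:
            raise AssertionError("expected an RPC error, none was raised")

    # Before the migration, the node's bound RPC was the callable:
    #   expect_rpc_error('bad-txns-inputs-missingorspent',
    #                    node.generateblock, address, [signed_raw2, txid1])
    # After it, the framework method is the callable, so the node shifts
    # into the argument list:
    #   expect_rpc_error('bad-txns-inputs-missingorspent',
    #                    self.generateblock, node, address, [signed_raw2, txid1])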

View File

@ -21,8 +21,8 @@ class GetBlockFilterTest(BitcoinTestFramework):
# Create two chains by disconnecting nodes 0 & 1, mining, then reconnecting
self.disconnect_nodes(0, 1)
self.nodes[0].generate(3)
self.nodes[1].generate(4)
self.generate(self.nodes[0], 3)
self.generate(self.nodes[1], 4)
assert_equal(self.nodes[0].getblockcount(), 3)
chain0_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]

View File

@ -27,11 +27,11 @@ class GetBlockFromPeerTest(BitcoinTestFramework):
def run_test(self):
self.log.info("Mine 4 blocks on Node 0")
self.nodes[0].generate(4)
self.generate(self.nodes[0], 4)
assert_equal(self.nodes[0].getblockcount(), 204)
self.log.info("Mine competing 3 blocks on Node 1")
self.nodes[1].generate(3)
self.generate(self.nodes[1], 3)
assert_equal(self.nodes[1].getblockcount(), 203)
short_tip = self.nodes[1].getbestblockhash()

View File

@ -43,11 +43,11 @@ class GetblockstatsTest(BitcoinTestFramework):
def generate_test_data(self, filename):
self.nodes[0].setmocktime(self.mocktime)
self.nodes[0].generate(COINBASE_MATURITY + 1)
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
address = self.nodes[0].get_deterministic_priv_key().address
self.nodes[0].sendtoaddress(address=address, amount=10, subtractfeefromamount=True)
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.sync_all()
self.nodes[0].sendtoaddress(address=address, amount=10, subtractfeefromamount=True)
@ -55,7 +55,7 @@ class GetblockstatsTest(BitcoinTestFramework):
self.nodes[0].settxfee(amount=0.003)
self.nodes[0].sendtoaddress(address=address, amount=1, subtractfeefromamount=True)
self.sync_all()
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.expected_stats = self.get_stats()

View File

@ -26,8 +26,8 @@ class GetChainTipsTest (BitcoinTestFramework):
# Split the network and build two chains of different lengths.
self.split_network()
self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address)
self.nodes[2].generatetoaddress(20, self.nodes[2].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[2], 20, self.nodes[2].get_deterministic_priv_key().address)
self.sync_all(self.nodes[:2])
self.sync_all(self.nodes[2:])

View File

@ -23,12 +23,12 @@ class InvalidateTest(BitcoinTestFramework):
def run_test(self):
self.log.info("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:")
self.log.info("Mine 4 blocks on Node 0")
self.nodes[0].generatetoaddress(4, self.nodes[0].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[0], 4, self.nodes[0].get_deterministic_priv_key().address)
assert_equal(self.nodes[0].getblockcount(), 4)
besthash_n0 = self.nodes[0].getbestblockhash()
self.log.info("Mine competing 6 blocks on Node 1")
self.nodes[1].generatetoaddress(6, self.nodes[1].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[1], 6, self.nodes[1].get_deterministic_priv_key().address)
assert_equal(self.nodes[1].getblockcount(), 6)
self.log.info("Connect nodes to force a reorg")
@ -54,7 +54,7 @@ class InvalidateTest(BitcoinTestFramework):
self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3))
assert_equal(self.nodes[2].getblockcount(), 2)
self.log.info("..and then mine a block")
self.nodes[2].generatetoaddress(1, self.nodes[2].get_deterministic_priv_key().address)
self.generatetoaddress(self.nodes[2], 1, self.nodes[2].get_deterministic_priv_key().address)
self.log.info("Verify all nodes are at the right height")
self.wait_until(lambda: self.nodes[2].getblockcount() == 3, timeout=5)
self.wait_until(lambda: self.nodes[0].getblockcount() == 4, timeout=5)
@ -64,13 +64,13 @@ class InvalidateTest(BitcoinTestFramework):
self.restart_node(0, extra_args=["-checkblocks=5"])
self.restart_node(1, extra_args=["-checkblocks=5"])
self.connect_nodes(0, 1)
self.nodes[0].generate(10)
self.generate(self.nodes[0], 10)
self.sync_blocks(self.nodes[0:2])
newheight = self.nodes[0].getblockcount()
for j in range(2):
self.restart_node(0, extra_args=["-checkblocks=5"])
tip = self.nodes[0].generate(10)[-1]
self.nodes[1].generate(9)
tip = self.generate(self.nodes[0], 10)[-1]
self.generate(self.nodes[1], 9)
self.connect_nodes(0, 1)
self.sync_blocks(self.nodes[0:2])
assert_equal(self.nodes[0].getblockcount(), newheight + 10 * (j + 1))
@ -87,7 +87,7 @@ class InvalidateTest(BitcoinTestFramework):
assert_equal(tip, self.nodes[1].getbestblockhash())
self.log.info("Verify that we reconsider all ancestors as well")
blocks = self.nodes[1].generatetodescriptor(10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR)
blocks = self.generatetodescriptor(self.nodes[1], 10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR)
assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
# Invalidate the two blocks at the tip
self.nodes[1].invalidateblock(blocks[-1])
@ -99,7 +99,7 @@ class InvalidateTest(BitcoinTestFramework):
assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
self.log.info("Verify that we reconsider all descendants")
blocks = self.nodes[1].generatetodescriptor(10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR)
blocks = self.generatetodescriptor(self.nodes[1], 10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR)
assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
# Invalidate the two blocks at the tip
self.nodes[1].invalidateblock(blocks[-2])

View File

@ -23,7 +23,7 @@ class RPCMasternodeTest(DashTestFramework):
checked_0_operator_reward = False
checked_non_0_operator_reward = False
while not checked_0_operator_reward or not checked_non_0_operator_reward:
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
bi = self.nodes[0].getblockchaininfo()
height = bi["blocks"]
blockhash = bi["bestblockhash"]
@ -59,7 +59,7 @@ class RPCMasternodeTest(DashTestFramework):
self.log.info("test that `masternode payments` results at chaintip match `getblocktemplate` results for that block")
gbt_masternode = self.nodes[0].getblocktemplate()["masternode"]
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
payments_masternode = self.nodes[0].masternode("payments")[0]["masternodes"][0]
for i in range(0, len(gbt_masternode)):
assert_equal(gbt_masternode[i]["payee"], payments_masternode["payees"][i]["address"])
@ -84,13 +84,13 @@ class RPCMasternodeTest(DashTestFramework):
protx_info["state"]["operatorPayoutAddress"] == payments_masternode["payees"][0]["address"]
assert option1 or option2
checked_non_0_operator_reward = True
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
self.log.info("test that `masternode outputs` show correct list")
addr1 = self.nodes[0].getnewaddress()
addr2 = self.nodes[0].getnewaddress()
self.nodes[0].sendmany('', {addr1: 1000, addr2: 1000})
self.nodes[0].generate(1)
self.generate(self.nodes[0], 1)
# we have 3 masternodes that are running already and 2 new outputs we just created
assert_equal(len(self.nodes[0].masternode("outputs")), 5)

View File

@ -47,9 +47,9 @@ class NetTest(DashTestFramework):
def run_test(self):
# We need miniwallet to make a transaction
self.wallet = MiniWallet(self.nodes[0])
self.wallet.generate(1)
self.generate(self.wallet, 1)
# Get out of IBD for the getpeerinfo tests.
self.nodes[0].generate(101)
self.generate(self.nodes[0], 101)
# By default, the test framework sets up an addnode connection from
# node 1 --> node0. By connecting node0 --> node 1, we're left with
@ -78,7 +78,7 @@ class NetTest(DashTestFramework):
self.log.info("Test getpeerinfo")
# Create a few getpeerinfo last_block/last_transaction values.
self.wallet.send_self_transfer(from_node=self.nodes[0]) # Make a transaction so we can see it in the getpeerinfo results
self.nodes[1].generate(1)
self.generate(self.nodes[1], 1)
self.sync_all()
time_now = self.mocktime
peer_info = [x.getpeerinfo() for x in self.nodes]
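The NetTest hunk above also routes MiniWallet mining through the same helper (self.generate(self.wallet, 1)), which only works if the wrapper is agnostic about what it mines with: any object exposing a matching generate method will do. The stub below is invented purely to illustrate that duck typing; a real MiniWallet mines to its own scriptPubKey rather than delegating to a wallet address.

    # Illustrative only: the framework wrapper does not care whether it is
    # handed a TestNode or a MiniWallet, as long as generate() exists.
    class MiniWalletStub:
        def __init__(self, node):
            self.node = node

        def generate(self, num_blocks):
            # Delegate to the node just to keep the example self-contained.
            return self.node.generatetoaddress(num_blocks, self.node.getnewaddress())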

View File

@ -48,7 +48,7 @@ class RPCPackagesTest(BitcoinTestFramework):
self.address = node.get_deterministic_priv_key().address
self.coins = []
# The last 100 coinbase transactions are premature
for b in node.generatetoaddress(200, self.address)[:100]:
for b in self.generatetoaddress(node, 200, self.address)[:100]:
coinbase = node.getblock(blockhash=b, verbosity=2)["tx"][0]
self.coins.append({
"txid": coinbase["txid"],
@ -148,7 +148,7 @@ class RPCPackagesTest(BitcoinTestFramework):
assert_equal(testres_single, testres_multiple)
# Clean up by clearing the mempool
node.generate(1)
self.generate(node, 1)
def test_multiple_children(self):
node = self.nodes[0]

Some files were not shown because too many files have changed in this diff.