2016-05-06 11:23:48 +02:00
|
|
|
#!/usr/bin/env python3
|
2023-08-16 19:27:31 +02:00
|
|
|
# Copyright (c) 2015-2020 The Bitcoin Core developers
|
2016-05-06 11:23:48 +02:00
|
|
|
# Distributed under the MIT software license, see the accompanying
|
2015-09-18 21:59:55 +02:00
|
|
|
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
2019-01-07 10:55:35 +01:00
|
|
|
"""Test behavior of -maxuploadtarget.
|
2015-09-18 21:59:55 +02:00
|
|
|
|
|
|
|
* Verify that getdata requests for old blocks (>1week) are dropped
|
|
|
|
if uploadtarget has been reached.
|
2018-03-21 16:16:28 +01:00
|
|
|
* Verify that getdata requests for recent blocks are respected even
|
2015-09-18 21:59:55 +02:00
|
|
|
if uploadtarget has been reached.
|
|
|
|
* Verify that the upload counters are reset after 24 hours.
|
2019-01-07 10:55:35 +01:00
|
|
|
"""
|
2017-05-02 19:10:23 +02:00
|
|
|
from collections import defaultdict
|
2019-01-07 10:55:35 +01:00
|
|
|
|
2022-09-20 09:52:32 +02:00
|
|
|
from test_framework.messages import CInv, MAX_BLOCK_SIZE, MSG_BLOCK, msg_getdata
|
Merge #13054: tests: Enable automatic detection of undefined names in Python tests scripts. Remove wildcard imports.
68400d8b96 tests: Use explicit imports (practicalswift)
Pull request description:
Enable automatic detection of undefined names in Python tests scripts. Remove wildcard imports.
Wildcard imports make it unclear which names are present in the namespace, confusing both readers and many automated tools.
An additional benefit of not using wildcard imports in tests scripts is that readers of a test script then can infer the rough testing scope just by looking at the imports.
Before this commit:
```
$ contrib/devtools/lint-python.sh | head -10
./test/functional/feature_rbf.py:8:1: F403 'from test_framework.util import *' used; unable to detect undefined names
./test/functional/feature_rbf.py:9:1: F403 'from test_framework.script import *' used; unable to detect undefined names
./test/functional/feature_rbf.py:10:1: F403 'from test_framework.mininode import *' used; unable to detect undefined names
./test/functional/feature_rbf.py:15:12: F405 bytes_to_hex_str may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:17:58: F405 CScript may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:25:13: F405 COIN may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:26:31: F405 satoshi_round may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:26:60: F405 COIN may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:30:41: F405 satoshi_round may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:30:68: F405 COIN may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
$
```
After this commit:
```
$ contrib/devtools/lint-python.sh | head -10
$
```
Tree-SHA512: 3f826d39cffb6438388e5efcb20a9622ff8238247e882d68f7b38609877421b2a8e10e9229575f8eb6a8fa42dec4256986692e92922c86171f750a0e887438d9
2018-08-13 14:24:43 +02:00
|
|
|
from test_framework.mininode import P2PInterface
|
2019-01-07 10:55:35 +01:00
|
|
|
from test_framework.test_framework import BitcoinTestFramework
|
Merge #13054: tests: Enable automatic detection of undefined names in Python tests scripts. Remove wildcard imports.
68400d8b96 tests: Use explicit imports (practicalswift)
Pull request description:
Enable automatic detection of undefined names in Python tests scripts. Remove wildcard imports.
Wildcard imports make it unclear which names are present in the namespace, confusing both readers and many automated tools.
An additional benefit of not using wildcard imports in tests scripts is that readers of a test script then can infer the rough testing scope just by looking at the imports.
Before this commit:
```
$ contrib/devtools/lint-python.sh | head -10
./test/functional/feature_rbf.py:8:1: F403 'from test_framework.util import *' used; unable to detect undefined names
./test/functional/feature_rbf.py:9:1: F403 'from test_framework.script import *' used; unable to detect undefined names
./test/functional/feature_rbf.py:10:1: F403 'from test_framework.mininode import *' used; unable to detect undefined names
./test/functional/feature_rbf.py:15:12: F405 bytes_to_hex_str may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:17:58: F405 CScript may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:25:13: F405 COIN may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:26:31: F405 satoshi_round may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:26:60: F405 COIN may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:30:41: F405 satoshi_round may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
./test/functional/feature_rbf.py:30:68: F405 COIN may be undefined, or defined from star imports: test_framework.mininode, test_framework.script, test_framework.util
$
```
After this commit:
```
$ contrib/devtools/lint-python.sh | head -10
$
```
Tree-SHA512: 3f826d39cffb6438388e5efcb20a9622ff8238247e882d68f7b38609877421b2a8e10e9229575f8eb6a8fa42dec4256986692e92922c86171f750a0e887438d9
2018-08-13 14:24:43 +02:00
|
|
|
from test_framework.util import assert_equal, mine_large_block, set_node_times
|
|
|
|
|
2015-09-18 21:59:55 +02:00
|
|
|
|
2018-03-20 11:44:19 +01:00
|
|
|
class TestP2PConn(P2PInterface):
    """P2P connection that counts received blocks and ignores inv messages."""

    def __init__(self):
        super().__init__()
        # Maps block sha256 -> number of times that block was received,
        # so the test can assert exactly how many getdata requests succeeded.
        self.block_receive_map = defaultdict(int)

    def on_inv(self, message):
        # Ignore inv announcements: the test drives block download
        # explicitly via getdata requests.
        pass

    def on_block(self, message):
        message.block.calc_sha256()
        self.block_receive_map[message.block.sha256] += 1
|
2015-09-18 21:59:55 +02:00
|
|
|
|
|
|
|
class MaxUploadTest(BitcoinTestFramework):
    """Test -maxuploadtarget enforcement.

    Verifies that getdata requests for old (>1 week) blocks are dropped once
    the upload target is reached, that recent blocks are still served, that
    the counters reset after 24 hours, and that the 'download' permission
    exempts a peer from the limit.
    """

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1
        # Large -maxtipage so the node accepts a chain mocked two weeks ahead.
        self.extra_args = [["-maxuploadtarget=200", "-blockmaxsize=999000", "-maxtipage="+str(2*60*60*24*7), "-acceptnonstdtxn=1"]]
        self.supports_cli = False

        # Cache for utxos, as the listunspent may take a long time later in the test
        self.utxo_cache = []

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def run_test(self):
        # Advance all nodes 2 weeks in the future
        old_mocktime = self.mocktime
        current_mocktime = old_mocktime + 2*60*60*24*7
        self.mocktime = current_mocktime
        set_node_times(self.nodes, current_mocktime)

        # Before we connect anything, we first set the time on the node
        # to be in the past, otherwise things break because the CNode
        # time counters can't be reset backward after initialization
        self.nodes[0].setmocktime(old_mocktime)

        # Generate some old blocks
        self.nodes[0].generate(130)

        # p2p_conns[0] will only request old blocks
        # p2p_conns[1] will only request new blocks
        # p2p_conns[2] will test resetting the counters
        p2p_conns = []
        for _ in range(3):
            p2p_conns.append(self.nodes[0].add_p2p_connection(TestP2PConn()))

        # Now mine a big block
        mine_large_block(self.nodes[0], self.utxo_cache)

        # Store the hash; we'll request this later
        big_old_block = self.nodes[0].getbestblockhash()
        old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
        big_old_block = int(big_old_block, 16)

        # Advance to two days ago
        self.nodes[0].setmocktime(current_mocktime - 2*60*60*24)

        # Mine one more block, so that the prior block looks old
        mine_large_block(self.nodes[0], self.utxo_cache)

        # We'll be requesting this new block too
        big_new_block = self.nodes[0].getbestblockhash()
        big_new_block = int(big_new_block, 16)

        # p2p_conns[0] will test what happens if we just keep requesting
        # the same big old block too many times (expect: disconnect)
        getdata_request = msg_getdata()
        getdata_request.inv.append(CInv(MSG_BLOCK, big_old_block))

        max_bytes_per_day = 200*1024*1024
        daily_buffer = 144 * MAX_BLOCK_SIZE
        max_bytes_available = max_bytes_per_day - daily_buffer
        success_count = max_bytes_available // old_block_size

        # 144MB will be reserved for relaying new blocks, so expect this to
        # succeed for ~70 tries.
        for i in range(success_count):
            p2p_conns[0].send_and_ping(getdata_request)
            assert_equal(p2p_conns[0].block_receive_map[big_old_block], i+1)

        assert_equal(len(self.nodes[0].getpeerinfo()), 3)
        # At most a couple more tries should succeed (depending on how long
        # the test has been running so far).
        for _ in range(3):
            p2p_conns[0].send_message(getdata_request)
        p2p_conns[0].wait_for_disconnect()
        assert_equal(len(self.nodes[0].getpeerinfo()), 2)
        self.log.info("Peer 0 disconnected after downloading old block too many times")

        # Requesting the current block on p2p_conns[1] should succeed indefinitely,
        # even when over the max upload target.
        # We'll try 200 times
        getdata_request.inv = [CInv(MSG_BLOCK, big_new_block)]
        for i in range(200):
            # send_and_ping already waits for the ping round-trip, so no
            # separate sync_with_ping is needed here.
            p2p_conns[1].send_and_ping(getdata_request)
            assert_equal(p2p_conns[1].block_receive_map[big_new_block], i+1)

        self.log.info("Peer 1 able to repeatedly download new block")

        # But if p2p_conns[1] tries for an old block, it gets disconnected too.
        getdata_request.inv = [CInv(MSG_BLOCK, big_old_block)]
        p2p_conns[1].send_message(getdata_request)
        p2p_conns[1].wait_for_disconnect()
        assert_equal(len(self.nodes[0].getpeerinfo()), 1)

        self.log.info("Peer 1 disconnected after trying to download old block")

        self.log.info("Advancing system time on node to clear counters...")

        # If we advance the time by 24 hours, then the counters should reset,
        # and p2p_conns[2] should be able to retrieve the old block.
        self.nodes[0].setmocktime(current_mocktime)
        p2p_conns[2].sync_with_ping()
        p2p_conns[2].send_and_ping(getdata_request)
        assert_equal(p2p_conns[2].block_receive_map[big_old_block], 1)

        self.log.info("Peer 2 able to download old block")

        self.nodes[0].disconnect_p2ps()

        self.log.info("Restarting node 0 with download permission and 1MB maxuploadtarget")
        self.restart_node(0, ["-whitelist=download@127.0.0.1", "-maxuploadtarget=1", "-blockmaxsize=999000", "-maxtipage="+str(2*60*60*24*7), "-mocktime="+str(current_mocktime)])

        # Reconnect to self.nodes[0]
        self.nodes[0].add_p2p_connection(TestP2PConn())

        #retrieve 20 blocks which should be enough to break the 1MB limit
        getdata_request.inv = [CInv(MSG_BLOCK, big_new_block)]
        for i in range(20):
            self.nodes[0].p2p.send_and_ping(getdata_request)
            assert_equal(self.nodes[0].p2p.block_receive_map[big_new_block], i+1)

        getdata_request.inv = [CInv(MSG_BLOCK, big_old_block)]
        self.nodes[0].p2p.send_and_ping(getdata_request)

        self.log.info("Peer still connected after trying to download old block (download permission)")
        peer_info = self.nodes[0].getpeerinfo()
        assert_equal(len(peer_info), 1)  # node is still connected
        assert_equal(peer_info[0]['permissions'], ['download'])
|
|
|
|
|
2015-11-11 10:10:48 +01:00
|
|
|
|
2015-09-18 21:59:55 +02:00
|
|
|
# Script entry point: run the test when executed directly.
if __name__ == '__main__':
    MaxUploadTest().main()
|