From 7bba7375d06c74afe28ff98aa44de6aa8523dbc9 Mon Sep 17 00:00:00 2001
From: "Wladimir J. van der Laan"
Date: Wed, 17 Aug 2016 12:12:55 +0200
Subject: [PATCH] Merge #8482: [qa] Use single cache dir for chains

fad8cf6 [qa] Use single cache dir for chains (MarcoFalke)
fa2d68f [qa] Adjust timeouts for micro-optimization of run time (MarcoFalke)
fae596f [qa] Sort scripts by time for pull_tester and don't overwrite setup_chain (MarcoFalke)
---
 .gitignore                                    |  3 +--
 qa/pull-tester/rpc-tests.py                   |  2 ++
 qa/rpc-tests/p2p-mempool.py                   |  7 ++++--
 qa/rpc-tests/test_framework/test_framework.py |  4 +++-
 qa/rpc-tests/test_framework/util.py           | 24 +++++++++----------
 5 files changed, 23 insertions(+), 17 deletions(-)

diff --git a/.gitignore b/.gitignore
index fd2d881596..e9f38f9825 100644
--- a/.gitignore
+++ b/.gitignore
@@ -110,10 +110,9 @@ linux-build
 win32-build
 qa/pull-tester/run-bitcoind-for-test.sh
 qa/pull-tester/tests_config.py
-qa/pull-tester/cache/*
 qa/pull-tester/test.*/*
 qa/tmp
-cache/
+qa/cache/*
 share/BitcoindComparisonTool.jar
 
 !src/leveldb*/Makefile
diff --git a/qa/pull-tester/rpc-tests.py b/qa/pull-tester/rpc-tests.py
index 90d1e87bb2..604a1060be 100755
--- a/qa/pull-tester/rpc-tests.py
+++ b/qa/pull-tester/rpc-tests.py
@@ -146,6 +146,7 @@ testScripts = [
     'sendheaders.py', # NOTE: needs dash_hash to pass
     'keypool.py',
     'keypool-hd.py',
+    'p2p-mempool.py',
     'prioritise_transaction.py',
     'invalidblockrequest.py', # NOTE: needs dash_hash to pass
     'invalidtxrequest.py', # NOTE: needs dash_hash to pass
@@ -204,6 +205,7 @@ def runtests():
         coverage = RPCCoverage()
         print("Initializing coverage directory at %s\n" % coverage.dir)
     flags = ["--srcdir=%s/src" % BUILDDIR] + passon_args
+    flags.append("--cachedir=%s/qa/cache" % BUILDDIR)
     if coverage:
         flags.append(coverage.flag)
 
diff --git a/qa/rpc-tests/p2p-mempool.py b/qa/rpc-tests/p2p-mempool.py
index 5d2daf39f8..5c5d778f42 100755
--- a/qa/rpc-tests/p2p-mempool.py
+++ b/qa/rpc-tests/p2p-mempool.py
@@ -72,8 +72,11 @@ class TestNode(NodeConnCB):
         self.send_message(msg_mempool())
 
 class P2PMempoolTests(BitcoinTestFramework):
-    def setup_chain(self):
-        initialize_chain_clean(self.options.tmpdir, 2)
+
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 2
 
     def setup_network(self):
         # Start a node with maxuploadtarget of 200 MB (/24h)
diff --git a/qa/rpc-tests/test_framework/test_framework.py b/qa/rpc-tests/test_framework/test_framework.py
index 7b7ddb4887..d7a7c9cc50 100755
--- a/qa/rpc-tests/test_framework/test_framework.py
+++ b/qa/rpc-tests/test_framework/test_framework.py
@@ -47,7 +47,7 @@ class BitcoinTestFramework(object):
         if self.setup_clean_chain:
             initialize_chain_clean(self.options.tmpdir, self.num_nodes)
         else:
-            initialize_chain(self.options.tmpdir, self.num_nodes)
+            initialize_chain(self.options.tmpdir, self.num_nodes, self.options.cachedir)
 
     def setup_nodes(self):
         return start_nodes(self.num_nodes, self.options.tmpdir)
@@ -108,6 +108,8 @@ class BitcoinTestFramework(object):
                           help="Don't stop dashds after the test execution")
         parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../../src"),
                           help="Source directory containing dashd/dash-cli (default: %default)")
+        parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../cache"),
+                          help="Directory for caching pregenerated datadirs")
         parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
                           help="Root directory for datadirs")
         parser.add_option("--tracerpc", dest="trace_rpc", default=False, action="store_true",
diff --git a/qa/rpc-tests/test_framework/util.py b/qa/rpc-tests/test_framework/util.py
index ce5a7ec48c..756ea012d2 100644
--- a/qa/rpc-tests/test_framework/util.py
+++ b/qa/rpc-tests/test_framework/util.py
@@ -131,7 +131,7 @@ def hex_str_to_bytes(hex_str):
 def str_to_b64str(string):
     return b64encode(string.encode('utf-8')).decode('ascii')
 
-def sync_blocks(rpc_connections, wait=1, timeout=60):
+def sync_blocks(rpc_connections, wait=0.125, timeout=60):
     """
     Wait until everybody has the same tip
     """
@@ -143,7 +143,7 @@ def sync_blocks(rpc_connections, wait=1, timeout=60):
         timeout -= wait
     raise AssertionError("Block sync failed")
 
-def sync_mempools(rpc_connections, wait=1, timeout=60):
+def sync_mempools(rpc_connections, wait=0.5, timeout=60):
     """
     Wait until everybody has the same transactions in their memory pools
     """
@@ -215,7 +215,7 @@ def wait_for_bitcoind_start(process, url, i):
             raise # unkown JSON RPC exception
         time.sleep(0.25)
 
-def initialize_chain(test_dir, num_nodes):
+def initialize_chain(test_dir, num_nodes, cachedir):
     """
     Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
     Afterward, create num_nodes copies from the cache
@@ -224,7 +224,7 @@
     assert num_nodes <= MAX_NODES
     create_cache = False
     for i in range(MAX_NODES):
-        if not os.path.isdir(os.path.join('cache', 'node'+str(i))):
+        if not os.path.isdir(os.path.join(cachedir, 'node'+str(i))):
             create_cache = True
             break
 
@@ -232,12 +232,12 @@
     if create_cache:
         #find and delete old cache directories if any exist
        for i in range(MAX_NODES):
-            if os.path.isdir(os.path.join("cache","node"+str(i))):
-                shutil.rmtree(os.path.join("cache","node"+str(i)))
+            if os.path.isdir(os.path.join(cachedir,"node"+str(i))):
+                shutil.rmtree(os.path.join(cachedir,"node"+str(i)))
 
         # Create cache directories, run dashds:
         for i in range(MAX_NODES):
-            datadir=initialize_datadir("cache", i)
+            datadir=initialize_datadir(cachedir, i)
             args = [ os.getenv("DASHD", "dashd"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ]
             if i > 0:
                 args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
@@ -279,13 +279,13 @@
         wait_bitcoinds()
         disable_mocktime()
         for i in range(MAX_NODES):
-            os.remove(log_filename("cache", i, "debug.log"))
-            os.remove(log_filename("cache", i, "db.log"))
-            os.remove(log_filename("cache", i, "peers.dat"))
-            os.remove(log_filename("cache", i, "fee_estimates.dat"))
+            os.remove(log_filename(cachedir, i, "debug.log"))
+            os.remove(log_filename(cachedir, i, "db.log"))
+            os.remove(log_filename(cachedir, i, "peers.dat"))
+            os.remove(log_filename(cachedir, i, "fee_estimates.dat"))
 
     for i in range(num_nodes):
-        from_dir = os.path.join("cache", "node"+str(i))
+        from_dir = os.path.join(cachedir, "node"+str(i))
         to_dir = os.path.join(test_dir, "node"+str(i))
         shutil.copytree(from_dir, to_dir)
         initialize_datadir(test_dir, i) # Overwrite port/rpcport in dash.conf