Merge #8482: [qa] Use single cache dir for chains
fad8cf6 [qa] Use single cache dir for chains (MarcoFalke)
fa2d68f [qa] Adjust timeouts for micro-optimization of run time (MarcoFalke)
fae596f [qa] Sort scripts by time for pull_tester and don't overwrite setup_chain (MarcoFalke)
This commit is contained in: parent 5c928f0052, commit 7bba7375d0
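Taken together, the three commits move the pregenerated test chains into one shared cache directory (qa/cache by default) and expose it through a new --cachedir option. A minimal sketch of invoking a single test against that cache; --srcdir and --cachedir are the real flags touched in the diffs below, while the paths are placeholders:

    import subprocess

    # Hypothetical invocation; the paths are illustrative, not from the diff.
    subprocess.check_call([
        "python", "qa/rpc-tests/keypool.py",
        "--srcdir=/path/to/dash/src",         # where dashd / dash-cli were built
        "--cachedir=/path/to/dash/qa/cache",  # shared pregenerated datadirs
    ])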
--- a/.gitignore
+++ b/.gitignore
@@ -110,10 +110,9 @@ linux-build
 win32-build
 qa/pull-tester/run-bitcoind-for-test.sh
 qa/pull-tester/tests_config.py
-qa/pull-tester/cache/*
 qa/pull-tester/test.*/*
 qa/tmp
-cache/
+qa/cache/*
 share/BitcoindComparisonTool.jar
 
 !src/leveldb*/Makefile
--- a/qa/pull-tester/rpc-tests.py
+++ b/qa/pull-tester/rpc-tests.py
@@ -146,6 +146,7 @@ testScripts = [
     'sendheaders.py', # NOTE: needs dash_hash to pass
     'keypool.py',
     'keypool-hd.py',
+    'p2p-mempool.py',
     'prioritise_transaction.py',
     'invalidblockrequest.py', # NOTE: needs dash_hash to pass
     'invalidtxrequest.py', # NOTE: needs dash_hash to pass
@@ -204,6 +205,7 @@ def runtests():
         coverage = RPCCoverage()
         print("Initializing coverage directory at %s\n" % coverage.dir)
     flags = ["--srcdir=%s/src" % BUILDDIR] + passon_args
+    flags.append("--cachedir=%s/qa/cache" % BUILDDIR)
     if coverage:
         flags.append(coverage.flag)
 
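For context, the runner composes one flag list and hands it to every script, so the whole suite shares a single cache. A condensed sketch of that pattern using the names from the hunks above; the placeholder values and the run loop are illustrative, not the verbatim runner:

    import subprocess

    BUILDDIR = "/path/to/dash"   # placeholder; rpc-tests.py computes this itself
    passon_args = []             # extra command-line flags forwarded to each test
    testScripts = ['keypool.py', 'p2p-mempool.py']  # abbreviated list

    flags = ["--srcdir=%s/src" % BUILDDIR] + passon_args
    flags.append("--cachedir=%s/qa/cache" % BUILDDIR)  # one cache for all scripts

    for script in testScripts:
        subprocess.check_call(["python", "qa/rpc-tests/" + script] + flags)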
--- a/qa/rpc-tests/p2p-mempool.py
+++ b/qa/rpc-tests/p2p-mempool.py
@@ -72,8 +72,11 @@ class TestNode(NodeConnCB):
         self.send_message(msg_mempool())
 
 class P2PMempoolTests(BitcoinTestFramework):
-    def setup_chain(self):
-        initialize_chain_clean(self.options.tmpdir, 2)
+    def __init__(self):
+        super().__init__()
+        self.setup_clean_chain = True
+        self.num_nodes = 2
 
     def setup_network(self):
         # Start a node with maxuploadtarget of 200 MB (/24h)
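The hunk above replaces an overridden setup_chain() with attributes set in __init__; the base class reads them and chooses between a clean chain and the shared cache (see the test_framework.py hunk below). A minimal test skeleton in the new style, with an illustrative class name and run_test body:

    from test_framework.test_framework import BitcoinTestFramework

    class ExampleTest(BitcoinTestFramework):
        def __init__(self):
            super().__init__()
            self.setup_clean_chain = True  # start from empty datadirs, bypass the cache
            self.num_nodes = 2             # the framework starts this many nodes

        def run_test(self):
            pass  # actual test logic goes here

    if __name__ == '__main__':
        ExampleTest().main()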
--- a/qa/rpc-tests/test_framework/test_framework.py
+++ b/qa/rpc-tests/test_framework/test_framework.py
@@ -47,7 +47,7 @@ class BitcoinTestFramework(object):
         if self.setup_clean_chain:
             initialize_chain_clean(self.options.tmpdir, self.num_nodes)
         else:
-            initialize_chain(self.options.tmpdir, self.num_nodes)
+            initialize_chain(self.options.tmpdir, self.num_nodes, self.options.cachedir)
 
     def setup_nodes(self):
         return start_nodes(self.num_nodes, self.options.tmpdir)
@@ -108,6 +108,8 @@ class BitcoinTestFramework(object):
                           help="Don't stop dashds after the test execution")
         parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../../src"),
                           help="Source directory containing dashd/dash-cli (default: %default)")
+        parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../cache"),
+                          help="Directory for caching pregenerated datadirs")
         parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
                           help="Root directory for datadirs")
         parser.add_option("--tracerpc", dest="trace_rpc", default=False, action="store_true",
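The default for the new option resolves relative to the framework file itself, two directory levels up, which lands in qa/cache. A small standalone sketch of that computation (here __file__ refers to wherever this snippet lives, not to the framework):

    import os

    # Mirrors the default= expression in the hunk above:
    # qa/rpc-tests/test_framework/ -> ../../cache -> qa/cache
    here = os.path.dirname(os.path.realpath(__file__))
    default_cachedir = os.path.normpath(here + "/../../cache")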
--- a/qa/rpc-tests/test_framework/util.py
+++ b/qa/rpc-tests/test_framework/util.py
@@ -131,7 +131,7 @@ def hex_str_to_bytes(hex_str):
 def str_to_b64str(string):
     return b64encode(string.encode('utf-8')).decode('ascii')
 
-def sync_blocks(rpc_connections, wait=1, timeout=60):
+def sync_blocks(rpc_connections, wait=0.125, timeout=60):
     """
     Wait until everybody has the same tip
     """
@@ -143,7 +143,7 @@ def sync_blocks(rpc_connections, wait=1, timeout=60):
         timeout -= wait
     raise AssertionError("Block sync failed")
 
-def sync_mempools(rpc_connections, wait=1, timeout=60):
+def sync_mempools(rpc_connections, wait=0.5, timeout=60):
     """
     Wait until everybody has the same transactions in their memory
     pools
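Both sync helpers poll in a loop and sleep between polls, so cutting the default wait from 1s to 0.125s (blocks) and 0.5s (mempools) trims idle time from nearly every test while keeping the 60s budget. A condensed sketch of the polling shape; getbestblockhash stands in for whatever node state the real helper compares:

    import time

    def sync_blocks(rpc_connections, wait=0.125, timeout=60):
        """Poll until every node reports the same tip, sleeping `wait`
        seconds between polls; give up once `timeout` is exhausted."""
        while timeout > 0:
            tips = [rpc.getbestblockhash() for rpc in rpc_connections]
            if tips.count(tips[0]) == len(tips):
                return
            time.sleep(wait)
            timeout -= wait
        raise AssertionError("Block sync failed")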
@@ -215,7 +215,7 @@ def wait_for_bitcoind_start(process, url, i):
             raise # unkown JSON RPC exception
         time.sleep(0.25)
 
-def initialize_chain(test_dir, num_nodes):
+def initialize_chain(test_dir, num_nodes, cachedir):
     """
     Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
     Afterward, create num_nodes copies from the cache
@@ -224,7 +224,7 @@ def initialize_chain(test_dir, num_nodes):
     assert num_nodes <= MAX_NODES
     create_cache = False
     for i in range(MAX_NODES):
-        if not os.path.isdir(os.path.join('cache', 'node'+str(i))):
+        if not os.path.isdir(os.path.join(cachedir, 'node'+str(i))):
             create_cache = True
             break
 
@@ -232,12 +232,12 @@ def initialize_chain(test_dir, num_nodes):
 
         #find and delete old cache directories if any exist
         for i in range(MAX_NODES):
-            if os.path.isdir(os.path.join("cache","node"+str(i))):
-                shutil.rmtree(os.path.join("cache","node"+str(i)))
+            if os.path.isdir(os.path.join(cachedir,"node"+str(i))):
+                shutil.rmtree(os.path.join(cachedir,"node"+str(i)))
 
         # Create cache directories, run dashds:
         for i in range(MAX_NODES):
-            datadir=initialize_datadir("cache", i)
+            datadir=initialize_datadir(cachedir, i)
             args = [ os.getenv("DASHD", "dashd"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ]
             if i > 0:
                 args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
@@ -279,13 +279,13 @@ def initialize_chain(test_dir, num_nodes):
         wait_bitcoinds()
         disable_mocktime()
         for i in range(MAX_NODES):
-            os.remove(log_filename("cache", i, "debug.log"))
-            os.remove(log_filename("cache", i, "db.log"))
-            os.remove(log_filename("cache", i, "peers.dat"))
-            os.remove(log_filename("cache", i, "fee_estimates.dat"))
+            os.remove(log_filename(cachedir, i, "debug.log"))
+            os.remove(log_filename(cachedir, i, "db.log"))
+            os.remove(log_filename(cachedir, i, "peers.dat"))
+            os.remove(log_filename(cachedir, i, "fee_estimates.dat"))
 
     for i in range(num_nodes):
-        from_dir = os.path.join("cache", "node"+str(i))
+        from_dir = os.path.join(cachedir, "node"+str(i))
         to_dir = os.path.join(test_dir, "node"+str(i))
         shutil.copytree(from_dir, to_dir)
         initialize_datadir(test_dir, i) # Overwrite port/rpcport in dash.conf
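Taken together, these hunks make cachedir the single source of pregenerated datadirs: the cache is rebuilt only if some node's cached datadir is missing, and every run copies fresh per-test datadirs out of it. A condensed sketch of that lifecycle; the MAX_NODES value of 4 is an assumption for the sketch, and the cache-building body is elided:

    import os
    import shutil

    MAX_NODES = 4  # assumption for the sketch; util.py defines the real constant

    def initialize_chain(test_dir, num_nodes, cachedir):
        """Condensed sketch: (re)build the shared cache once, then copy
        disposable per-test datadirs out of it."""
        assert num_nodes <= MAX_NODES

        # Rebuild the cache only if any node's cached datadir is missing.
        if any(not os.path.isdir(os.path.join(cachedir, 'node' + str(i)))
               for i in range(MAX_NODES)):
            pass  # start dashds, mine the 200-block chain, shut down (elided)

        # Each run gets its own copy, so the cache itself is never mutated.
        for i in range(num_nodes):
            shutil.copytree(os.path.join(cachedir, 'node' + str(i)),
                            os.path.join(test_dir, 'node' + str(i)))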