Mirror of https://github.com/dashpay/dash.git
Merge pull request #4524 from Munkybooty/backports-0.19-pr3

Backports 0.19 pr3

Commit: ffb44aa76a
@@ -91,14 +91,23 @@ def filtermultipayoutaddress(mns):
     return [mn for mn in mns if len(hist[mn['state']['payoutAddress']]) == 1]
 
 def resolveasn(resolver, ip):
-    asn = int([x.to_text() for x in resolver.resolve('.'.join(reversed(ip.split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
+    if ip['net'] == 'ipv4':
+        ipaddr = ip['ip']
+        prefix = '.origin'
+    else:                                  # http://www.team-cymru.com/IP-ASN-mapping.html
+        res = str()                         # 2001:4860:b002:23::68
+        for nb in ip['ip'].split(':')[:4]:  # pick the first 4 nibbles
+            for c in nb.zfill(4):           # right padded with '0'
+                res += c + '.'              # 2001 4860 b002 0023
+        ipaddr = res.rstrip('.')            # 2.0.0.1.4.8.6.0.b.0.0.2.0.0.2.3
+        prefix = '.origin6'
+    asn = int([x.to_text() for x in resolver.resolve('.'.join(reversed(ipaddr.split('.'))) + prefix + '.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
     return asn
 
 # Based on Greg Maxwell's seed_filter.py
 def filterbyasn(ips, max_per_asn, max_total):
     # Sift out ips by type
-    ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
-    ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
+    ips_ipv46 = [ip for ip in ips if ip['net'] in ['ipv4', 'ipv6']]
     ips_onion = [ip for ip in ips if ip['net'] == 'onion']
 
     my_resolver = dns.resolver.Resolver()

@@ -109,13 +118,12 @@ def filterbyasn(ips, max_per_asn, max_total):
     my_resolver.nameservers = ['208.67.222.222', '208.67.220.220']
 
     # Resolve ASNs in parallel
-    asns = [pool.apply_async(resolveasn, args=(my_resolver, ip['ip'])) for ip in ips_ipv4]
+    asns = [pool.apply_async(resolveasn, args=(my_resolver, ip)) for ip in ips_ipv46]
 
-    # Filter IPv4 by ASN
+    # Filter IPv46 by ASN
     result = []
     asn_count = {}
-    for i in range(len(ips_ipv4)):
-        ip = ips_ipv4[i]
+    for i, ip in enumerate(ips_ipv46):
         if len(result) == max_total:
             break
         try:

@@ -129,10 +137,7 @@ def filterbyasn(ips, max_per_asn, max_total):
         except:
             sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
 
-    # TODO: filter IPv6 by ASN
-
-    # Add back non-IPv4
-    result.extend(ips_ipv6)
+    # Add back Onions
     result.extend(ips_onion)
     return result
 
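The resolveasn() change above builds the Team Cymru DNS query name differently per address family: IPv4 addresses are reversed octet-wise and looked up under .origin.asn.cymru.com, while IPv6 addresses first have their leading four colon groups expanded into dot-separated nibbles and are looked up under .origin6.asn.cymru.com. The following is a minimal offline sketch of that name construction only (no DNS lookups are performed); the helper name build_cymru_qname is illustrative and not part of makeseeds.py:

def build_cymru_qname(ip):
    """Build the TXT query name that the new resolveasn() would send to Team Cymru."""
    if ip['net'] == 'ipv4':
        ipaddr = ip['ip']
        prefix = '.origin'
    else:
        # Expand the first four colon groups to fixed-width nibbles,
        # e.g. 2001:4860:b002:23::68 -> 2001 4860 b002 0023
        res = ''
        for nb in ip['ip'].split(':')[:4]:
            for c in nb.zfill(4):
                res += c + '.'
        ipaddr = res.rstrip('.')   # 2.0.0.1.4.8.6.0.b.0.0.2.0.0.2.3
        prefix = '.origin6'
    # The nibbles/octets are reversed before the zone suffix is appended.
    return '.'.join(reversed(ipaddr.split('.'))) + prefix + '.asn.cymru.com'

print(build_cymru_qname({'net': 'ipv4', 'ip': '1.2.3.4'}))
# -> 4.3.2.1.origin.asn.cymru.com
print(build_cymru_qname({'net': 'ipv6', 'ip': '2001:4860:b002:23::68'}))
# -> 3.2.0.0.2.0.0.b.0.6.8.4.1.0.0.2.origin6.asn.cymru.com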
@@ -14,8 +14,9 @@ Each package is required to define at least these variables:
 placeholder such as 1.0 can be used.
 
 $(package)_download_path:
-    Location of the upstream source, without the file-name. Usually http or
-    ftp.
+    Location of the upstream source, without the file-name. Usually http, https
+    or ftp. Secure transmission options like https should be preferred if
+    available.
 
 $(package)_file_name:
     The upstream source filename available at the download path.
@@ -1,6 +1,6 @@
 package=bdb
 $(package)_version=4.8.30
-$(package)_download_path=http://download.oracle.com/berkeley-db
+$(package)_download_path=https://download.oracle.com/berkeley-db
 $(package)_file_name=db-$($(package)_version).NC.tar.gz
 $(package)_sha256_hash=12edc0df75bf9abd7f82f821795bcee50f42cb2e5f76a6a281b85732798364ef
 $(package)_build_subdir=build_unix
@@ -1,6 +1,6 @@
 package=fontconfig
 $(package)_version=2.12.1
-$(package)_download_path=http://www.freedesktop.org/software/fontconfig/release/
+$(package)_download_path=https://www.freedesktop.org/software/fontconfig/release/
 $(package)_file_name=$(package)-$($(package)_version).tar.bz2
 $(package)_sha256_hash=b449a3e10c47e1d1c7a6ec6e2016cca73d3bd68fbbd4f0ae5cc6b573f7d6c7f3
 $(package)_dependencies=freetype expat
@@ -1,6 +1,6 @@
 package=freetype
 $(package)_version=2.7.1
-$(package)_download_path=http://download.savannah.gnu.org/releases/$(package)
+$(package)_download_path=https://download.savannah.gnu.org/releases/$(package)
 $(package)_file_name=$(package)-$($(package)_version).tar.bz2
 $(package)_sha256_hash=3a3bb2c4e15ffb433f2032f50a5b5a92558206822e22bfe8cbe339af4aa82f88
 
@@ -1,6 +1,6 @@
 package=libXau
 $(package)_version=1.0.8
-$(package)_download_path=http://xorg.freedesktop.org/releases/individual/lib/
+$(package)_download_path=https://xorg.freedesktop.org/releases/individual/lib/
 $(package)_file_name=$(package)-$($(package)_version).tar.bz2
 $(package)_sha256_hash=fdd477320aeb5cdd67272838722d6b7d544887dfe7de46e1e7cc0c27c2bea4f2
 $(package)_dependencies=xproto
@@ -1,6 +1,6 @@
 package=libxcb
 $(package)_version=1.10
-$(package)_download_path=http://xcb.freedesktop.org/dist
+$(package)_download_path=https://xcb.freedesktop.org/dist
 $(package)_file_name=$(package)-$($(package)_version).tar.bz2
 $(package)_sha256_hash=98d9ab05b636dd088603b64229dd1ab2d2cc02ab807892e107d674f9c3f2d5b5
 $(package)_dependencies=xcb_proto libXau
@@ -1,6 +1,6 @@
 package=miniupnpc
 $(package)_version=2.0.20180203
-$(package)_download_path=https://miniupnp.tuxfamily.org/files
+$(package)_download_path=https://miniupnp.tuxfamily.org/files/
 $(package)_file_name=$(package)-$($(package)_version).tar.gz
 $(package)_sha256_hash=90dda8c7563ca6cd4a83e23b3c66dbbea89603a1675bfdb852897c2c9cc220b7
 $(package)_patches=dont_use_wingen.patch
@@ -1,6 +1,6 @@
 package=xcb_proto
 $(package)_version=1.10
-$(package)_download_path=http://xcb.freedesktop.org/dist
+$(package)_download_path=https://xcb.freedesktop.org/dist
 $(package)_file_name=xcb-proto-$($(package)_version).tar.bz2
 $(package)_sha256_hash=7ef40ddd855b750bc597d2a435da21e55e502a0fefa85b274f2c922800baaf05
 
@@ -1,6 +1,6 @@
 package=xproto
 $(package)_version=7.0.26
-$(package)_download_path=http://xorg.freedesktop.org/releases/individual/proto
+$(package)_download_path=https://xorg.freedesktop.org/releases/individual/proto
 $(package)_file_name=$(package)-$($(package)_version).tar.bz2
 $(package)_sha256_hash=636162c1759805a5a0114a369dffdeccb8af8c859ef6e1445f26a4e6e046514f
 
@@ -1,6 +1,6 @@
 package=zlib
 $(package)_version=1.2.11
-$(package)_download_path=http://www.zlib.net
+$(package)_download_path=https://www.zlib.net
 $(package)_file_name=$(package)-$($(package)_version).tar.gz
 $(package)_sha256_hash=c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1
 
@@ -4078,23 +4078,22 @@ bool PeerLogicValidation::ProcessMessages(CNode* pfrom, std::atomic<bool>& interruptMsgProc)
         if (m_enable_bip61) {
             connman->PushMessage(pfrom, CNetMsgMaker(INIT_PROTO_VERSION).Make(NetMsgType::REJECT, strCommand, REJECT_MALFORMED, std::string("error parsing message")));
         }
-        if (strstr(e.what(), "end of data"))
-        {
+        if (strstr(e.what(), "end of data")) {
             // Allow exceptions from under-length message on vRecv
             LogPrint(BCLog::NET, "%s(%s, %u bytes): Exception '%s' caught, normally caused by a message being shorter than its stated length\n", __func__, SanitizeString(strCommand), nMessageSize, e.what());
-        }
-        else if (strstr(e.what(), "size too large"))
-        {
+        } else if (strstr(e.what(), "size too large")) {
             // Allow exceptions from over-long size
             LogPrint(BCLog::NET, "%s(%s, %u bytes): Exception '%s' caught\n", __func__, SanitizeString(strCommand), nMessageSize, e.what());
-        }
-        else if (strstr(e.what(), "non-canonical ReadCompactSize()"))
-        {
+        } else if (strstr(e.what(), "non-canonical ReadCompactSize()")) {
             // Allow exceptions from non-canonical encoding
             LogPrint(BCLog::NET, "%s(%s, %u bytes): Exception '%s' caught\n", __func__, SanitizeString(strCommand), nMessageSize, e.what());
-        }
-        else
-        {
+        } else if (strstr(e.what(), "Superfluous witness record")) {
+            // Allow exceptions from illegal witness encoding
+            LogPrint(BCLog::NET, "%s(%s, %u bytes): Exception '%s' caught\n", __func__, SanitizeString(strCommand), nMessageSize, e.what());
+        } else if (strstr(e.what(), "Unknown transaction optional data")) {
+            // Allow exceptions from unknown witness encoding
+            LogPrint(BCLog::NET, "%s(%s, %u bytes): Exception '%s' caught\n", __func__, SanitizeString(strCommand), nMessageSize, e.what());
+        } else {
             PrintExceptionContinue(std::current_exception(), "ProcessMessages()");
         }
     } catch (...) {
@@ -254,6 +254,7 @@ UniValue importaddress(const JSONRPCRequest& request)
             "\nNote: This call can take over an hour to complete if rescan is true, during that time, other rpc calls\n"
             "may report that the imported address exists but related transactions are still missing, leading to temporarily incorrect/bogus balances and unspent outputs until rescan completes.\n"
             "If you have the full public key, you should call importpubkey instead of this.\n"
+            "Hint: use importmulti to import more than one address.\n"
             "\nNote: If you import a non-standard raw script in hex form, outputs sending to it will be treated\n"
             "as change, and not show up in many RPCs.\n",
             {
@@ -446,6 +447,7 @@ UniValue importpubkey(const JSONRPCRequest& request)
         throw std::runtime_error(
             RPCHelpMan{"importpubkey",
                 "\nAdds a public key (in hex) that can be watched as if it were in your wallet but cannot be used to spend. Requires a new wallet backup.\n"
+                "Hint: use importmulti to import more than one public key.\n"
                 "\nNote: This call can take over an hour to complete if rescan is true, during that time, other rpc calls\n"
                 "may report that the imported pubkey exists but related transactions are still missing, leading to temporarily incorrect/bogus balances and unspent outputs until rescan completes.\n",
                 {
@@ -517,7 +519,8 @@ UniValue importwallet(const JSONRPCRequest& request)
     if (request.fHelp || request.params.size() != 1)
         throw std::runtime_error(
             RPCHelpMan{"importwallet",
-                "\nImports keys from a wallet dump file (see dumpwallet). Requires a new wallet backup to include imported keys.\n",
+                "\nImports keys from a wallet dump file (see dumpwallet). Requires a new wallet backup to include imported keys.\n"
+                "Note: Use \"getwalletinfo\" to query the scanning progress.\n",
                 {
                     {"filename", RPCArg::Type::STR, RPCArg::Optional::NO, "The wallet file"},
                 },
@@ -3598,7 +3598,8 @@ static UniValue rescanblockchain(const JSONRPCRequest& request)
     if (request.fHelp || request.params.size() > 2) {
         throw std::runtime_error(
             RPCHelpMan{"rescanblockchain",
-                "\nRescan the local blockchain for wallet related transactions.\n",
+                "\nRescan the local blockchain for wallet related transactions.\n"
+                "Note: Use \"getwalletinfo\" to query the scanning progress.\n",
                 {
                     {"start_height", RPCArg::Type::NUM, /* default */ "0", "block height where the rescan should start"},
                     {"stop_height", RPCArg::Type::NUM, RPCArg::Optional::OMITTED_NAMED_ARG, "the last block height that should be scanned. If none is provided it will rescan up to the tip at return time of this call."},
@@ -28,16 +28,17 @@ def mine_large_blocks(node, n):
     # followed by 950k of OP_NOP. This would be non-standard in a non-coinbase
     # transaction but is consensus valid.
 
+    # Set the nTime if this is the first time this function has been called.
+    # A static variable ensures that time is monotonicly increasing and is therefore
+    # different for each block created => blockhash is unique.
+    if "nTimes" not in mine_large_blocks.__dict__:
+        mine_large_blocks.nTime = 0
+
     # Get the block parameters for the first block
     big_script = CScript([OP_RETURN] + [OP_NOP] * 950000)
     best_block = node.getblock(node.getbestblockhash())
     height = int(best_block["height"]) + 1
-    try:
-        # Static variable ensures that time is monotonicly increasing and is therefore
-        # different for each block created => blockhash is unique.
-        mine_large_blocks.nTime = min(mine_large_blocks.nTime, int(best_block["time"])) + 1
-    except AttributeError:
-        mine_large_blocks.nTime = int(best_block["time"]) + 1
+    mine_large_blocks.nTime = max(mine_large_blocks.nTime, int(best_block["time"])) + 1
     previousblockhash = int(best_block["hash"], 16)
 
     for _ in range(n):
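The rewrite above replaces the try/except AttributeError dance with an explicit check of the function's __dict__, using a function attribute as a "static" variable so the generated block timestamps keep increasing across calls. A standalone sketch of that pattern, assuming nothing beyond the standard library; the helper name next_block_time is illustrative only:

def next_block_time(best_block_time):
    # Initialise the "static" attribute on the first call.
    if "nTime" not in next_block_time.__dict__:
        next_block_time.nTime = 0
    # Keep the timestamp strictly increasing, even if the chain tip's
    # time stands still or moves backwards.
    next_block_time.nTime = max(next_block_time.nTime, best_block_time) + 1
    return next_block_time.nTime

print(next_block_time(1000))  # 1001
print(next_block_time(1000))  # 1002 -- still advances on repeated calls
print(next_block_time(900))   # 1003 -- never goes backwards

Taking max(...) + 1 guarantees each generated block gets a distinct, never-decreasing timestamp, which in turn keeps the block hashes unique.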
@@ -66,6 +66,13 @@ if os.name != 'nt' or sys.getwindowsversion() >= (10, 0, 14393):
 TEST_EXIT_PASSED = 0
 TEST_EXIT_SKIPPED = 77
 
+EXTENDED_SCRIPTS = [
+    # These tests are not run by default.
+    # Longest test should go first, to favor running tests in parallel
+    'feature_pruning.py', # NOTE: Prune mode is incompatible with -txindex, should work with governance validation disabled though.
+    'feature_dbcrash.py',
+]
+
 BASE_SCRIPTS = [
     # Scripts that are run by default.
     # Longest test should go first, to favor running tests in parallel
@@ -227,13 +234,6 @@ BASE_SCRIPTS = [
     # Put them in a random line within the section that fits their approximate run-time
 ]
 
-EXTENDED_SCRIPTS = [
-    # These tests are not run by default.
-    # Longest test should go first, to favor running tests in parallel
-    'feature_pruning.py', # NOTE: Prune mode is incompatible with -txindex, should work with governance validation disabled though.
-    'feature_dbcrash.py',
-]
-
 # Place EXTENDED_SCRIPTS first since it has the 3 longest running tests
 ALL_SCRIPTS = EXTENDED_SCRIPTS + BASE_SCRIPTS
 