Mirror of https://github.com/dashpay/dash.git, synced 2024-12-25 03:52:49 +01:00
Merge pull request #5537 from knst/bc-bp-v20-missing-8v2
backport: bitcoin#12134, #18426, #18534, #18828, #18864, #19153, #19201, #19205, #19560, #19813, #19859, partial #20354, #20955, #22442, #22790
This commit is contained in: commit 135be62d44
.gitignore (vendored, 3 changes)
@@ -111,6 +111,9 @@ qrc_*.cpp
 build
 *.dSYM
 
+# Previous releases
+releases
+
 #lcov
 *.gcno
 *.gcda
@@ -152,6 +152,15 @@ builder-image:
  script:
    - echo "INTEGRATION_TESTS_ARGS=${INTEGRATION_TESTS_ARGS}"
    - ./ci/dash/test_integrationtests.sh $INTEGRATION_TESTS_ARGS
+  cache:
+    # Let all branches share the same cache, which is ok because get_previous_releases.py extracts release binaries in separate folders
+    key:
+      files:
+        # CI files where $PREVIOUS_RELEASES_TO_DOWNLOAD is defined
+        - ci/test/00_setup_env_native_qt5.sh
+      prefix: ${CI_JOB_NAME}
+    paths:
+      - releases
  after_script:
    - mkdir -p $CI_PROJECT_DIR/testlogs
  artifacts:
@@ -31,7 +31,7 @@ services:
cache:
  ccache: true
  directories:
    - $BASE_BUILD_DIR/ci/scratch/.ccache
    - $BASE_BUILD_DIR/releases/$HOST
before_cache:
  - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then brew cleanup; fi
env:
@@ -244,7 +244,7 @@ after_success:
      FILE_ENV="./ci/test/00_setup_env_win64.sh"

  - stage: test
-   name: 'x86_64 Linux [GOAL: install] [focal] [uses qt5 dev package and some depends packages] [unsigned char]'
+   name: 'x86_64 Linux [GOAL: install] [focal] [previous releases, uses qt5 dev package and some depends packages] [unsigned char]'
    env: >-
      FILE_ENV="./ci/test/00_setup_env_native_qt5.sh"

# x86_64 Linux (xenial, no depends, only system libs, sanitizers: thread (TSan))
@@ -13,7 +13,7 @@ If the repository is not a fresh git clone, you might have to clean files from p

The ci needs to perform various sysadmin tasks such as installing packages or writing to the user's home directory.
While most of the actions are done inside a docker container, this is not possible for all. Thus, cache directories,
-such as the depends cache or ccache, are mounted as read-write into the docker container. While it should be fine to run
+such as the depends cache, previous release binaries, or ccache, are mounted as read-write into the docker container. While it should be fine to run
the ci system locally on your development box, the ci scripts can generally be assumed to have received less review and
testing compared to other parts of the codebase. If you want to keep the work tree clean, you might want to run the ci
system in a virtual machine with a Linux operating system of your choice.
@@ -20,6 +20,11 @@ fi

export LD_LIBRARY_PATH=$DEPENDS_DIR/$HOST/lib

+if [ -n "$PREVIOUS_RELEASES_TO_DOWNLOAD" ]; then
+  echo "Downloading previous releases: $PREVIOUS_RELEASES_TO_DOWNLOAD"
+  ./test/get_previous_releases.py -b -t "$PREVIOUS_RELEASES_DIR" ${PREVIOUS_RELEASES_TO_DOWNLOAD}
+fi
+
cd build-ci/dashcore-$BUILD_TARGET

if [ "$SOCKETEVENTS" = "" ]; then
@@ -39,6 +39,7 @@ export USE_BUSY_BOX=${USE_BUSY_BOX:-false}
export RUN_UNIT_TESTS=${RUN_UNIT_TESTS:-true}
export RUN_INTEGRATION_TESTS=${RUN_INTEGRATION_TESTS:-true}
export RUN_SECURITY_TESTS=${RUN_SECURITY_TESTS:-false}
+export TEST_PREVIOUS_RELEASES=${TEST_PREVIOUS_RELEASES:-false}
export RUN_FUZZ_TESTS=${RUN_FUZZ_TESTS:-false}
export RUN_SYMBOL_TESTS=${RUN_SYMBOL_TESTS:-true}
export CONTAINER_NAME=${CONTAINER_NAME:-ci_unnamed}
@@ -59,6 +60,7 @@ export CCACHE_DIR=${CCACHE_DIR:-$CACHE_DIR/ccache}
export DEPENDS_DIR=${DEPENDS_DIR:-$BASE_ROOT_DIR/depends}
# Folder where the build is done (bin and lib).
export BASE_OUTDIR=${BASE_OUTDIR:-$BASE_SCRATCH_DIR/out/$HOST}
+export PREVIOUS_RELEASES_DIR=${PREVIOUS_RELEASES_DIR:-$BASE_ROOT_DIR/releases/$HOST}
export SDK_URL=${SDK_URL:-https://bitcoincore.org/depends-sources/sdks}
export DOCKER_PACKAGES=${DOCKER_PACKAGES:-build-essential libtool autotools-dev automake pkg-config bsdmainutils curl ca-certificates ccache python3 rsync git procps}
export GOAL=${GOAL:-install}
@@ -14,4 +14,6 @@ export DEP_OPTS="NO_UPNP=1 DEBUG=1"
export RUN_UNIT_TESTS_SEQUENTIAL="true"
export RUN_UNIT_TESTS="false"
export GOAL="install"
+export TEST_PREVIOUS_RELEASES=true
+export PREVIOUS_RELEASES_TO_DOWNLOAD="v0.15.0.0 v0.16.1.1 v0.17.0.3 v18.2.2 v19.3.0"
export BITCOIN_CONFIG="--enable-zmq --enable-reduce-exports --disable-fuzz-binary LDFLAGS=-static-libstdc++"
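The two new exports above are what the download step keys on: ci/dash/test_integrationtests.sh (earlier in this diff) calls test/get_previous_releases.py with PREVIOUS_RELEASES_DIR as the target directory. A minimal Python sketch of that same step, runnable from the repository root outside CI; the fallback values are assumptions taken from the defaults shown in this diff:

```python
#!/usr/bin/env python3
# Sketch only (not part of the CI scripts): reproduce the previous-releases
# download that ci/dash/test_integrationtests.sh performs, driven by the same
# environment variables. The "releases" fallback mirrors PREVIOUS_RELEASES_DIR's
# default in ci/test/00_setup_env.sh.
import os
import subprocess

tags = os.environ.get(
    "PREVIOUS_RELEASES_TO_DOWNLOAD",
    "v0.15.0.0 v0.16.1.1 v0.17.0.3 v18.2.2 v19.3.0",
).split()
target_dir = os.environ.get("PREVIOUS_RELEASES_DIR", "releases")

if tags:
    print("Downloading previous releases:", " ".join(tags))
    subprocess.check_call(["test/get_previous_releases.py", "-b", "-t", target_dir] + tags)
```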
@@ -20,12 +20,13 @@ fi

mkdir -p "${BASE_SCRATCH_DIR}"
mkdir -p "${CCACHE_DIR}"
+mkdir -p "${PREVIOUS_RELEASES_DIR}"

export ASAN_OPTIONS="detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1"
export LSAN_OPTIONS="suppressions=${BASE_BUILD_DIR}/test/sanitizer_suppressions/lsan"
export TSAN_OPTIONS="suppressions=${BASE_BUILD_DIR}/test/sanitizer_suppressions/tsan"
export UBSAN_OPTIONS="suppressions=${BASE_BUILD_DIR}/test/sanitizer_suppressions/ubsan:print_stacktrace=1:halt_on_error=1:report_error_type=1"
-env | grep -E '^(BASE_|QEMU_|CCACHE_|LC_ALL|BOOST_TEST_RANDOM|CONFIG_SHELL|(ASAN|LSAN|TSAN|UBSAN)_OPTIONS)' | tee /tmp/env
+env | grep -E '^(BASE_|QEMU_|CCACHE_|LC_ALL|BOOST_TEST_RANDOM|CONFIG_SHELL|(ASAN|LSAN|TSAN|UBSAN)_OPTIONS|TEST_PREVIOUS_RELEASES|PREVIOUS_RELEASES_DIR)' | tee /tmp/env
if [[ $HOST = *-mingw32 ]]; then
  DOCKER_ADMIN="--cap-add SYS_ADMIN"
elif [[ $BITCOIN_CONFIG = *--with-sanitizers=*address* ]]; then # If ran with (ASan + LSan), Docker needs access to ptrace (https://github.com/google/sanitizers/issues/764)
@@ -42,6 +43,7 @@ if [ -z "$DANGER_RUN_CI_ON_HOST" ]; then
    --mount type=bind,src=$BASE_ROOT_DIR,dst=/ro_base,readonly \
    --mount type=bind,src=$CCACHE_DIR,dst=$CCACHE_DIR \
    --mount type=bind,src=$DEPENDS_DIR,dst=$DEPENDS_DIR \
+   --mount type=bind,src=$PREVIOUS_RELEASES_DIR,dst=$PREVIOUS_RELEASES_DIR \
    -w $BASE_ROOT_DIR \
    --env-file /tmp/env \
    --name $CONTAINER_NAME \
@@ -26,7 +26,7 @@ if [[ $HOST = *-mingw32 ]]; then
fi
if [ -z "$NO_DEPENDS" ]; then
  if [[ $DOCKER_NAME_TAG == centos* ]]; then
-   # CentOS has problems building the depends if the config shell is not explicitely set
+   # CentOS has problems building the depends if the config shell is not explicitly set
    # (i.e. for libevent a Makefile with an empty SHELL variable is generated, leading to
    # an error as the first command is executed)
    SHELL_OPTS="CONFIG_SHELL=/bin/bash"
@@ -35,3 +35,8 @@ if [ -z "$NO_DEPENDS" ]; then
  fi
  DOCKER_EXEC $SHELL_OPTS make $MAKEJOBS -C depends HOST=$HOST $DEP_OPTS
fi
+if [ -n "$PREVIOUS_RELEASES_TO_DOWNLOAD" ]; then
+  BEGIN_FOLD previous-versions
+  DOCKER_EXEC test/get_previous_releases.py -b -t "$PREVIOUS_RELEASES_DIR" "${PREVIOUS_RELEASES_TO_DOWNLOAD}"
+  END_FOLD
+fi
@@ -96,6 +96,12 @@ Run all possible tests with
test/functional/test_runner.py --extended
```

+In order to run backwards compatibility tests, download the previous node binaries:
+
+```
+test/get_previous_releases.py -b v19.3.0 v18.2.2 v0.17.0.3 v0.16.1.1 v0.15.0.0
+```
+
By default, up to 4 tests will be run in parallel by test_runner. To specify
how many jobs to run, append `--jobs=n`
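For local runs it can help to confirm the downloaded binaries are actually in place before starting the suite. A hedged sketch: the releases/<tag>/bin/dashd layout is inferred from test/get_previous_releases.py and get_bin_from_version() later in this diff, so adjust it if your layout differs.

```python
#!/usr/bin/env python3
# Sketch only: check that the previous-release binaries the backwards
# compatibility tests expect are present, then launch the relevant tests.
import os
import subprocess
import sys

TAGS = ["v0.15.0.0", "v0.16.1.1", "v0.17.0.3", "v18.2.2", "v19.3.0"]
RELEASES_DIR = os.environ.get("PREVIOUS_RELEASES_DIR", "releases")

missing = [t for t in TAGS if not os.path.exists(os.path.join(RELEASES_DIR, t, "bin", "dashd"))]
if missing:
    sys.exit("Missing previous releases (run test/get_previous_releases.py -b): " + " ".join(missing))

subprocess.check_call([
    "test/functional/test_runner.py",
    "--jobs=4",
    "feature_backwards_compatibility.py",
    "mempool_compatibility.py",
    "wallet_upgradewallet.py",
])
```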
test/functional/feature_backwards_compatibility.py (new executable file, 327 lines)
@@ -0,0 +1,327 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2018-2019 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
"""Backwards compatibility functional test
|
||||
|
||||
Test various backwards compatibility scenarios. Requires previous releases binaries,
|
||||
see test/README.md.
|
||||
|
||||
v0.15.0.0 is not required by this test, but it is used in wallet_upgradewallet.py.
|
||||
Due to a hardfork in regtest, it can't be used to sync nodes.
|
||||
|
||||
Due to RPC changes introduced in various versions the below tests
|
||||
won't work for older versions without some patches or workarounds.
|
||||
|
||||
Use only the latest patch version of each release, unless a test specifically
|
||||
needs an older patch version.
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
|
||||
from test_framework.util import (
|
||||
assert_equal,
|
||||
)
|
||||
|
||||
|
||||
class BackwardsCompatibilityTest(BitcoinTestFramework):
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 6
|
||||
# Add new version after each release:
|
||||
self.extra_args = [
|
||||
[], # Pre-release: use to mine blocks
|
||||
["-nowallet"], # Pre-release: use to receive coins, swap wallets, etc
|
||||
["-nowallet"], # v19.3.0
|
||||
["-nowallet"], # v18.2.2
|
||||
["-nowallet"], # v0.17.0.3
|
||||
["-nowallet"], # v0.16.1.1
|
||||
]
|
||||
self.wallet_names = [self.default_wallet_name]
|
||||
|
||||
def skip_test_if_missing_module(self):
|
||||
self.skip_if_no_wallet()
|
||||
self.skip_if_no_previous_releases()
|
||||
|
||||
def setup_nodes(self):
|
||||
self.add_nodes(self.num_nodes, extra_args=self.extra_args, versions=[
|
||||
None,
|
||||
None,
|
||||
19030000,
|
||||
18020200,
|
||||
170003,
|
||||
160101,
|
||||
])
|
||||
|
||||
self.start_nodes()
|
||||
self.import_deterministic_coinbase_privkeys()
|
||||
|
||||
def run_test(self):
|
||||
self.nodes[0].generatetoaddress(101, self.nodes[0].getnewaddress())
|
||||
|
||||
self.sync_blocks()
|
||||
|
||||
# Sanity check the test framework:
|
||||
res = self.nodes[self.num_nodes - 1].getblockchaininfo()
|
||||
assert_equal(res['blocks'], 101)
|
||||
|
||||
node_master = self.nodes[self.num_nodes - 5]
|
||||
node_v19 = self.nodes[self.num_nodes - 4]
|
||||
node_v18 = self.nodes[self.num_nodes - 3]
|
||||
node_v17 = self.nodes[self.num_nodes - 2]
|
||||
node_v16 = self.nodes[self.num_nodes - 1]
|
||||
|
||||
self.log.info("Test wallet backwards compatibility...")
|
||||
# Create a number of wallets and open them in older versions:
|
||||
|
||||
# w1: regular wallet, created on master: update this test when default
|
||||
# wallets can no longer be opened by older versions.
|
||||
node_master.createwallet(wallet_name="w1")
|
||||
wallet = node_master.get_wallet_rpc("w1")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] > 0
|
||||
# Create a confirmed transaction, receiving coins
|
||||
address = wallet.getnewaddress()
|
||||
self.nodes[0].sendtoaddress(address, 1)
|
||||
self.sync_mempools()
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_blocks()
|
||||
|
||||
# w1_v19: regular wallet, created with v0.19
|
||||
node_v19.createwallet(wallet_name="w1_v19")
|
||||
wallet = node_v19.get_wallet_rpc("w1_v19")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] > 0
|
||||
# Use addmultisigaddress (see #18075)
|
||||
address_18075 = wallet.addmultisigaddress(1, ["0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52", "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"], "")["address"]
|
||||
assert wallet.getaddressinfo(address_18075)["solvable"]
|
||||
|
||||
# w1_v18: regular wallet, created with v0.18
|
||||
node_v18.createwallet(wallet_name="w1_v18")
|
||||
wallet = node_v18.get_wallet_rpc("w1_v18")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] > 0
|
||||
|
||||
# w2: wallet with private keys disabled, created on master: update this
|
||||
# test when default wallets private keys disabled can no longer be
|
||||
# opened by older versions.
|
||||
node_master.createwallet(wallet_name="w2", disable_private_keys=True)
|
||||
wallet = node_master.get_wallet_rpc("w2")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled'] == False
|
||||
assert info['keypoolsize'] == 0
|
||||
|
||||
# w2_v19: wallet with private keys disabled, created with v0.19
|
||||
node_v19.createwallet(wallet_name="w2_v19", disable_private_keys=True)
|
||||
wallet = node_v19.get_wallet_rpc("w2_v19")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled'] == False
|
||||
assert info['keypoolsize'] == 0
|
||||
|
||||
# w2_v18: wallet with private keys disabled, created with v0.18
|
||||
node_v18.createwallet(wallet_name="w2_v18", disable_private_keys=True)
|
||||
wallet = node_v18.get_wallet_rpc("w2_v18")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled'] == False
|
||||
assert info['keypoolsize'] == 0
|
||||
|
||||
# w3: blank wallet, created on master: update this
|
||||
# test when default blank wallets can no longer be opened by older versions.
|
||||
node_master.createwallet(wallet_name="w3", blank=True)
|
||||
wallet = node_master.get_wallet_rpc("w3")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] == 0
|
||||
|
||||
# w3_v19: blank wallet, created with v0.19
|
||||
node_v19.createwallet(wallet_name="w3_v19", blank=True)
|
||||
wallet = node_v19.get_wallet_rpc("w3_v19")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] == 0
|
||||
|
||||
# w3_v18: blank wallet, created with v0.18
|
||||
node_v18.createwallet(wallet_name="w3_v18", blank=True)
|
||||
wallet = node_v18.get_wallet_rpc("w3_v18")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] == 0
|
||||
|
||||
# Copy the wallets to older nodes:
|
||||
node_master_wallets_dir = os.path.join(node_master.datadir, "regtest/wallets")
|
||||
node_v19_wallets_dir = os.path.join(node_v19.datadir, "regtest/wallets")
|
||||
node_v18_wallets_dir = os.path.join(node_v18.datadir, "regtest/wallets")
|
||||
node_v17_wallets_dir = os.path.join(node_v17.datadir, "regtest/wallets")
|
||||
node_v16_wallets_dir = os.path.join(node_v16.datadir, "regtest")
|
||||
node_master.unloadwallet("w1")
|
||||
node_master.unloadwallet("w2")
|
||||
node_v19.unloadwallet("w1_v19")
|
||||
node_v19.unloadwallet("w2_v19")
|
||||
node_v18.unloadwallet("w1_v18")
|
||||
node_v18.unloadwallet("w2_v18")
|
||||
|
||||
# Copy wallets to v0.16
|
||||
for wallet in os.listdir(node_master_wallets_dir):
|
||||
shutil.copytree(
|
||||
os.path.join(node_master_wallets_dir, wallet),
|
||||
os.path.join(node_v16_wallets_dir, wallet)
|
||||
)
|
||||
|
||||
# Copy wallets to v0.17
|
||||
for wallet in os.listdir(node_master_wallets_dir):
|
||||
shutil.copytree(
|
||||
os.path.join(node_master_wallets_dir, wallet),
|
||||
os.path.join(node_v17_wallets_dir, wallet)
|
||||
)
|
||||
for wallet in os.listdir(node_v18_wallets_dir):
|
||||
shutil.copytree(
|
||||
os.path.join(node_v18_wallets_dir, wallet),
|
||||
os.path.join(node_v17_wallets_dir, wallet)
|
||||
)
|
||||
|
||||
# Copy wallets to v0.18
|
||||
for wallet in os.listdir(node_master_wallets_dir):
|
||||
shutil.copytree(
|
||||
os.path.join(node_master_wallets_dir, wallet),
|
||||
os.path.join(node_v18_wallets_dir, wallet)
|
||||
)
|
||||
|
||||
# Copy wallets to v0.19
|
||||
for wallet in os.listdir(node_master_wallets_dir):
|
||||
shutil.copytree(
|
||||
os.path.join(node_master_wallets_dir, wallet),
|
||||
os.path.join(node_v19_wallets_dir, wallet)
|
||||
)
|
||||
|
||||
# Open the wallets in v0.19
|
||||
node_v19.loadwallet("w1")
|
||||
wallet = node_v19.get_wallet_rpc("w1")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] > 0
|
||||
txs = wallet.listtransactions()
|
||||
assert_equal(len(txs), 1)
|
||||
|
||||
node_v19.loadwallet("w2")
|
||||
wallet = node_v19.get_wallet_rpc("w2")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled'] == False
|
||||
assert info['keypoolsize'] == 0
|
||||
|
||||
node_v19.loadwallet("w3")
|
||||
wallet = node_v19.get_wallet_rpc("w3")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] == 0
|
||||
|
||||
# Open the wallets in v0.18
|
||||
node_v18.loadwallet("w1")
|
||||
wallet = node_v18.get_wallet_rpc("w1")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] > 0
|
||||
txs = wallet.listtransactions()
|
||||
assert_equal(len(txs), 1)
|
||||
|
||||
node_v18.loadwallet("w2")
|
||||
wallet = node_v18.get_wallet_rpc("w2")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled'] == False
|
||||
assert info['keypoolsize'] == 0
|
||||
|
||||
node_v18.loadwallet("w3")
|
||||
wallet = node_v18.get_wallet_rpc("w3")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] == 0
|
||||
|
||||
# Open the wallets in v0.17
|
||||
node_v17.loadwallet("w1_v18")
|
||||
wallet = node_v17.get_wallet_rpc("w1_v18")
|
||||
info = wallet.getwalletinfo()
|
||||
# doesn't have private_keys_enabled in v17
|
||||
#assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] > 0
|
||||
|
||||
node_v17.loadwallet("w1")
|
||||
wallet = node_v17.get_wallet_rpc("w1")
|
||||
info = wallet.getwalletinfo()
|
||||
# doesn't have private_keys_enabled in v17
|
||||
#assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] > 0
|
||||
|
||||
node_v17.loadwallet("w2_v18")
|
||||
wallet = node_v17.get_wallet_rpc("w2_v18")
|
||||
info = wallet.getwalletinfo()
|
||||
# doesn't have private_keys_enabled in v17
|
||||
# TODO enable back when HD wallets are created by default
|
||||
# assert info['private_keys_enabled'] == False
|
||||
# assert info['keypoolsize'] == 0
|
||||
|
||||
node_v17.loadwallet("w2")
|
||||
wallet = node_v17.get_wallet_rpc("w2")
|
||||
info = wallet.getwalletinfo()
|
||||
# doesn't have private_keys_enabled in v17
|
||||
# TODO enable back when HD wallets are created by default
|
||||
#assert info['private_keys_enabled'] == False
|
||||
#assert info['keypoolsize'] == 0
|
||||
|
||||
# RPC loadwallet failure causes bitcoind to exit, in addition to the RPC
|
||||
# call failure, so the following test won't work:
|
||||
# assert_raises_rpc_error(-4, "Wallet loading failed.", node_v17.loadwallet, 'w3_v18')
|
||||
|
||||
# Instead, we stop node and try to launch it with the wallet:
|
||||
self.stop_node(4)
|
||||
# it expected to fail with error 'DBErrors::TOO_NEW' but Dash Core can open v18 by version 17
|
||||
# can be implemented in future if there's any incompatible versions
|
||||
#node_v17.assert_start_raises_init_error(["-wallet=w3_v18"], "Error: Error loading w3_v18: Wallet requires newer version of Dash Core")
|
||||
#node_v17.assert_start_raises_init_error(["-wallet=w3"], "Error: Error loading w3: Wallet requires newer version of Dash Core")
|
||||
self.start_node(4)
|
||||
|
||||
# Open most recent wallet in v0.16 (no loadwallet RPC)
|
||||
self.restart_node(5, extra_args=["-wallet=w2"])
|
||||
wallet = node_v16.get_wallet_rpc("w2")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['keypoolsize'] == 1
|
||||
|
||||
self.log.info("Test wallet upgrade path...")
|
||||
# u1: regular wallet, created with v0.17
|
||||
node_v17.createwallet(wallet_name="u1_v17")
|
||||
wallet = node_v17.get_wallet_rpc("u1_v17")
|
||||
address = wallet.getnewaddress()
|
||||
info = wallet.getaddressinfo(address)
|
||||
# TODO enable back when HD wallets are created by default
|
||||
#hdkeypath = info["hdkeypath"]
|
||||
pubkey = info["pubkey"]
|
||||
|
||||
# Copy the 0.17 wallet to the last Bitcoin Core version and open it:
|
||||
node_v17.unloadwallet("u1_v17")
|
||||
shutil.copytree(
|
||||
os.path.join(node_v17_wallets_dir, "u1_v17"),
|
||||
os.path.join(node_master_wallets_dir, "u1_v17")
|
||||
)
|
||||
node_master.loadwallet("u1_v17")
|
||||
wallet = node_master.get_wallet_rpc("u1_v17")
|
||||
info = wallet.getaddressinfo(address)
|
||||
# TODO enable back when HD wallets are created by default
|
||||
#descriptor = "pkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + pubkey + ")"
|
||||
#assert_equal(info["desc"], descsum_create(descriptor))
|
||||
assert_equal(info["pubkey"], pubkey)
|
||||
|
||||
# Copy the 0.19 wallet to the last Bitcoin Core version and open it:
|
||||
shutil.copytree(
|
||||
os.path.join(node_v19_wallets_dir, "w1_v19"),
|
||||
os.path.join(node_master_wallets_dir, "w1_v19")
|
||||
)
|
||||
node_master.loadwallet("w1_v19")
|
||||
wallet = node_master.get_wallet_rpc("w1_v19")
|
||||
assert wallet.getaddressinfo(address_18075)["solvable"]
|
||||
|
||||
if __name__ == '__main__':
|
||||
BackwardsCompatibilityTest().main()
|
test/functional/mempool_compatibility.py (new executable file, 80 lines)
@@ -0,0 +1,80 @@
#!/usr/bin/env python3
# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that mempool.dat is both backward and forward compatible between versions

NOTE: The test is designed to prevent cases when compatibility is broken accidentally.
In case we need to break mempool compatibility we can continue to use the test by just bumping the version number.

The previous release v0.15.0.0 is required by this test, see test/README.md.
"""

import os

from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
    adjust_bitcoin_conf_for_pre_16
)


class MempoolCompatibilityTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.wallet_names = [None, self.default_wallet_name]

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
        self.skip_if_no_previous_releases()

    def setup_network(self):
        self.add_nodes(self.num_nodes, versions=[
            150000, # oldest version supported by the test framework
            None,
        ])
        self.extra_args = [
            [],
            [],
        ]
        adjust_bitcoin_conf_for_pre_16(self.nodes[0].bitcoinconf)
        self.start_nodes()
        self.import_deterministic_coinbase_privkeys()

    def run_test(self):
        self.log.info("Test that mempool.dat is compatible between versions")

        old_node = self.nodes[0]
        new_node = self.nodes[1]
        recipient = old_node.getnewaddress()
        self.stop_node(1)

        self.log.info("Add a transaction to mempool on old node and shutdown")
        old_tx_hash = old_node.sendtoaddress(recipient, 0.0001)
        assert old_tx_hash in old_node.getrawmempool()
        self.stop_node(0)

        self.log.info("Move mempool.dat from old to new node")
        old_node_mempool = os.path.join(old_node.datadir, self.chain, 'mempool.dat')
        new_node_mempool = os.path.join(new_node.datadir, self.chain, 'mempool.dat')
        os.rename(old_node_mempool, new_node_mempool)

        self.log.info("Start new node and verify mempool contains the tx")
        self.start_node(1)
        assert old_tx_hash in new_node.getrawmempool()

        self.log.info("Add unbroadcasted tx to mempool on new node and shutdown")
        unbroadcasted_tx_hash = new_node.sendtoaddress(recipient, 0.0001)
        assert unbroadcasted_tx_hash in new_node.getrawmempool()
        mempool = new_node.getrawmempool(True)
        assert mempool[unbroadcasted_tx_hash]['unbroadcast']
        self.stop_node(1)

        self.log.info("Move mempool.dat from new to old node")
        os.rename(new_node_mempool, old_node_mempool)

        self.log.info("Start old node again and verify mempool contains both txs")
        self.start_node(0, ['-nowallet'])
        assert old_tx_hash in old_node.getrawmempool()
        assert unbroadcasted_tx_hash in old_node.getrawmempool()


if __name__ == "__main__":
    MempoolCompatibilityTest().main()
@@ -14,6 +14,7 @@ import logging
import os
import pdb
import random
import re
import shutil
import subprocess
import sys
@@ -455,22 +456,47 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):

    # Public helper methods. These can be accessed by the subclass test scripts.

-   def add_nodes(self, num_nodes, extra_args=None, *, rpchost=None, binary=None):
+   def add_nodes(self, num_nodes, extra_args=None, *, rpchost=None, binary=None, binary_cli=None, versions=None):
        """Instantiate TestNode objects.

        Should only be called once after the nodes have been specified in
        set_test_params()."""
        def get_bin_from_version(version, bin_name, bin_default):
            if not version:
                return bin_default
            return os.path.join(
                self.options.previous_releases_path,
                re.sub(
                    r'\.0$' if version != 150000 else r'^$',
                    '', # remove trailing .0 for point releases
                    'v{}.{}.{}.{}'.format(
                        (version % 100000000) // 1000000,
                        (version % 1000000) // 10000,
                        (version % 10000) // 100,
                        (version % 100) // 1,
                    ),
                ),
                'bin',
                bin_name,
            )

        if self.bind_to_localhost_only:
            extra_confs = [["bind=127.0.0.1"]] * num_nodes
        else:
            extra_confs = [[]] * num_nodes
        if extra_args is None:
            extra_args = [[]] * num_nodes
        if versions is None:
            versions = [None] * num_nodes
        if binary is None:
-           binary = [self.options.bitcoind] * num_nodes
+           binary = [get_bin_from_version(v, 'dashd', self.options.bitcoind) for v in versions]
        if binary_cli is None:
            binary_cli = [get_bin_from_version(v, 'dash-cli', self.options.bitcoincli) for v in versions]
        assert_equal(len(extra_confs), num_nodes)
        assert_equal(len(extra_args), num_nodes)
        assert_equal(len(versions), num_nodes)
        assert_equal(len(binary), num_nodes)
        assert_equal(len(binary_cli), num_nodes)
        old_num_nodes = len(self.nodes)
        for i in range(num_nodes):
            self.nodes.append(TestNode(
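The versions passed to add_nodes() are plain integers with two decimal digits per component; get_bin_from_version() above turns them into a release tag and then a path under previous_releases_path. A self-contained sketch of just that mapping, using the same arithmetic:

```python
# Standalone sketch of the version-to-tag mapping used by get_bin_from_version()
# above: each pair of decimal digits is one version component, and a trailing
# ".0" is stripped for post-0.x releases (150000 is special-cased and kept as-is).
import re

def version_to_tag(version):
    tag = 'v{}.{}.{}.{}'.format(
        (version % 100000000) // 1000000,
        (version % 1000000) // 10000,
        (version % 10000) // 100,
        version % 100,
    )
    return re.sub(r'\.0$' if version != 150000 else r'^$', '', tag)

assert version_to_tag(19030000) == 'v19.3.0'
assert version_to_tag(18020200) == 'v18.2.2'
assert version_to_tag(170003) == 'v0.17.0.3'
assert version_to_tag(160101) == 'v0.16.1.1'
assert version_to_tag(150000) == 'v0.15.0.0'
```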
@@ -482,7 +508,8 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
                timewait=self.rpc_timeout,
                timeout_factor=self.options.timeout_factor,
                bitcoind=binary[i],
-               bitcoin_cli=self.options.bitcoincli,
+               bitcoin_cli=binary_cli[i],
+               version=versions[i],
                mocktime=self.mocktime,
                coverage_dir=self.options.coveragedir,
                cwd=self.options.tmpdir,
@@ -734,7 +761,8 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
            if pool.count(pool[0]) == len(rpc_connections):
                if flush_scheduler:
                    for r in rpc_connections:
-                       r.syncwithvalidationinterfacequeue()
+                       if r.version_is_at_least(170000):
+                           r.syncwithvalidationinterfacequeue()
                return
        # Check that each peer has at least one connection
        assert (all([len(x.getpeerinfo()) for x in rpc_connections]))
@@ -63,7 +63,7 @@ class TestNode():
    To make things easier for the test writer, any unrecognised messages will
    be dispatched to the RPC connection."""

-   def __init__(self, i, datadir, extra_args_from_options, *, chain, rpchost, timewait, timeout_factor, bitcoind, bitcoin_cli, mocktime, coverage_dir, cwd, extra_conf=None, extra_args=None, use_cli=False, start_perf=False, use_valgrind=False):
+   def __init__(self, i, datadir, extra_args_from_options, *, chain, rpchost, timewait, timeout_factor, bitcoind, bitcoin_cli, mocktime, coverage_dir, cwd, extra_conf=None, extra_args=None, use_cli=False, start_perf=False, use_valgrind=False, version=None):
        """
        Kwargs:
            start_perf (bool): If True, begin profiling the node with `perf` as soon as
@@ -89,6 +89,7 @@ class TestNode():
        # Note that common args are set in the config file (see initialize_datadir)
        self.extra_args = extra_args
        self.extra_args_from_options = extra_args_from_options
+       self.version = version
        # Configuration for logging is set as command-line args rather than in the bitcoin.conf file.
        # This means that starting a bitcoind using the temp dir to debug a failed test won't
        # spam debug.log.
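self.version stays None for nodes built from the current tree and holds the encoded integer for pinned previous releases. The version_is_at_least() checks used below and in test_framework.py gate newer RPCs and flags on it; the helper itself is not part of this hunk, so the following is only a minimal sketch of the assumed behaviour:

```python
# Sketch of the helper assumed by the checks in this diff (the real method is
# defined elsewhere in test_node.py): a version of None means "built from the
# current tree", which is treated as newer than any pinned release.
def version_is_at_least(self, ver):
    return self.version is None or self.version >= ver

# e.g. a v0.17.0.3 node (version=170003) reports False for 180000,
# while a master node (version=None) reports True for any threshold.
```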
@@ -96,7 +97,6 @@ class TestNode():
            self.binary,
            "-datadir=" + self.datadir,
            "-logtimemicros",
-           "-logthreadnames",
            "-debug",
            "-debugexclude=libevent",
            "-debugexclude=leveldb",
@@ -113,6 +113,9 @@ class TestNode():
                "--gen-suppressions=all", "--exit-on-first-error=yes",
                "--error-exitcode=1", "--quiet"] + self.args

+       if self.version_is_at_least(190000):
+           self.args.append("-logthreadnames")
+
        self.cli = TestNodeCLI(bitcoin_cli, self.datadir)
        self.use_cli = use_cli
        self.start_perf = start_perf
@@ -242,24 +245,27 @@ class TestNode():
                )
                rpc.getblockcount()
                # If the call to getblockcount() succeeds then the RPC connection is up
-               wait_until(lambda: rpc.getmempoolinfo()['loaded'])
+               if self.version_is_at_least(180000):
+                   # getmempoolinfo.loaded is available since commit
+                   # 71e38b9ebcb78b3a264a4c25c7c4e373317f2a40 (version 0.18.0)
+                   wait_until(lambda: rpc.getmempoolinfo()['loaded'])
                    # Wait for the node to finish reindex, block import, and
                    # loading the mempool. Usually importing happens fast or
                    # even "immediate" when the node is started. However, there
                    # is no guarantee and sometimes ThreadImport might finish
                    # later. This is going to cause intermittent test failures,
                    # because generally the tests assume the node is fully
                    # ready after being started.
                    #
                    # For example, the node will reject block messages from p2p
                    # when it is still importing with the error "Unexpected
                    # block message received"
                    #
                    # The wait is done here to make tests as robust as possible
                    # and prevent racy tests and intermittent failures as much
                    # as possible. Some tests might not need this, but the
                    # overhead is trivial, and the added guarantees are worth
                    # the minimal performance cost.
                self.log.debug("RPC successfully started")
                if self.use_cli:
                    return
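wait_until() (from test_framework.util, not shown in this hunk) is the usual poll-until-true helper; a rough sketch of the pattern, for orientation only, since the framework's own implementation adds timeout scaling and richer error reporting:

```python
# Illustrative sketch of the polling pattern behind wait_until(); not the
# framework's actual helper.
import time

def wait_until(predicate, *, timeout=60, poll_interval=0.5):
    deadline = time.time() + timeout
    while time.time() < deadline:
        if predicate():
            return
        time.sleep(poll_interval)
    raise AssertionError("Predicate not satisfied within {}s".format(timeout))
```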
@@ -325,7 +331,11 @@ class TestNode():
            return
        self.log.debug("Stopping node")
        try:
-           self.stop(wait=wait)
+           # Do not use wait argument when testing older nodes, e.g. in feature_backwards_compatibility.py
+           if self.version_is_at_least(180000):
+               self.stop(wait=wait)
+           else:
+               self.stop()
        except http.client.CannotSendRequest:
            self.log.exception("Unable to stop node.")
@@ -345,7 +345,7 @@ def initialize_datadir(dirname, n, chain):
    os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
    return datadir

-def adjust_bitcoin_conf_for_pre_17(conf_file):
+def adjust_bitcoin_conf_for_pre_16(conf_file):
    with open(conf_file,'r', encoding='utf8') as conf:
        conf_data = conf.read()
    with open(conf_file, 'w', encoding='utf8') as conf:
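The body of the renamed helper is cut off by the hunk; per the comment in wallet_upgradewallet.py further down ("older dashd's don't recognize config sections"), its job is to rewrite the generated config so section-scoped options still reach pre-0.16 binaries. A hedged sketch of one way to do that; the function name below is made up, and the real transformation in util.py may differ:

```python
# Hypothetical sketch: flatten a sectioned config for old binaries by dropping
# section headers so options written under "[regtest]" become plain top-level
# options. The actual adjust_bitcoin_conf_for_pre_16() body is not shown here.
def flatten_conf_for_old_node(conf_file):
    with open(conf_file, 'r', encoding='utf8') as conf:
        lines = conf.read().splitlines()
    with open(conf_file, 'w', encoding='utf8') as conf:
        for line in lines:
            if not line.strip().startswith('['):  # drop section headers only
                conf.write(line + '\n')
```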
@@ -192,6 +192,7 @@ BASE_SCRIPTS = [
    'feature_assumevalid.py',
    'example_test.py',
    'wallet_txn_doublespend.py',
+   'feature_backwards_compatibility.py',
    'wallet_txn_clone.py --mineblock',
    'feature_notifications.py',
    'rpc_getblockfilter.py',
@@ -260,6 +261,7 @@ BASE_SCRIPTS = [
    'feature_asmap.py',
    'feature_includeconf.py',
    'mempool_unbroadcast.py',
+   'mempool_compatibility.py',
    'rpc_deriveaddresses.py',
    'rpc_deriveaddresses.py --usecli',
    'rpc_scantxoutset.py',
@@ -4,18 +4,17 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""upgradewallet RPC functional test

-Test upgradewallet RPC. Download v0.15.2 v0.16.3 node binaries:
-
-contrib/devtools/previous_release.sh -b v0.15.2 v0.16.3
+Requires previous releases binaries, see test/README.md.
+Only v0.15.2 and v0.16.3 are required by this test. The others are used in feature_backwards_compatibility.py
"""

import os
import shutil

from test_framework.blocktools import COINBASE_MATURITY
-from test_framework.test_framework import BitcoinTestFramework
+from test_framework.test_framework import (BitcoinTestFramework, SkipTest)
from test_framework.util import (
-    adjust_bitcoin_conf_for_pre_17,
+    adjust_bitcoin_conf_for_pre_16,
    assert_equal,
    assert_greater_than,
    assert_is_hex_string,
@@ -27,15 +26,18 @@ class UpgradeWalletTest(BitcoinTestFramework):
        self.setup_clean_chain = True
        self.num_nodes = 3
        self.extra_args = [
-           ["-addresstype=bech32"], # current wallet version
+           [], # current wallet version
            ["-usehd=1"], # v0.16.3 wallet
            ["-usehd=0"] # v0.15.2 wallet
        ]
        self.wallet_names = [self.default_wallet_name]

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
        self.skip_if_no_bdb()
+       self.skip_if_no_previous_releases()
+       # TODO: this test doesn't work yet
+       raise SkipTest("Test wallet_upgradewallet.py is not adapted for Dash Core yet.")

    def setup_network(self):
        self.setup_nodes()
@@ -46,10 +48,10 @@ class UpgradeWalletTest(BitcoinTestFramework):
            160300,
            150200,
        ])
-       # adapt bitcoin.conf, because older bitcoind's don't recognize config sections
-       adjust_bitcoin_conf_for_pre_17(self.nodes[1].bitcoinconf)
-       adjust_bitcoin_conf_for_pre_17(self.nodes[2].bitcoinconf)
+       # adapt dash.conf, because older dashd's don't recognize config sections
+       adjust_bitcoin_conf_for_pre_16(self.nodes[2].bitcoinconf)
        self.start_nodes()
        self.import_deterministic_coinbase_privkeys()

    def dumb_sync_blocks(self):
        """
@@ -94,7 +96,7 @@ class UpgradeWalletTest(BitcoinTestFramework):
        v15_2_wallet = os.path.join(v15_2_node.datadir, "regtest/wallet.dat")
        self.stop_nodes()

-       # Copy the 0.16.3 wallet to the last Bitcoin Core version and open it:
+       # Copy the 0.16.3 wallet to the last Dash Core version and open it:
        shutil.rmtree(node_master_wallet_dir)
        os.mkdir(node_master_wallet_dir)
        shutil.copy(
@@ -117,7 +119,7 @@ class UpgradeWalletTest(BitcoinTestFramework):
        assert_equal(wallet.getbalance(), v16_3_balance)

        self.stop_node(0)
-       # Copy the 0.15.2 wallet to the last Bitcoin Core version and open it:
+       # Copy the 0.15.2 wallet to the last Dash Core version and open it:
        shutil.rmtree(node_master_wallet_dir)
        os.mkdir(node_master_wallet_dir)
        shutil.copy(
test/get_previous_releases.py (new executable file, 265 lines)
@@ -0,0 +1,265 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright (c) 2018-2020 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
#
|
||||
# Download or build previous releases.
|
||||
# Needs curl and tar to download a release, or the build dependencies when
|
||||
# building a release.
|
||||
|
||||
import argparse
|
||||
import contextlib
|
||||
from fnmatch import fnmatch
|
||||
import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import hashlib
|
||||
|
||||
|
||||
SHA256_SUMS = {
|
||||
"a4b555b47f5f9a5a01fc5d3b543731088bd10a65dd7fa81fb552818146e424b5": "dashcore-19.3.0-aarch64-linux-gnu.tar.gz",
|
||||
"531bb188c1aea808ef6f3533d71182a51958136f6e43d9fcadaef1a5fcdd0468": "dashcore-19.3.0-osx.dmg",
|
||||
"1b4673a2bd71f9f2b593c2d71386e60f4744b59b57142707f0045ed49c92024b": "dashcore-19.3.0-osx64.tar.gz",
|
||||
"d23cd59ab3a230ebb9bd34fa6329e0d157ecfdbd133f171dfdfa08039d0b3983": "dashcore-19.3.0-riscv64-linux-gnu.tar.gz",
|
||||
"b5c1860440f97dbb79b1d79bcc48fb2dcc7f0915dd0c4f9fc77aba9cab0294f3": "dashcore-19.3.0-win64-setup.exe",
|
||||
"8a288189bd4b7c23bb1f917256290dd606d9d47a533dcede0c6190a8f4722e1a": "dashcore-19.3.0-win64.zip",
|
||||
"c2f3ff5631094abe16af8e476d1197be8685ee20601deda5cad0c34fc879c3de": "dashcore-19.3.0-x86_64-linux-gnu.tar.gz",
|
||||
"b4bb6bec21213e47586607657e69b0a53905e3c32e2e8e650e93db54dce572d8": "dashcore-19.3.0.tar.gz",
|
||||
#
|
||||
"2870149fda49e731fdf67951408e8b9a1f21f4d91693add0287fe6abb7f8e5b4": "dashcore-19.1.0-aarch64-linux-gnu.tar.gz",
|
||||
"900f6209831f1a21be7ed51edd48a6312fdfb8759fac94b77b23d77484254356": "dashcore-19.1.0-osx64.tar.gz",
|
||||
"4ff370e904f08f9b31727535c5ccdde616d7cdee2fb9396aa887910fc87702ff": "dashcore-19.1.0-osx.dmg",
|
||||
"49fb6cc79429cd46e57b9b1197c863dac5ca56a17004596d9cc364f5fcf395f8": "dashcore-19.1.0-riscv64-linux-gnu.tar.gz",
|
||||
"4aad6aedd3b45ae8c5279ad6ee886e7d80a1faa59be9bae882bdd6df68992990": "dashcore-19.1.0-x86_64-linux-gnu.tar.gz",
|
||||
#
|
||||
"d7907726666e9266f5eae830789a1c36cf8c84b43bc0c0fab907317a5cc03f09": "dashcore-18.2.2-aarch64-linux-gnu.tar.gz",
|
||||
"b70c5fb7c916f093840b9adb6f0287488843e0e69b403e99ed0bc93d34e24f85": "dashcore-18.2.2-osx.dmg",
|
||||
"9b376e99400a3b0cb8e777477cf07567c36ed65018e4becbd98eebfbcca3efee": "dashcore-18.2.2-osx64.tar.gz",
|
||||
"091d704b58e51171fcad7de24ea6d9db644834cfa5df610517a9528a70b9c4cf": "dashcore-18.2.2-riscv64-linux-gnu.tar.gz",
|
||||
"be3f3c7f3f98198837dc6739cc99126f9de6ddb38c0cf6d291068a65e3c6dede": "dashcore-18.2.2-win64-setup.exe",
|
||||
"ade7b79182443a04b101a4a51cdc402425e583873aafa44a136e4937f89bde61": "dashcore-18.2.2-win64.zip",
|
||||
"ebe3170835232c0a1e7456712fbb285d5749cbf3cfb5de29f8db3a2ad81be1cf": "dashcore-18.2.2-x86_64-linux-gnu.tar.gz",
|
||||
"76c5c44961d30d570c430325fe145fbd413c8038e88994dc6c93fa2da4dc2dd7": "dashcore-18.2.2.tar.gz",
|
||||
#
|
||||
"57b7b9a7939a726afc133229dbecc4f46394bf57d32bf3e936403ade2b2a8519": "dashcore-0.17.0.3-aarch64-linux-gnu.tar.gz",
|
||||
"024fa38c48925a684bac217c0915451a3f9d15a1048035ef41ca0c9ffa8c6801": "dashcore-0.17.0.3-arm-linux-gnueabihf.tar.gz",
|
||||
"c5c3d1f6dbe9934ac10ef43da733fa9e01689f7e42d363edbfcfa0c4c64269c5": "dashcore-0.17.0.3-i686-pc-linux-gnu.tar.gz",
|
||||
"349c65fb6c0d2d509d338b5bd8ca25b069b84c6d57b49a9d1bc830744e09926b": "dashcore-0.17.0.3-osx64.tar.gz",
|
||||
"5b6ce9f43fc07f5e73c0de6890929adcda31e29479f06605b4f7434e04348041": "dashcore-0.17.0.3-osx.dmg",
|
||||
"baf1e0e24c7d2a699898a33b10f4b9c2fb6059286a6c336fd4921a58a4e8eb80": "dashcore-0.17.0.3.tar.gz",
|
||||
"f46958c99a9d635dea81f7a76ab079f25816a428eecdb0db556bdec4c08b8418": "dashcore-0.17.0.3-win32-setup.exe",
|
||||
"3c02bbd6e8d232b24ab72789f91c5be7197f105ee4db03a597fd6a262098c713": "dashcore-0.17.0.3-win32.zip",
|
||||
"347cd9b1899274eef62fca55f9e0bc929dc48482866a89f2098671cf68e9ace6": "dashcore-0.17.0.3-win64-setup.exe",
|
||||
"e606767165adc16d2a02a510d029e2bb4fc47e0beca548fd4ef5be675d3635ab": "dashcore-0.17.0.3-win64.zip",
|
||||
"d4086b1271589e8d72e6ca151a1c8f12e4dc2878d60ec69532d0c48e99391996": "dashcore-0.17.0.3-x86_64-linux-gnu.tar.gz",
|
||||
#
|
||||
"b0fd7b1344701f6b96f6b6978fbce7fd5d3e0310a2993e17858573d80e2941c0": "dashcore-0.16.1.1-aarch64-linux-gnu.tar.gz",
|
||||
"6d829fb8a419db93d99f03a12d0a426292cfba916fa7173107f7a760e4d1cd56": "dashcore-0.16.1.1-arm-linux-gnueabihf.tar.gz",
|
||||
"7e3737967bb28532c985858dc23af01cbd5ec239a083a91a351313b95105c6cc": "dashcore-0.16.1.1-i686-pc-linux-gnu.tar.gz",
|
||||
"3f26d7da7b3ea5ce1fabf34b4086a978324d5806481dc8470b15294a0807100d": "dashcore-0.16.1.1-osx64.tar.gz",
|
||||
"49a5ca7364b62f9908239e12da8181c9bbe8b7ca6508bc569f05907800af084c": "dashcore-0.16.1.1-osx.dmg",
|
||||
"8cd15db027b1745a9205c2067a2e5113772696535ec521a7fc9f6d7b2583e0ea": "dashcore-0.16.1.1.tar.gz",
|
||||
"dced7d9588e80d3d97d2c06fb2352e0ccb97e23698ed78f2db21d94c6550f2e4": "dashcore-0.16.1.1-win32-setup.exe",
|
||||
"1c6efe0f70702f4abd6ce42e0dffe9c311f3b71174ab497defc03cd69cebcfc4": "dashcore-0.16.1.1-win32.zip",
|
||||
"72daff27358d87c1154161e04bf4cb4091ef8cf506b196503c4e58f943913030": "dashcore-0.16.1.1-win64-setup.exe",
|
||||
"3f9bf89da1eb0354f06020d107926ebeb799625d792954d2b9d1436dfdea014e": "dashcore-0.16.1.1-win64.zip",
|
||||
"8803928bd18e9515f1254f97751eb6e537a084d66111ce7968eafb23e26bf3a5": "dashcore-0.16.1.1-x86_64-linux-gnu.tar.gz",
|
||||
#
|
||||
"e2c7f2566e26420a54c8d08e1f8a8d5595bb22fba46d3a84ab931f5cd0efc7f9": "dashcore-0.15.0.0-aarch64-linux-gnu.tar.gz",
|
||||
"be3a2054eb39826bd416252ab3c9a233e90a27b545739b15fb4c9c399b0fbe68": "dashcore-0.15.0.0-arm-linux-gnueabihf.tar.gz",
|
||||
"457c4c934669223beb29ceface62f700999bd1a0c913bd89d45de5f8e6e916b7": "dashcore-0.15.0.0-i686-pc-linux-gnu.tar.gz",
|
||||
"f5e5d25df3d84a9e5dceef43d0bcf54fa697ea73f3e29ec39a8f9952ace8792c": "dashcore-0.15.0.0-osx64.tar.gz",
|
||||
"09f76396217eef6e5a7ba464d9b1f5abd78925b314f663bb709fdb02013899df": "dashcore-0.15.0.0-osx.dmg",
|
||||
"8a1088477198b3cd549017246ecbd4d03ddafae772a0344c92a0c4d9478d90b6": "dashcore-0.15.0.0.tar.gz",
|
||||
"a647e7ba87e8e31fba0683a61a02c578a967a58901784b9b96f0df1233293241": "dashcore-0.15.0.0-win32-setup.exe",
|
||||
"404d5451a782fabda43197d0608d7cfab1a51a02695c72cdada4507f1db5f99c": "dashcore-0.15.0.0-win32.zip",
|
||||
"f532bc7e0360e80908eb6b9c3aeec7e0037e70e25dee3b040dbbf7a124e05619": "dashcore-0.15.0.0-win64-setup.exe",
|
||||
"3ba6ff98113af30319fb1499d132d993633380476f9980443d630d21a40e0efb": "dashcore-0.15.0.0-win64.zip",
|
||||
"4cc0815ebd595f3d0134a8df9e6224cbe3d79398a5a899b60ca5f4ab8a576160": "dashcore-0.15.0.0-x86_64-linux-gnu.tar.gz",
|
||||
}
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def pushd(new_dir) -> None:
|
||||
previous_dir = os.getcwd()
|
||||
os.chdir(new_dir)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
os.chdir(previous_dir)
|
||||
|
||||
|
||||
def download_binary(tag, args) -> int:
|
||||
if Path(tag).is_dir():
|
||||
if not args.remove_dir:
|
||||
print('Using cached {}'.format(tag))
|
||||
return 0
|
||||
shutil.rmtree(tag)
|
||||
Path(tag).mkdir()
|
||||
bin_path = 'releases/download/v{}'.format(tag[1:])
|
||||
match = re.compile('v(.*)(rc[0-9]+)$').search(tag)
|
||||
if match:
|
||||
bin_path = 'releases/download/test.{}'.format(
|
||||
match.group(1), match.group(2))
|
||||
tarball = 'dashcore-{tag}-{platform}.tar.gz'.format(
|
||||
tag=tag[1:], platform=args.platform)
|
||||
tarballUrl = 'https://github.com/dashpay/dash/{bin_path}/{tarball}'.format(
|
||||
bin_path=bin_path, tarball=tarball)
|
||||
|
||||
print('Fetching: {tarballUrl}'.format(tarballUrl=tarballUrl))
|
||||
|
||||
header, status = subprocess.Popen(
|
||||
['curl', '--head', tarballUrl], stdout=subprocess.PIPE).communicate()
|
||||
if re.search("404 Not Found", header.decode("utf-8")):
|
||||
print("Binary tag was not found")
|
||||
return 1
|
||||
|
||||
curlCmds = [
|
||||
['curl', '-L', '--remote-name', tarballUrl]
|
||||
]
|
||||
|
||||
for cmd in curlCmds:
|
||||
ret = subprocess.run(cmd).returncode
|
||||
if ret:
|
||||
return ret
|
||||
|
||||
hasher = hashlib.sha256()
|
||||
with open(tarball, "rb") as afile:
|
||||
hasher.update(afile.read())
|
||||
tarballHash = hasher.hexdigest()
|
||||
|
||||
if tarballHash not in SHA256_SUMS or SHA256_SUMS[tarballHash] != tarball:
|
||||
if tarball in SHA256_SUMS.values():
|
||||
print("Checksum did not match")
|
||||
return 1
|
||||
|
||||
print("Checksum for given version doesn't exist")
|
||||
return 1
|
||||
print("Checksum matched")
|
||||
|
||||
# Extract tarball
|
||||
# special case for v17 and earlier: other name of version
|
||||
filename = tag[1:-2] if tag[1:3] == "0." else tag[1:]
|
||||
ret = subprocess.run(['tar', '-zxf', tarball, '-C', tag,
|
||||
'--strip-components=1',
|
||||
'dashcore-{tag}'.format(tag=filename, platform=args.platform)]).returncode
|
||||
if ret:
|
||||
return ret
|
||||
|
||||
Path(tarball).unlink()
|
||||
return 0
|
||||
|
||||
|
||||
def build_release(tag, args) -> int:
|
||||
githubUrl = "https://github.com/dashpay/dash"
|
||||
if args.remove_dir:
|
||||
if Path(tag).is_dir():
|
||||
shutil.rmtree(tag)
|
||||
if not Path(tag).is_dir():
|
||||
# fetch new tags
|
||||
subprocess.run(
|
||||
["git", "fetch", githubUrl, "--tags"])
|
||||
output = subprocess.check_output(['git', 'tag', '-l', tag])
|
||||
if not output:
|
||||
print('Tag {} not found'.format(tag))
|
||||
return 1
|
||||
ret = subprocess.run([
|
||||
'git', 'clone', githubUrl, tag
|
||||
]).returncode
|
||||
if ret:
|
||||
return ret
|
||||
with pushd(tag):
|
||||
ret = subprocess.run(['git', 'checkout', tag]).returncode
|
||||
if ret:
|
||||
return ret
|
||||
host = args.host
|
||||
if args.depends:
|
||||
with pushd('depends'):
|
||||
ret = subprocess.run(['make', 'NO_QT=1']).returncode
|
||||
if ret:
|
||||
return ret
|
||||
host = os.environ.get(
|
||||
'HOST', subprocess.check_output(['./config.guess']))
|
||||
config_flags = '--prefix={pwd}/depends/{host} '.format(
|
||||
pwd=os.getcwd(),
|
||||
host=host) + args.config_flags
|
||||
cmds = [
|
||||
'./autogen.sh',
|
||||
'./configure {}'.format(config_flags),
|
||||
'make',
|
||||
]
|
||||
for cmd in cmds:
|
||||
ret = subprocess.run(cmd.split()).returncode
|
||||
if ret:
|
||||
return ret
|
||||
# Move binaries, so they're in the same place as in the
|
||||
# release download
|
||||
Path('bin').mkdir(exist_ok=True)
|
||||
files = ['dashd', 'dash-cli', 'dash-tx']
|
||||
for f in files:
|
||||
Path('src/'+f).rename('bin/'+f)
|
||||
return 0
|
||||
|
||||
|
||||
def check_host(args) -> int:
|
||||
args.host = os.environ.get('HOST', subprocess.check_output(
|
||||
'./depends/config.guess').decode())
|
||||
if args.download_binary:
|
||||
platforms = {
|
||||
'aarch64-*-linux*': 'aarch64-linux-gnu',
|
||||
'x86_64-*-linux*': 'x86_64-linux-gnu',
|
||||
'x86_64-apple-darwin*': 'osx64',
|
||||
'aarch64-apple-darwin*': 'osx64',
|
||||
}
|
||||
args.platform = ''
|
||||
for pattern, target in platforms.items():
|
||||
if fnmatch(args.host, pattern):
|
||||
args.platform = target
|
||||
if not args.platform:
|
||||
print('Not sure which binary to download for {}'.format(args.host))
|
||||
return 1
|
||||
return 0
|
||||
|
||||
|
||||
def main(args) -> int:
|
||||
Path(args.target_dir).mkdir(exist_ok=True, parents=True)
|
||||
print("Releases directory: {}".format(args.target_dir))
|
||||
ret = check_host(args)
|
||||
if ret:
|
||||
return ret
|
||||
if args.download_binary:
|
||||
with pushd(args.target_dir):
|
||||
for tag in args.tags:
|
||||
ret = download_binary(tag, args)
|
||||
if ret:
|
||||
return ret
|
||||
return 0
|
||||
args.config_flags = os.environ.get('CONFIG_FLAGS', '')
|
||||
args.config_flags += ' --without-gui --disable-tests --disable-bench'
|
||||
with pushd(args.target_dir):
|
||||
for tag in args.tags:
|
||||
ret = build_release(tag, args)
|
||||
if ret:
|
||||
return ret
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(
|
||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||
parser.add_argument('-r', '--remove-dir', action='store_true',
|
||||
help='remove existing directory.')
|
||||
parser.add_argument('-d', '--depends', action='store_true',
|
||||
help='use depends.')
|
||||
parser.add_argument('-b', '--download-binary', action='store_true',
|
||||
help='download release binary.')
|
||||
parser.add_argument('-t', '--target-dir', action='store',
|
||||
help='target directory.', default='releases')
|
||||
parser.add_argument('tags', nargs='+',
|
||||
help="release tags. e.g.: v19.1.0 v19.0.0-rc.9")
|
||||
args = parser.parse_args()
|
||||
sys.exit(main(args))
|
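download_binary() above accepts a tarball only if its SHA-256 digest maps back to the expected file name in SHA256_SUMS. The same check, pulled out as a standalone sketch for verifying a manually fetched archive; the example path in the comment is hypothetical:

```python
# Standalone sketch of the checksum step download_binary() performs above:
# hash the downloaded tarball and require that the digest maps back to the
# expected file name in SHA256_SUMS.
import hashlib

def verify_tarball(tarball_path, tarball_name, sha256_sums):
    hasher = hashlib.sha256()
    with open(tarball_path, "rb") as f:
        hasher.update(f.read())
    return sha256_sums.get(hasher.hexdigest()) == tarball_name

# Example (hypothetical local path):
# ok = verify_tarball("releases/dashcore-19.3.0-x86_64-linux-gnu.tar.gz",
#                     "dashcore-19.3.0-x86_64-linux-gnu.tar.gz", SHA256_SUMS)
```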