Mirror of https://github.com/dashpay/dash.git (synced 2024-12-24 19:42:46 +01:00)

Compare commits: 34 commits, 65fdcda82b...76d789bf31
Commits (SHA1, newest first): 76d789bf31, 7530f3d245, 5bf0409eba, a9cfd39390, 0968a0023b, 04ce1fea52, 16c2e13fb4, a1b256b06f, c6dd3dd567, ff29c62103, 27d9763b1b, 26cc5a1c90, d0131a5259, 4f1b5c165b, a49162ffae, 8dd0db7de9, 187fe17650, eef863554a, e7702292d1, b7099eed47, 64cdc42130, 440fd3fe21, 3931608858, f147373a32, 2a2a2693d0, 11eeae2ab9, f16265dd50, b212ca0515, 66e77f7879, 995cae46af, 61a0140362, 84deba5456, 2d0e5d7370, 136bf01154
.github/workflows/build.yml (vendored, 12 lines changed)
@@ -114,22 +114,22 @@ jobs:
- build_target: linux64
host: x86_64-pc-linux-gnu
depends_on: linux64
- build_target: linux64_tsan
host: x86_64-pc-linux-gnu
depends_on: linux64
- build_target: linux64_ubsan
- build_target: linux64_cxx20
host: x86_64-pc-linux-gnu
depends_on: linux64
- build_target: linux64_fuzz
host: x86_64-pc-linux-gnu
depends_on: linux64
- build_target: linux64_cxx20
- build_target: linux64_nowallet
host: x86_64-pc-linux-gnu
depends_on: linux64
- build_target: linux64_sqlite
host: x86_64-pc-linux-gnu
depends_on: linux64
- build_target: linux64_nowallet
- build_target: linux64_tsan
host: x86_64-pc-linux-gnu
depends_on: linux64
- build_target: linux64_ubsan
host: x86_64-pc-linux-gnu
depends_on: linux64
container:
@@ -40,26 +40,11 @@ builder-image:
needs:
- builder-image
image: $CI_REGISTRY_IMAGE:builder-$CI_COMMIT_REF_SLUG
variables:
SDK_URL: https://bitcoincore.org/depends-sources/sdks
XCODE_VERSION: "15.0"
XCODE_BUILD_ID: 15A240d
before_script:
- echo HOST=$HOST
- |
if [ "$HOST" = "x86_64-apple-darwin" ]; then
mkdir -p depends/SDKs
mkdir -p depends/sdk-sources
OSX_SDK_BASENAME="Xcode-${XCODE_VERSION}-${XCODE_BUILD_ID}-extracted-SDK-with-libcxx-headers.tar.gz"
OSX_SDK_PATH="depends/sdk-sources/${OSX_SDK_BASENAME}"
if [ ! -f "$OSX_SDK_PATH" ]; then
echo "Downloading MacOS SDK"
curl --location --fail "${SDK_URL}/${OSX_SDK_BASENAME}" -o "$OSX_SDK_PATH"
fi
if [ -f "$OSX_SDK_PATH" ]; then
echo "Extracting MacOS SDK"
tar -C depends/SDKs -xf "$OSX_SDK_PATH"
fi
echo HOST=${HOST}
if [[ "${HOST}" == "x86_64-apple-darwin" ]]; then
./contrib/containers/guix/scripts/setup-sdk
fi
script:
- make -j$(nproc) -C depends HOST=$HOST $DEP_OPTS
@@ -193,13 +178,13 @@ x86_64-w64-mingw32:
variables:
HOST: x86_64-w64-mingw32

x86_64-pc-linux-gnu-debug:
x86_64-pc-linux-gnu_debug:
extends: .build-depends-template
variables:
HOST: x86_64-pc-linux-gnu
DEP_OPTS: "DEBUG=1"

x86_64-pc-linux-gnu-nowallet:
x86_64-pc-linux-gnu_nowallet:
extends:
- .build-depends-template
- .skip-in-fast-mode-template
@@ -207,13 +192,13 @@ x86_64-pc-linux-gnu-nowallet:
HOST: x86_64-pc-linux-gnu
DEP_OPTS: "NO_WALLET=1"

x86_64-pc-linux-gnu-multiprocess:
x86_64-pc-linux-gnu_multiprocess:
extends:
- .build-depends-template
- .skip-in-fast-mode-template
variables:
HOST: x86_64-pc-linux-gnu
DEP_OPTS: "MULTIPROCESS=1"
DEP_OPTS: "DEBUG=1 MULTIPROCESS=1"

x86_64-apple-darwin:
extends:
@@ -243,7 +228,7 @@ win64-build:
linux64-build:
extends: .build-template
needs:
- x86_64-pc-linux-gnu-debug
- x86_64-pc-linux-gnu_debug
variables:
BUILD_TARGET: linux64

@@ -252,7 +237,7 @@ linux64_cxx20-build:
- .build-template
- .skip-in-fast-mode-template
needs:
- x86_64-pc-linux-gnu-debug
- x86_64-pc-linux-gnu_debug
variables:
BUILD_TARGET: linux64_cxx20

@@ -261,7 +246,7 @@ linux64_sqlite-build:
- .build-template
- .skip-in-fast-mode-template
needs:
- x86_64-pc-linux-gnu-debug
- x86_64-pc-linux-gnu_debug
variables:
BUILD_TARGET: linux64_sqlite

@@ -270,7 +255,7 @@ linux64_fuzz-build:
- .build-template
- .skip-in-fast-mode-template
needs:
- x86_64-pc-linux-gnu-debug
- x86_64-pc-linux-gnu_debug
variables:
BUILD_TARGET: linux64_fuzz

@@ -279,7 +264,7 @@ linux64_fuzz-build:
# - .build-template
# - .skip-in-fast-mode-template
# needs:
# - x86_64-pc-linux-gnu-debug
# - x86_64-pc-linux-gnu_debug
# variables:
# BUILD_TARGET: linux64_asan

@@ -288,7 +273,7 @@ linux64_tsan-build:
- .build-template
- .skip-in-fast-mode-template
needs:
- x86_64-pc-linux-gnu-debug
- x86_64-pc-linux-gnu_debug
variables:
BUILD_TARGET: linux64_tsan

@@ -297,7 +282,7 @@ linux64_ubsan-build:
- .build-template
- .skip-in-fast-mode-template
needs:
- x86_64-pc-linux-gnu-debug
- x86_64-pc-linux-gnu_debug
variables:
BUILD_TARGET: linux64_ubsan

@@ -306,7 +291,7 @@ linux64_nowallet-build:
- .build-template
- .skip-in-fast-mode-template
needs:
- x86_64-pc-linux-gnu-nowallet
- x86_64-pc-linux-gnu_nowallet
variables:
BUILD_TARGET: linux64_nowallet

@@ -315,7 +300,7 @@ linux64_multiprocess-build:
- .build-template
- .skip-in-fast-mode-template
needs:
- x86_64-pc-linux-gnu-multiprocess
- x86_64-pc-linux-gnu_multiprocess
variables:
BUILD_TARGET: linux64_multiprocess

@@ -324,7 +309,7 @@ linux64_multiprocess-build:
# - .build-template
# - .skip-in-fast-mode-template
# needs:
# - x86_64-pc-linux-gnu-debug
# - x86_64-pc-linux-gnu_debug
# variables:
# BUILD_TARGET: linux64_valgrind
@@ -20,17 +20,8 @@ mkdir -p $CACHE_DIR/sdk-sources
ln -s $CACHE_DIR/depends ${DEPENDS_DIR}/built
ln -s $CACHE_DIR/sdk-sources ${DEPENDS_DIR}/sdk-sources

mkdir -p ${DEPENDS_DIR}/SDKs

if [ -n "$XCODE_VERSION" ]; then
OSX_SDK_BASENAME="Xcode-${XCODE_VERSION}-${XCODE_BUILD_ID}-extracted-SDK-with-libcxx-headers.tar.gz"
OSX_SDK_PATH="${DEPENDS_DIR}/sdk-sources/${OSX_SDK_BASENAME}"
if [ ! -f "$OSX_SDK_PATH" ]; then
curl --location --fail "${SDK_URL}/${OSX_SDK_BASENAME}" -o "$OSX_SDK_PATH"
fi
if [ -f "$OSX_SDK_PATH" ]; then
tar -C ${DEPENDS_DIR}/SDKs -xf "$OSX_SDK_PATH"
fi
if [[ "${HOST}" == "x86_64-apple-darwin" ]]; then
./contrib/containers/guix/scripts/setup-sdk
fi

make $MAKEJOBS -C depends HOST=$HOST $DEP_OPTS
@@ -18,28 +18,30 @@ export UBSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/

if [ "$BUILD_TARGET" = "arm-linux" ]; then
source ./ci/test/00_setup_env_arm.sh
elif [ "$BUILD_TARGET" = "win64" ]; then
source ./ci/test/00_setup_env_win64.sh
elif [ "$BUILD_TARGET" = "linux64" ]; then
source ./ci/test/00_setup_env_native_qt5.sh
elif [ "$BUILD_TARGET" = "linux64_asan" ]; then
source ./ci/test/00_setup_env_native_asan.sh
elif [ "$BUILD_TARGET" = "linux64_cxx20" ]; then
source ./ci/test/00_setup_env_native_cxx20.sh
elif [ "$BUILD_TARGET" = "linux64_fuzz" ]; then
source ./ci/test/00_setup_env_native_fuzz.sh
elif [ "$BUILD_TARGET" = "linux64_multiprocess" ]; then
source ./ci/test/00_setup_env_native_multiprocess.sh
elif [ "$BUILD_TARGET" = "linux64_nowallet" ]; then
source ./ci/test/00_setup_env_native_nowallet.sh
elif [ "$BUILD_TARGET" = "linux64_sqlite" ]; then
source ./ci/test/00_setup_env_native_sqlite.sh
elif [ "$BUILD_TARGET" = "linux64_tsan" ]; then
source ./ci/test/00_setup_env_native_tsan.sh
elif [ "$BUILD_TARGET" = "linux64_ubsan" ]; then
source ./ci/test/00_setup_env_native_ubsan.sh
elif [ "$BUILD_TARGET" = "linux64_fuzz" ]; then
source ./ci/test/00_setup_env_native_fuzz.sh
elif [ "$BUILD_TARGET" = "linux64_cxx20" ]; then
source ./ci/test/00_setup_env_native_cxx20.sh
elif [ "$BUILD_TARGET" = "linux64_sqlite" ]; then
source ./ci/test/00_setup_env_native_sqlite.sh
elif [ "$BUILD_TARGET" = "linux64_nowallet" ]; then
source ./ci/test/00_setup_env_native_nowallet.sh
elif [ "$BUILD_TARGET" = "linux64_valgrind" ]; then
source ./ci/test/00_setup_env_native_valgrind.sh
elif [ "$BUILD_TARGET" = "mac" ]; then
source ./ci/test/00_setup_env_mac.sh
elif [ "$BUILD_TARGET" = "s390x" ]; then
source ./ci/test/00_setup_env_s390x.sh
elif [ "$BUILD_TARGET" = "win64" ]; then
source ./ci/test/00_setup_env_win64.sh
fi
@@ -42,7 +42,7 @@ echo "Using socketevents mode: $SOCKETEVENTS"
EXTRA_ARGS="--dashd-arg=-socketevents=$SOCKETEVENTS"

set +e
LD_LIBRARY_PATH=$DEPENDS_DIR/$HOST/lib ${TEST_RUNNER_ENV} ./test/functional/test_runner.py --ci --attempts=3 --ansi --combinedlogslen=4000 --timeout-factor=${TEST_RUNNER_TIMEOUT_FACTOR} ${TEST_RUNNER_EXTRA} --failfast --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS $EXTRA_ARGS
LD_LIBRARY_PATH=$DEPENDS_DIR/$HOST/lib ./test/functional/test_runner.py --ci --attempts=3 --ansi --combinedlogslen=4000 --timeout-factor=${TEST_RUNNER_TIMEOUT_FACTOR} ${TEST_RUNNER_EXTRA} --failfast --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS $EXTRA_ARGS
RESULT=$?
set -e
@@ -29,8 +29,8 @@ if [ "$DIRECT_WINE_EXEC_TESTS" = "true" ]; then
wine ./src/test/test_dash.exe
else
if [ "$RUN_UNIT_TESTS_SEQUENTIAL" = "true" ]; then
${TEST_RUNNER_ENV} ./src/test/test_dash --catch_system_errors=no -l test_suite
./src/test/test_dash --catch_system_errors=no -l test_suite
else
${TEST_RUNNER_ENV} make $MAKEJOBS check VERBOSE=1
make $MAKEJOBS check VERBOSE=1
fi
fi
@@ -43,7 +43,6 @@ export RUN_SECURITY_TESTS=${RUN_SECURITY_TESTS:-false}
# This is needed because some ci machines have slow CPU or disk, so sanitizers
# might be slow or a reindex might be waiting on disk IO.
export TEST_RUNNER_TIMEOUT_FACTOR=${TEST_RUNNER_TIMEOUT_FACTOR:-4}
export TEST_RUNNER_ENV=${TEST_RUNNER_ENV:-}
export RUN_FUZZ_TESTS=${RUN_FUZZ_TESTS:-false}
export EXPECTED_TESTS_DURATION_IN_SECONDS=${EXPECTED_TESTS_DURATION_IN_SECONDS:-1000}

@@ -11,5 +11,5 @@ export PACKAGES="cmake python3 llvm clang"
export DEP_OPTS="DEBUG=1 MULTIPROCESS=1"
export GOAL="install"
export TEST_RUNNER_EXTRA="--v2transport"
export BITCOIN_CONFIG="--with-boost-process --enable-debug CC=clang CXX=clang++" # Use clang to avoid OOM
export TEST_RUNNER_ENV="BITCOIND=dash-node"
export BITCOIN_CONFIG="--with-boost-process --enable-debug CC=clang-16 CXX=clang++-16" # Use clang to avoid OOM
export BITCOIND=dash-node # Used in functional tests

@@ -19,7 +19,6 @@ fi
# Use debian to avoid 404 apt errors
export CONTAINER_NAME=ci_s390x
export RUN_UNIT_TESTS=true
export TEST_RUNNER_ENV="LC_ALL=C"
export RUN_FUNCTIONAL_TESTS=true
export GOAL="install"
export BITCOIN_CONFIG="--enable-reduce-exports --disable-gui-tests --with-boost-process" # GUI tests disabled for now, see https://github.com/bitcoin/bitcoin/issues/23730
@@ -7,14 +7,17 @@ ENV DEBIAN_FRONTEND="noninteractive" TZ="Europe/London"
# (zlib1g-dev is needed for the Qt host binary builds, but should not be used by target binaries)
ENV APT_ARGS="-y --no-install-recommends --no-upgrade"

# Install packages for i386; disabled on aarch64 and arm64 hosts
RUN (dpkg --print-architecture | grep -Eq 'aarch64|arm64' || dpkg --add-architecture i386)
RUN (dpkg --print-architecture | grep -Eq 'aarch64|arm64' || (apt-get update && apt-get install $APT_ARGS \
g++-multilib \
wine32) && rm -rf /var/lib/apt/lists/*)

RUN apt-get update && apt-get install $APT_ARGS \
# Install packages for i386 on amd64 hosts, then install common packages
RUN set -ex; \
apt-get update && \
if [ "$(dpkg --print-architecture)" = "amd64" ]; then \
dpkg --add-architecture i386 && \
apt-get update && \
apt-get install $APT_ARGS \
g++-multilib \
wine32; \
fi; \
apt-get install $APT_ARGS \
autotools-dev \
automake \
autoconf \
@@ -23,13 +26,11 @@ RUN apt-get update && apt-get install $APT_ARGS \
bsdmainutils \
curl \
ccache \
clang \
cmake \
g++ \
gettext \
git \
libc++-dev \
libc++abi-dev \
gnupg \
libtool \
libxcb-icccm4 \
libxcb-image0 \
@@ -42,11 +43,38 @@ RUN apt-get update && apt-get install $APT_ARGS \
libxcb-xinerama0 \
libxcb-xkb1 \
libxkbcommon-x11-0 \
wget \
lsb-release \
software-properties-common \
unzip \
wget \
m4 \
pkg-config \
zlib1g-dev
zlib1g-dev \
&& rm -rf /var/lib/apt/lists/*

# Install Clang+LLVM and set it as default
# We don't need all packages but the default set doesn't include some
# packages we want so we will need to install some of them manually.
ARG LLVM_VERSION=16
RUN set -ex; \
echo "Installing LLVM and Clang ${LLVM_VERSION}..."; \
curl -sL https://apt.llvm.org/llvm.sh | bash -s "${LLVM_VERSION}"; \
echo "Installing additional packages..."; \
apt-get update && apt-get install $APT_ARGS \
"clang-format-${LLVM_VERSION}" \
"clang-tidy-${LLVM_VERSION}" \
"libc++-${LLVM_VERSION}-dev" \
"libc++abi-${LLVM_VERSION}-dev" \
"libclang-rt-${LLVM_VERSION}-dev"; \
rm -rf /var/lib/apt/lists/*; \
echo "Setting defaults..."; \
lldbUpdAltArgs="update-alternatives --install /usr/bin/llvm-config llvm-config /usr/bin/llvm-config-${LLVM_VERSION} 100"; \
for binName in clang clang++ clang-format clang-tidy clangd ld.lld lldb lldb-server; do \
lldbUpdAltArgs="${lldbUpdAltArgs} --slave /usr/bin/${binName} ${binName} /usr/bin/${binName}-${LLVM_VERSION}"; \
done; \
sh -c "${lldbUpdAltArgs}";
# LD_LIBRARY_PATH is empty by default, this is the first entry
ENV LD_LIBRARY_PATH="/usr/lib/llvm-${LLVM_VERSION}/lib"

# Python setup
# PYTHON_VERSION should match the value in .python-version
@@ -61,10 +89,11 @@ RUN apt-get update && apt-get install $APT_ARGS \
libreadline-dev \
libsqlite3-dev \
libssl-dev \
llvm \
make \
tk-dev \
xz-utils
xz-utils \
&& rm -rf /var/lib/apt/lists/*

ENV PYENV_ROOT="/usr/local/pyenv"
ENV PATH="${PYENV_ROOT}/shims:${PYENV_ROOT}/bin:${PATH}"
RUN curl https://pyenv.run | bash \
@@ -82,19 +111,38 @@ RUN pip3 install \
pyzmq==22.3.0 \
vulture==2.3

# dash_hash
ARG DASH_HASH_VERSION=1.4.0
RUN git clone --depth 1 --no-tags --branch=${DASH_HASH_VERSION} https://github.com/dashpay/dash_hash
RUN cd dash_hash && pip3 install -r requirements.txt .
RUN set -ex; \
cd /tmp; \
git clone --depth 1 --no-tags --branch=${DASH_HASH_VERSION} https://github.com/dashpay/dash_hash; \
cd dash_hash && pip3 install -r requirements.txt .; \
cd .. && rm -rf dash_hash

ARG CPPCHECK_VERSION=2.13.0
RUN set -ex; \
curl -fL "https://github.com/danmar/cppcheck/archive/${CPPCHECK_VERSION}.tar.gz" -o /tmp/cppcheck.tar.gz; \
mkdir -p /opt/cppcheck && tar -xzf /tmp/cppcheck.tar.gz -C /opt/cppcheck --strip-components=1 && rm /tmp/cppcheck.tar.gz; \
cd /opt/cppcheck; \
mkdir build && cd build && cmake .. && cmake --build . -j "$(( $(nproc) - 1 ))"; \
mkdir /usr/local/share/Cppcheck && ln -s /opt/cppcheck/cfg/ /usr/local/share/Cppcheck/cfg; \
rm -rf /tmp/cppcheck.tar.gz
ENV PATH="/opt/cppcheck/build/bin:${PATH}"

ARG SHELLCHECK_VERSION=v0.7.1
RUN set -ex; \
curl -fL "https://github.com/koalaman/shellcheck/releases/download/${SHELLCHECK_VERSION}/shellcheck-${SHELLCHECK_VERSION}.linux.x86_64.tar.xz" -o /tmp/shellcheck.tar.xz; \
mkdir -p /opt/shellcheck && tar -xf /tmp/shellcheck.tar.xz -C /opt/shellcheck --strip-components=1 && rm /tmp/shellcheck.tar.xz
ENV PATH="/opt/shellcheck:${PATH}"

# Add user with specified (or default) user/group ids and setup configuration files
ARG USER_ID=1000
ARG GROUP_ID=1000

# add user with specified (or default) user/group ids
ENV USER_ID="${USER_ID}"
ENV GROUP_ID="${GROUP_ID}"
RUN groupadd -g ${GROUP_ID} dash
RUN useradd -u ${USER_ID} -g dash -s /bin/bash -m -d /home/dash dash
RUN set -ex; \
groupadd -g ${GROUP_ID} dash; \
useradd -u ${USER_ID} -g dash -s /bin/bash -m -d /home/dash dash; \
mkdir -p /home/dash/.config/gdb; \
echo "add-auto-load-safe-path /usr/lib/llvm-${LLVM_VERSION}/lib" | tee /home/dash/.config/gdb/gdbinit; \
chown ${USER_ID}:${GROUP_ID} -R /home/dash

# Packages needed for all target builds
RUN apt-get update && apt-get install $APT_ARGS \
@@ -111,17 +159,8 @@ RUN apt-get update && apt-get install $APT_ARGS \
valgrind \
wine-stable \
wine64 \
xorriso

ARG CPPCHECK_VERSION=2.13.0
RUN curl -sL "https://github.com/danmar/cppcheck/archive/${CPPCHECK_VERSION}.tar.gz" | tar -xvzf - --directory /tmp/
RUN cd /tmp/cppcheck-${CPPCHECK_VERSION} && mkdir build && cd build && cmake .. && cmake --build . -j 8
ENV PATH="/tmp/cppcheck-${CPPCHECK_VERSION}/build/bin:${PATH}"
RUN mkdir /usr/local/share/Cppcheck && ln -s /tmp/cppcheck-${CPPCHECK_VERSION}/cfg/ /usr/local/share/Cppcheck/cfg

ARG SHELLCHECK_VERSION=v0.7.1
RUN curl -sL "https://github.com/koalaman/shellcheck/releases/download/${SHELLCHECK_VERSION}/shellcheck-${SHELLCHECK_VERSION}.linux.x86_64.tar.xz" | tar --xz -xf - --directory /tmp/
ENV PATH="/tmp/shellcheck-${SHELLCHECK_VERSION}:${PATH}"
xorriso \
&& rm -rf /var/lib/apt/lists/*

# This is a hack. It is needed because gcc-multilib and g++-multilib are conflicting with g++-arm-linux-gnueabihf. This is
# due to gcc-multilib installing the following symbolic link, which is needed for -m32 support. However, this causes
@@ -135,20 +174,6 @@ RUN \
update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix; \
exit 0

ARG LLVM_VERSION=16
# Setup Clang+LLVM support
RUN apt-get update && apt-get install $APT_ARGS \
lsb-release \
software-properties-common \
gnupg \
&& rm -rf /var/lib/apt/lists/*

RUN cd /tmp && \
wget https://apt.llvm.org/llvm.sh && \
chmod +x llvm.sh && \
/tmp/llvm.sh ${LLVM_VERSION} && \
rm -rf /tmp/llvm.sh

RUN \
mkdir -p /src/dash && \
mkdir -p /cache/ccache && \
@@ -1,17 +1,18 @@
version: "3.9"
services:
container:
entrypoint: /bin/bash
build:
context: '..'
dockerfile: './develop/Dockerfile'
tty: true # Equivalent to -t
stdin_open: true # Equivalent to -i
ports:
- "9998:9998" # Mainnet Ports
- "9999:9999"
- "19998:19998" # Testnet Ports
- "19999:19999"
security_opt:
- seccomp:unconfined
stdin_open: true # Equivalent to -i
tty: true # Equivalent to -t

# A note about volumes:
#

@@ -18,7 +18,11 @@ RUN apt-get update && \
sudo \
wget \
xz-utils && \
rm -rf /var/lib/apt/lists/*
rm -rf /var/lib/apt/lists/*; \
targetLocale="en_US.UTF-8"; \
locale-gen ${targetLocale} && \
update-locale LC_ALL=${targetLocale} && \
update-locale LANG=${targetLocale};

ARG guix_download_path=ftp://ftp.gnu.org/gnu/guix
ARG guix_version=1.4.0
@@ -30,8 +34,7 @@ ENV PATH="/usr/local/bin:/usr/local/guix/current/bin:$PATH"

# Application Setup
# https://guix.gnu.org/manual/en/html_node/Application-Setup.html
ENV GUIX_LOCPATH="/usr/local/guix/profile" \
LC_ALL="en_US.UTF-8"
ENV GUIX_LOCPATH="/usr/local/guix/profile"

RUN guix_file_name=guix-binary-${guix_version}.$(uname -m)-linux.tar.xz && \
eval "guix_checksum=\${guix_checksum_$(uname -m)}" && \
@@ -74,18 +77,19 @@ RUN usermod -aG sudo ${USERNAME} && \
echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers

# Copy required files to container
COPY --from=docker_root ./motd.txt /etc/motd
COPY --from=docker_root ./scripts/entrypoint /usr/local/bin/entrypoint
COPY --from=docker_root ./scripts/guix-check /usr/local/bin/guix-check
COPY --from=docker_root ./scripts/guix-start /usr/local/bin/guix-start
COPY --from=docker_root ./motd.txt /etc/motd
COPY --from=docker_root ./scripts/entrypoint /usr/local/bin/entrypoint
COPY --from=docker_root ./scripts/guix-check /usr/local/bin/guix-check
COPY --from=docker_root ./scripts/guix-start /usr/local/bin/guix-start
COPY --from=docker_root ./scripts/setup-sdk /usr/local/bin/setup-sdk

# Create directories for mounting to save/restore cache and grant necessary permissions
RUN mkdir -p \
/home/${USERNAME}/.cache \
/src/dash/depends/{built,sources,work} && \
/src/dash/depends/{built,sources,work}; \
chown -R ${USER_ID}:${GROUP_ID} \
/home/${USERNAME}/.cache \
/src
/src;

WORKDIR "/src/dash"

@@ -1,4 +1,3 @@
version: "3.9"
services:
guix_ubuntu:
build:

@@ -9,19 +9,10 @@ if [[ ! -d "${WORKSPACE_PATH}" || ! "${WORKSPACE_PATH}" = /* || ! -f "${WORKSPAC
exit 1
fi

XCODE_VERSION="15.0"
XCODE_RELEASE="15A240d"
XCODE_ARCHIVE="Xcode-${XCODE_VERSION}-${XCODE_RELEASE}-extracted-SDK-with-libcxx-headers"
XCODE_SOURCE="${XCODE_SOURCE:-https://bitcoincore.org/depends-sources/sdks}"

export SDK_PATH="${SDK_PATH:-${WORKSPACE_PATH}/depends/SDKs}"
export SDK_SRCS="${SDK_PATH:-${WORKSPACE_PATH}/depends/sdk-sources}"

# Check if macOS SDK is present, if not, download it
if [[ ! -d "${SDK_PATH}/${XCODE_ARCHIVE}" ]]; then
echo "Preparing macOS SDK..."
mkdir -p "${SDK_PATH}"
curl -L "${XCODE_SOURCE}/${XCODE_ARCHIVE}.tar.gz" | tar -xz -C "${SDK_PATH}"
fi
./contrib/containers/guix/scripts/setup-sdk

# Add safe.directory option only when WORKSPACE_PATH was specified via cmd-line arguments (happens in CI)
if [[ -n "${1}" ]]; then
contrib/containers/guix/scripts/setup-sdk (new executable file, 27 lines)
@@ -0,0 +1,27 @@
#!/usr/bin/env bash
# Copyright (c) 2024 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

export LC_ALL=C.UTF-8

set -eo pipefail

SDK_URL="${SDK_URL:-https://bitcoincore.org/depends-sources/sdks}"
SDK_PATH="${SDK_PATH:-depends/SDKs}"
SDK_SRCS="${SDK_SOURCES:-depends/sdk-sources}"
XCODE_VERSION="${XCODE_VERSION:-15.0}"
XCODE_RELEASE="${XCODE_RELEASE:-15A240d}"
XCODE_ARCHIVE="Xcode-${XCODE_VERSION}-${XCODE_RELEASE}-extracted-SDK-with-libcxx-headers"
XCODE_AR_PATH="${SDK_SRCS}/${XCODE_ARCHIVE}.tar.gz"

if [ ! -d "${SDK_PATH}/${XCODE_ARCHIVE}" ]; then
if [ ! -f "${XCODE_AR_PATH}" ]; then
echo "Downloading macOS SDK..."
mkdir -p "${SDK_SRCS}"
curl --location --fail "${SDK_URL}/${XCODE_ARCHIVE}.tar.gz" -o "${XCODE_AR_PATH}"
fi
echo "Extracting macOS SDK..."
mkdir -p "${SDK_PATH}"
tar -C "${SDK_PATH}" -xf "${XCODE_AR_PATH}"
fi
@@ -159,9 +159,15 @@ $(package)_config_opts_linux += -dbus-runtime
ifneq ($(LTO),)
$(package)_config_opts_linux += -ltcg
endif
$(package)_config_opts_linux += -platform linux-g++ -xplatform bitcoin-linux-g++
ifneq (,$(findstring -stdlib=libc++,$($(1)_cxx)))
$(package)_config_opts_x86_64_linux = -xplatform linux-clang-libc++

ifneq (,$(findstring clang,$($(package)_cxx)))
ifneq (,$(findstring -stdlib=libc++,$($(package)_cxx)))
$(package)_config_opts_linux += -platform linux-clang-libc++ -xplatform linux-clang-libc++
else
$(package)_config_opts_linux += -platform linux-clang -xplatform linux-clang
endif
else
$(package)_config_opts_linux += -platform linux-g++ -xplatform bitcoin-linux-g++
endif

$(package)_config_opts_mingw32 = -no-opengl
doc/release-notes-23065.md (new file, 15 lines)
@@ -0,0 +1,15 @@
Notable changes
===============

Updated RPCs
------------

- `lockunspent` now optionally takes a third parameter, `persistent`, which
  causes the lock to be written persistently to the wallet database. This
  allows UTXOs to remain locked even after node restarts or crashes.

GUI changes
-----------

- UTXOs which are locked via the GUI are now stored persistently in the
  wallet database, so are not lost on node shutdown or crash.
@@ -178,7 +178,7 @@ Commit your signature for the signed macOS/Windows binaries:

```sh
pushd ./guix.sigs
git add "${VERSION}/${SIGNER}"/all.SHA256SUMS{,.asc}
git commit -m "Add ${SIGNER} ${VERSION} signed binaries signatures"
git commit -m "Add attestations by ${SIGNER} for ${VERSION} codesigned"
git push # Assuming you can push to the guix.sigs tree
popd
```
@@ -220,6 +220,16 @@ static void MutateTxLocktime(CMutableTransaction& tx, const std::string& cmdVal)
tx.nLockTime = (unsigned int) newLocktime;
}

template <typename T>
static T TrimAndParse(const std::string& int_str, const std::string& err)
{
const auto parsed{ToIntegral<T>(TrimString(int_str))};
if (!parsed.has_value()) {
throw std::runtime_error(err + " '" + int_str + "'");
}
return parsed.value();
}

static void MutateTxAddInput(CMutableTransaction& tx, const std::string& strInput)
{
std::vector<std::string> vStrInputParts = SplitString(strInput, ':');
@@ -245,8 +255,9 @@ static void MutateTxAddInput(CMutableTransaction& tx, const std::string& strInpu

// extract the optional sequence number
uint32_t nSequenceIn = CTxIn::SEQUENCE_FINAL;
if (vStrInputParts.size() > 2)
nSequenceIn = std::stoul(vStrInputParts[2]);
if (vStrInputParts.size() > 2) {
nSequenceIn = TrimAndParse<uint32_t>(vStrInputParts.at(2), "invalid TX sequence id");
}

// append to transaction input list
CTxIn txin(txid, vout, CScript(), nSequenceIn);
@@ -324,10 +335,10 @@ static void MutateTxAddOutMultiSig(CMutableTransaction& tx, const std::string& s
CAmount value = ExtractAndValidateValue(vStrInputParts[0]);

// Extract REQUIRED
uint32_t required = stoul(vStrInputParts[1]);
const uint32_t required{TrimAndParse<uint32_t>(vStrInputParts.at(1), "invalid multisig required number")};

// Extract NUMKEYS
uint32_t numkeys = stoul(vStrInputParts[2]);
const uint32_t numkeys{TrimAndParse<uint32_t>(vStrInputParts.at(2), "invalid multisig total number")};

// Validate there are the correct number of pubkeys
if (vStrInputParts.size() < numkeys + 3)
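The hunks above swap `std::stoul` for a strict parse-after-trim helper. The point of the change is that `stoul` silently accepts trailing garbage and negative values (via wraparound), while the new `TrimAndParse` rejects anything that is not a clean integer. The sketch below is only an illustrative stand-in: `TrimAndParseSketch` approximates the behaviour with the standard library, whereas the real helper builds on Dash's `ToIntegral`/`TrimString` utilities.

```cpp
// Minimal sketch (not the project's ToIntegral/TrimString): strict
// parse-after-trim that rejects empty input, trailing junk and overflow.
#include <charconv>
#include <stdexcept>
#include <string>

template <typename T>
static T TrimAndParseSketch(const std::string& int_str, const std::string& err)
{
    // Trim ASCII whitespace from both ends.
    const auto first = int_str.find_first_not_of(" \f\n\r\t\v");
    const auto last = int_str.find_last_not_of(" \f\n\r\t\v");
    const std::string trimmed = (first == std::string::npos) ? "" : int_str.substr(first, last - first + 1);

    T value{};
    const char* begin = trimmed.data();
    const char* end = begin + trimmed.size();
    const auto res = std::from_chars(begin, end, value);
    // Unlike std::stoul, any leftover characters or range error is a hard failure.
    if (res.ec != std::errc{} || res.ptr != end) {
        throw std::runtime_error(err + " '" + int_str + "'");
    }
    return value;
}
```

Usage under these assumptions: `TrimAndParseSketch<uint32_t>(" 42 ", "invalid TX sequence id")` yields 42, while `"42abc"` or `"-1"` throws instead of being silently accepted.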
@@ -314,8 +314,8 @@ void CRollingBloomFilter::insert(Span<const unsigned char> vKey)
/* FastMod works with the upper bits of h, so it is safe to ignore that the lower bits of h are already used for bit. */
uint32_t pos = FastRange32(h, data.size());
/* The lowest bit of pos is ignored, and set to zero for the first bit, and to one for the second. */
data[pos & ~1] = (data[pos & ~1] & ~(((uint64_t)1) << bit)) | ((uint64_t)(nGeneration & 1)) << bit;
data[pos | 1] = (data[pos | 1] & ~(((uint64_t)1) << bit)) | ((uint64_t)(nGeneration >> 1)) << bit;
data[pos & ~1U] = (data[pos & ~1U] & ~(uint64_t{1} << bit)) | (uint64_t(nGeneration & 1)) << bit;
data[pos | 1] = (data[pos | 1] & ~(uint64_t{1} << bit)) | (uint64_t(nGeneration >> 1)) << bit;
}
}

@@ -326,7 +326,7 @@ bool CRollingBloomFilter::contains(Span<const unsigned char> vKey) const
int bit = h & 0x3F;
uint32_t pos = FastRange32(h, data.size());
/* If the relevant bit is not set in either data[pos & ~1] or data[pos | 1], the filter does not contain vKey */
if (!(((data[pos & ~1] | data[pos | 1]) >> bit) & 1)) {
if (!(((data[pos & ~1U] | data[pos | 1]) >> bit) & 1)) {
return false;
}
}
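The `CRollingBloomFilter` hunks only change notation (`~1` to `~1U`, C-style casts to `uint64_t{...}`), but the indexing scheme they touch is worth spelling out: the hash is mapped onto the word array without a modulo, and each filter bit keeps a 2-bit generation counter spread across an even/odd pair of 64-bit words. The sketch below assumes `FastRange32` is the usual multiply-shift range reduction; it is a stand-alone illustration, not the project's implementation.

```cpp
// Sketch of the indexing used above, assuming FastRange32(x, n) is the
// multiply-shift reduction ((uint64_t)x * n) >> 32 rather than x % n.
#include <cassert>
#include <cstdint>
#include <vector>

static uint32_t FastRange32Sketch(uint32_t x, size_t n)
{
    return static_cast<uint32_t>((static_cast<uint64_t>(x) * n) >> 32);
}

int main()
{
    std::vector<uint64_t> data(32, 0); // even/odd pairs of 64-bit words
    const uint32_t h = 0xDEADBEEF;
    const int bit = h & 0x3F;                     // which bit inside the word
    const uint32_t pos = FastRange32Sketch(h, data.size());

    // pos & ~1U selects the even word of the pair, pos | 1 the odd word;
    // together they store a 2-bit generation value for this filter bit.
    const unsigned generation = 2;
    data[pos & ~1U] = (data[pos & ~1U] & ~(uint64_t{1} << bit)) | (uint64_t{generation & 1u} << bit);
    data[pos | 1]   = (data[pos | 1]   & ~(uint64_t{1} << bit)) | (uint64_t{generation >> 1} << bit);

    // "contains" check: the bit must be set in at least one word of the pair.
    assert((((data[pos & ~1U] | data[pos | 1]) >> bit) & 1) != 0);
    return 0;
}
```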
@@ -136,10 +136,10 @@ public:
virtual bool setAddressReceiveRequest(const CTxDestination& dest, const std::string& id, const std::string& value) = 0;

//! Lock coin.
virtual void lockCoin(const COutPoint& output) = 0;
virtual bool lockCoin(const COutPoint& output, const bool write_to_db) = 0;

//! Unlock coin.
virtual void unlockCoin(const COutPoint& output) = 0;
virtual bool unlockCoin(const COutPoint& output) = 0;

//! Return whether coin is locked.
virtual bool isLockedCoin(const COutPoint& output) = 0;
@@ -44,6 +44,7 @@
#endif // ENABLE_WALLET

#include <boost/signals2/connection.hpp>
#include <chrono>
#include <memory>

#include <QApplication>
@@ -397,10 +398,10 @@ void BitcoinApplication::initializeResult(bool success, interfaces::BlockAndHead
connect(paymentServer, &PaymentServer::message, [this](const QString& title, const QString& message, unsigned int style) {
window->message(title, message, style);
});
QTimer::singleShot(100, paymentServer, &PaymentServer::uiReady);
QTimer::singleShot(100ms, paymentServer, &PaymentServer::uiReady);
}
#endif
pollShutdownTimer->start(200);
pollShutdownTimer->start(SHUTDOWN_POLLING_DELAY);
} else {
Q_EMIT splashFinished(); // Make sure splash screen doesn't stick around during shutdown
quit(); // Exit first main loop invocation
@@ -21,6 +21,7 @@
#include <netbase.h>
#include <util/system.h>
#include <util/threadnames.h>
#include <util/time.h>
#include <validation.h>

#include <stdint.h>
@@ -323,7 +324,7 @@ static void BlockTipChanged(ClientModel* clientmodel, SynchronizationState sync_
const bool throttle = (sync_state != SynchronizationState::POST_INIT && !fHeader) || sync_state == SynchronizationState::INIT_REINDEX;
const int64_t now = throttle ? GetTimeMillis() : 0;
int64_t& nLastUpdateNotification = fHeader ? nLastHeaderTipUpdateNotification : nLastBlockTipUpdateNotification;
if (throttle && now < nLastUpdateNotification + MODEL_UPDATE_DELAY) {
if (throttle && now < nLastUpdateNotification + count_milliseconds(MODEL_UPDATE_DELAY)) {
return;
}
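Because `MODEL_UPDATE_DELAY` becomes a `std::chrono::milliseconds` constant (see the guiconstants hunk further down), the integer throttle arithmetic above needs an explicit tick count, which is what `count_milliseconds` from `util/time.h` provides. A minimal sketch of the same pattern, with a local stand-in for that helper:

```cpp
// Sketch of the throttle pattern; CountMillisecondsSketch stands in for the
// project's count_milliseconds() helper from util/time.h.
#include <chrono>
#include <cstdint>

using namespace std::chrono_literals;

static constexpr auto MODEL_UPDATE_DELAY_SKETCH{250ms};

static int64_t CountMillisecondsSketch(std::chrono::milliseconds d) { return d.count(); }

bool ShouldNotify(int64_t now_ms, int64_t last_notification_ms)
{
    // Skip the notification if the previous one fired less than 250 ms ago.
    return now_ms >= last_notification_ms + CountMillisecondsSketch(MODEL_UPDATE_DELAY_SKETCH);
}
```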
@@ -207,7 +207,7 @@ void CoinControlDialog::buttonToggleLockClicked()
item->setIcon(COLUMN_CHECKBOX, QIcon());
}
else{
model->wallet().lockCoin(outpt);
model->wallet().lockCoin(outpt, /* write_to_db = */ true);
item->setDisabled(true);
item->setIcon(COLUMN_CHECKBOX, GUIUtil::getIcon("lock_closed", GUIUtil::ThemedColor::RED));
}
@@ -300,7 +300,7 @@ void CoinControlDialog::lockCoin()
contextMenuItem->setCheckState(COLUMN_CHECKBOX, Qt::Unchecked);

COutPoint outpt(uint256S(contextMenuItem->data(COLUMN_ADDRESS, TxHashRole).toString().toStdString()), contextMenuItem->data(COLUMN_ADDRESS, VOutRole).toUInt());
model->wallet().lockCoin(outpt);
model->wallet().lockCoin(outpt, /* write_to_db = */ true);
contextMenuItem->setDisabled(true);
contextMenuItem->setIcon(COLUMN_CHECKBOX, GUIUtil::getIcon("lock_closed", GUIUtil::ThemedColor::RED));
updateLabelLocked();
@@ -6,10 +6,16 @@
#ifndef BITCOIN_QT_GUICONSTANTS_H
#define BITCOIN_QT_GUICONSTANTS_H

#include <chrono>
#include <cstdint>

/* Milliseconds between model updates */
static const int MODEL_UPDATE_DELAY = 250;
using namespace std::chrono_literals;

/* A delay between model updates */
static constexpr auto MODEL_UPDATE_DELAY{250ms};

/* A delay between shutdown pollings */
static constexpr auto SHUTDOWN_POLLING_DELAY{200ms};

/* AskPassphraseDialog -- Maximum passphrase length */
static const int MAX_PASSPHRASE_SIZE = 1024;
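The GUI delays become typed `std::chrono` constants rather than bare integers, which is why call sites elsewhere in this compare switch from `start(200)` and `singleShot(10000, ...)` to `start(SHUTDOWN_POLLING_DELAY)` and `singleShot(10s, ...)`. The sketch below shows how such constants are consumed; it assumes a Qt version (5.8 or later) where `QTimer::start()` and `QTimer::singleShot()` accept `std::chrono` durations, and the names suffixed `_SKETCH` are illustrative only.

```cpp
// Sketch: feeding chrono-literal constants into Qt timers
// (assumes Qt >= 5.8, where QTimer accepts std::chrono durations).
#include <QObject>
#include <QTimer>
#include <chrono>

using namespace std::chrono_literals;

static constexpr auto SHUTDOWN_POLLING_DELAY_SKETCH{200ms};

void armTimers(QTimer& pollShutdownTimer, QObject* receiver)
{
    pollShutdownTimer.start(SHUTDOWN_POLLING_DELAY_SKETCH);          // instead of start(200)
    QTimer::singleShot(100ms, receiver, [] { /* deferred work */ }); // instead of singleShot(100, ...)
}
```

The design gain is that a mistaken unit (seconds vs milliseconds) no longer compiles silently into the wrong delay.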
@@ -24,6 +24,8 @@
#include <util/underlying.h>

#include <QButtonGroup>
#include <chrono>

#include <QDataWidgetMapper>
#include <QDir>
#include <QIntValidator>
@@ -457,7 +459,7 @@ void OptionsDialog::showRestartWarning(bool fPersistent)
ui->statusLabel->setText(tr("This change would require a client restart."));
// clear non-persistent status label after 10 seconds
// Todo: should perhaps be a class attribute, if we extend the use of statusLabel
QTimer::singleShot(10000, this, &OptionsDialog::clearStatusLabel);
QTimer::singleShot(10s, this, &OptionsDialog::clearStatusLabel);
}
}
@@ -24,10 +24,11 @@
#include <node/interface_ui.h>
#include <policy/fees.h>
#include <txmempool.h>
#include <validation.h>
#include <wallet/coincontrol.h>
#include <wallet/fees.h>
#include <wallet/wallet.h>
#include <validation.h>

#include <chrono>

#include <array>
#include <fstream>
@@ -1080,7 +1081,7 @@ SendConfirmationDialog::SendConfirmationDialog(const QString& title, const QStri
int SendConfirmationDialog::exec()
{
updateYesButton();
countDownTimer.start(1000);
countDownTimer.start(1s);
return QMessageBox::exec();
}
@@ -19,6 +19,8 @@
#include <wallet/wallet.h>
#include <walletinitinterface.h>

#include <chrono>

#include <QApplication>
#include <QTimer>
#include <QMessageBox>
@@ -39,7 +41,7 @@ void EditAddressAndSubmit(
dialog->findChild<QLineEdit*>("labelEdit")->setText(label);
dialog->findChild<QValidatedLineEdit*>("addressEdit")->setText(address);

ConfirmMessage(&warning_text, 5);
ConfirmMessage(&warning_text, 5ms);
dialog->accept();
QCOMPARE(warning_text, expected_msg);
}
@@ -2,6 +2,8 @@
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.

#include <chrono>

#include <QApplication>
#include <QMessageBox>
#include <QPushButton>
@@ -9,7 +11,7 @@
#include <QTimer>
#include <QWidget>

void ConfirmMessage(QString* text, int msec)
void ConfirmMessage(QString* text, std::chrono::milliseconds msec)
{
QTimer::singleShot(msec, [text]() {
for (QWidget* widget : QApplication::topLevelWidgets()) {
@@ -5,7 +5,11 @@
#ifndef BITCOIN_QT_TEST_UTIL_H
#define BITCOIN_QT_TEST_UTIL_H

#include <QString>
#include <chrono>

QT_BEGIN_NAMESPACE
class QString;
QT_END_NAMESPACE

/**
 * Press "Ok" button in message box dialog.
@@ -13,6 +17,6 @@
 * @param text - Optionally store dialog text.
 * @param msec - Number of milliseconds to pause before triggering the callback.
 */
void ConfirmMessage(QString* text = nullptr, int msec = 0);
void ConfirmMessage(QString* text, std::chrono::milliseconds msec);

#endif // BITCOIN_QT_TEST_UTIL_H
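With the default arguments removed from `ConfirmMessage`, every test caller now has to spell out both parameters, passing the delay as a typed duration. A small sketch of the new call shape (the forward declaration mirrors the header above; `ExampleCaller` is hypothetical):

```cpp
// Sketch of a caller after the signature change.
#include <QString>
#include <chrono>

using namespace std::chrono_literals;

void ConfirmMessage(QString* text, std::chrono::milliseconds msec); // as declared in the header above

void ExampleCaller()
{
    QString captured_text;
    ConfirmMessage(&captured_text, 5ms); // previously ConfirmMessage(&captured_text, 5)
}
```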
@@ -25,6 +25,7 @@
#include <qt/recentrequeststablemodel.h>
#include <qt/receiverequestdialog.h>

#include <chrono>
#include <memory>

#include <QAbstractButton>
@@ -23,6 +23,7 @@
#include <optional>

#include <QCalendarWidget>
#include <chrono>
#include <QComboBox>
#include <QDateTimeEdit>
#include <QDesktopServices>
@@ -114,8 +115,8 @@ TransactionView::TransactionView(QWidget* parent) :
amountWidget->setObjectName("amountWidget");
hlayout->addWidget(amountWidget);

// Delay before filtering transactions in ms
static const int input_filter_delay = 200;
// Delay before filtering transactions
static constexpr auto input_filter_delay{200ms};

QTimer* amount_typing_delay = new QTimer(this);
amount_typing_delay->setSingleShot(true);
@@ -21,6 +21,7 @@
#include <wallet/wallet.h>

#include <algorithm>
#include <chrono>

#include <QApplication>
#include <QMessageBox>
@@ -271,12 +272,12 @@ void CreateWalletActivity::createWallet()
flags |= WALLET_FLAG_DESCRIPTORS;
}

QTimer::singleShot(500, worker(), [this, name, flags] {
QTimer::singleShot(500ms, worker(), [this, name, flags] {
std::unique_ptr<interfaces::Wallet> wallet = node().walletLoader().createWallet(name, m_passphrase, flags, m_error_message, m_warning_message);

if (wallet) m_wallet_model = m_wallet_controller->getOrCreateWallet(std::move(wallet));

QTimer::singleShot(500, this, &CreateWalletActivity::finish);
QTimer::singleShot(500ms, this, &CreateWalletActivity::finish);
});
}
@@ -152,6 +152,7 @@ static const CRPCConvertParam vRPCConvertParams[] =
{ "gettxoutsetinfo", 2, "use_index"},
{ "lockunspent", 0, "unlock" },
{ "lockunspent", 1, "transactions" },
{ "lockunspent", 2, "persistent" },
{ "send", 0, "outputs" },
{ "send", 1, "conf_target" },
{ "send", 3, "fee_rate"},
@@ -874,29 +874,30 @@ static RPCHelpMan decoderawtransaction()

static RPCHelpMan decodescript()
{
return RPCHelpMan{"decodescript",
"\nDecode a hex-encoded script.\n",
{
{"hexstring", RPCArg::Type::STR_HEX, RPCArg::Optional::NO, "the hex-encoded script"},
},
RPCResult{
RPCResult::Type::OBJ, "", "",
return RPCHelpMan{
"decodescript",
"\nDecode a hex-encoded script.\n",
{
{"hexstring", RPCArg::Type::STR_HEX, RPCArg::Optional::NO, "the hex-encoded script"},
},
RPCResult{
RPCResult::Type::OBJ, "", "",
{
{RPCResult::Type::STR, "asm", "Script public key"},
{RPCResult::Type::STR, "type", "The output type (e.g. " + GetAllOutputTypes() + ")"},
{RPCResult::Type::STR, "address", /* optional */ true, "Dash address (only if a well-defined address exists)"},
{RPCResult::Type::NUM, "reqSigs", /* optional */ true, "(DEPRECATED, returned only if config option -deprecatedrpc=addresses is passed) Number of required signatures"},
{RPCResult::Type::ARR, "addresses", /* optional */ true, "(DEPRECATED, returned only if config option -deprecatedrpc=addresses is passed) Array of Dash addresses",
{
{RPCResult::Type::STR, "asm", "Script public key"},
{RPCResult::Type::STR, "type", "The output type (e.g. "+GetAllOutputTypes()+")"},
{RPCResult::Type::STR, "address", /* optional */ true, "Dash address (only if a well-defined address exists)"},
{RPCResult::Type::NUM, "reqSigs", /* optional */ true, "(DEPRECATED, returned only if config option -deprecatedrpc=addresses is passed) Number of required signatures"},
{RPCResult::Type::ARR, "addresses", /* optional */ true, "(DEPRECATED, returned only if config option -deprecatedrpc=addresses is passed) Array of Dash addresses",
{
{RPCResult::Type::STR, "address", "Dash address"},
}},
{RPCResult::Type::STR, "p2sh", "address of P2SH script wrapping this redeem script (not returned if the script is already a P2SH)"},
}
},
RPCExamples{
HelpExampleCli("decodescript", "\"hexstring\"")
+ HelpExampleRpc("decodescript", "\"hexstring\"")
},
{RPCResult::Type::STR, "address", "Dash address"},
}},
{RPCResult::Type::STR, "p2sh", "address of P2SH script wrapping this redeem script (not returned if the script is already a P2SH)"},
},
},
RPCExamples{
HelpExampleCli("decodescript", "\"hexstring\"")
+ HelpExampleRpc("decodescript", "\"hexstring\"")
},
[&](const RPCHelpMan& self, const JSONRPCRequest& request) -> UniValue
{
RPCTypeCheck(request.params, {UniValue::VSTR});
@@ -911,11 +912,10 @@ static RPCHelpMan decodescript()
}
ScriptPubKeyToUniv(script, r, /* fIncludeHex */ false);

UniValue type;
std::vector<std::vector<unsigned char>> solutions_data;
const TxoutType which_type{Solver(script, solutions_data)};

type = find_value(r, "type");

if (type.isStr() && type.get_str() != "scripthash") {
if (which_type != TxoutType::SCRIPTHASH) {
// P2SH cannot be wrapped in a P2SH. If this script is already a P2SH,
// don't return the address for a P2SH of the P2SH.
r.pushKV("p2sh", EncodeDestination(ScriptHash(script)));
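The second `decodescript` hunk replaces a string comparison against the already-serialized "type" field with a direct classification via `Solver()`, gating the P2SH wrapping on the `TxoutType` enum. A compressed sketch of that gate, assuming the `Solver` signature from `script/standard.h` as it appears in the hunk:

```cpp
// Sketch: classify the script once and gate P2SH wrapping on the enum,
// instead of re-reading the "type" string from the result object.
#include <script/standard.h>

#include <vector>

bool ShouldWrapInP2SH(const CScript& script)
{
    std::vector<std::vector<unsigned char>> solutions_data;
    const TxoutType which_type{Solver(script, solutions_data)};
    // A P2SH script must not be wrapped in another P2SH.
    return which_type != TxoutType::SCRIPTHASH;
}
```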
@@ -461,7 +461,7 @@ ssize_t FuzzedFileProvider::write(void* cookie, const char* buf, size_t size)
SetFuzzedErrNo(fuzzed_file->m_fuzzed_data_provider);
const ssize_t n = fuzzed_file->m_fuzzed_data_provider.ConsumeIntegralInRange<ssize_t>(0, size);
if (AdditionOverflow(fuzzed_file->m_offset, (int64_t)n)) {
return fuzzed_file->m_fuzzed_data_provider.ConsumeBool() ? 0 : -1;
return 0;
}
fuzzed_file->m_offset += n;
return n;
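The `AdditionOverflow` guard above exists so that `m_offset += n` can never perform signed overflow, which would be undefined behaviour. The following stand-in shows the spirit of such a predicate; it is not the project's helper, just a portable sketch of the same check.

```cpp
// Sketch of a portable signed-addition overflow predicate (stand-in for the
// AdditionOverflow() helper used above).
#include <cstdint>
#include <limits>

static bool AdditionOverflowSketch(int64_t i, int64_t j)
{
    if (j > 0 && i > std::numeric_limits<int64_t>::max() - j) return true; // would wrap past max
    if (j < 0 && i < std::numeric_limits<int64_t>::min() - j) return true; // would wrap past min
    return false;
}
```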
@@ -124,10 +124,9 @@ size_t CTxMemPoolEntry::GetTxSize() const
return GetVirtualTransactionSize(nTxSize, sigOpCount);
}

// Update the given tx for any in-mempool descendants.
// Assumes that CTxMemPool::m_children is correct for the given tx and all
// descendants.
void CTxMemPool::UpdateForDescendants(txiter updateIt, cacheMap &cachedDescendants, const std::set<uint256> &setExclude)
void CTxMemPool::UpdateForDescendants(txiter updateIt, cacheMap& cachedDescendants,
const std::set<uint256>& setExclude, std::set<uint256>& descendants_to_remove,
uint64_t ancestor_size_limit, uint64_t ancestor_count_limit)
{
CTxMemPoolEntry::Children stageEntries, descendants;
stageEntries = updateIt->GetMemPoolChildrenConst();
@@ -164,17 +163,18 @@ void CTxMemPool::UpdateForDescendants(txiter updateIt, cacheMap &cachedDescendan
cachedDescendants[updateIt].insert(mapTx.iterator_to(descendant));
// Update ancestor state for each descendant
mapTx.modify(mapTx.iterator_to(descendant), update_ancestor_state(updateIt->GetTxSize(), updateIt->GetModifiedFee(), 1, updateIt->GetSigOpCount()));
// Don't directly remove the transaction here -- doing so would
// invalidate iterators in cachedDescendants. Mark it for removal
// by inserting into descendants_to_remove.
if (descendant.GetCountWithAncestors() > ancestor_count_limit || descendant.GetSizeWithAncestors() > ancestor_size_limit) {
descendants_to_remove.insert(descendant.GetTx().GetHash());
}
}
}
mapTx.modify(updateIt, update_descendant_state(modifySize, modifyFee, modifyCount));
}

// vHashesToUpdate is the set of transaction hashes from a disconnected block
// which has been re-added to the mempool.
// for each entry, look for descendants that are outside vHashesToUpdate, and
// add fee/size information for such descendants to the parent.
// for each such descendant, also update the ancestor state to include the parent.
void CTxMemPool::UpdateTransactionsFromBlock(const std::vector<uint256> &vHashesToUpdate)
void CTxMemPool::UpdateTransactionsFromBlock(const std::vector<uint256> &vHashesToUpdate, uint64_t ancestor_size_limit, uint64_t ancestor_count_limit)
{
AssertLockHeld(cs);
// For each entry in vHashesToUpdate, store the set of in-mempool, but not
@@ -186,6 +186,8 @@ void CTxMemPool::UpdateTransactionsFromBlock(const std::vector<uint256> &vHashes
// accounted for in the state of their ancestors)
std::set<uint256> setAlreadyIncluded(vHashesToUpdate.begin(), vHashesToUpdate.end());

std::set<uint256> descendants_to_remove;

// Iterate in reverse, so that whenever we are looking at a transaction
// we are sure that all in-mempool descendants have already been processed.
// This maximizes the benefit of the descendant cache and guarantees that
@@ -215,7 +217,15 @@ void CTxMemPool::UpdateTransactionsFromBlock(const std::vector<uint256> &vHashes
}
}
} // release epoch guard for UpdateForDescendants
UpdateForDescendants(it, mapMemPoolDescendantsToUpdate, setAlreadyIncluded);
UpdateForDescendants(it, mapMemPoolDescendantsToUpdate, setAlreadyIncluded, descendants_to_remove, ancestor_size_limit, ancestor_count_limit);
}

for (const auto& txid : descendants_to_remove) {
// This txid may have been removed already in a prior call to removeRecursive.
// Therefore we ensure it is not yet removed already.
if (const std::optional<txiter> txiter = GetIter(txid)) {
removeRecursive((*txiter)->GetTx(), MemPoolRemovalReason::SIZELIMIT);
}
}
}
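The comment in the hunk above states the key constraint: entries that violate the ancestor limits cannot be removed while `cachedDescendants` is still being walked, because removal would invalidate iterators the walk depends on. Instead their txids are collected in `descendants_to_remove` and erased in a second pass, re-checked with `GetIter` since an earlier `removeRecursive` may have already dropped them. The following is a generic, self-contained sketch of that collect-then-erase pattern, not mempool code.

```cpp
// Generic sketch of the two-phase "mark, then remove" pattern used above:
// never erase from a container while other iterators into it are still live.
#include <map>
#include <set>
#include <string>

void PruneOversized(std::map<std::string, int>& entries, int limit)
{
    std::set<std::string> to_remove;
    for (const auto& [key, weight] : entries) {
        if (weight > limit) to_remove.insert(key); // phase 1: only record the key
    }
    for (const auto& key : to_remove) {
        auto it = entries.find(key);               // phase 2: re-lookup, it may already be gone
        if (it != entries.end()) entries.erase(it);
    }
}
```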
|
@ -675,16 +675,25 @@ public:
|
||||
*/
|
||||
void RemoveStaged(setEntries& stage, bool updateDescendants, MemPoolRemovalReason reason) EXCLUSIVE_LOCKS_REQUIRED(cs);
|
||||
|
||||
/** When adding transactions from a disconnected block back to the mempool,
|
||||
* new mempool entries may have children in the mempool (which is generally
|
||||
* not the case when otherwise adding transactions).
|
||||
* UpdateTransactionsFromBlock() will find child transactions and update the
|
||||
* descendant state for each transaction in vHashesToUpdate (excluding any
|
||||
* child transactions present in vHashesToUpdate, which are already accounted
|
||||
* for). Note: vHashesToUpdate should be the set of transactions from the
|
||||
* disconnected block that have been accepted back into the mempool.
|
||||
/** UpdateTransactionsFromBlock is called when adding transactions from a
|
||||
* disconnected block back to the mempool, new mempool entries may have
|
||||
* children in the mempool (which is generally not the case when otherwise
|
||||
* adding transactions).
|
||||
* @post updated descendant state for descendants of each transaction in
|
||||
* vHashesToUpdate (excluding any child transactions present in
|
||||
* vHashesToUpdate, which are already accounted for). Updated state
|
||||
* includes add fee/size information for such descendants to the
|
||||
* parent and updated ancestor state to include the parent.
|
||||
*
|
||||
* @param[in] vHashesToUpdate The set of txids from the
|
||||
* disconnected block that have been accepted back into the mempool.
|
||||
* @param[in] ancestor_size_limit The maximum allowed size in virtual
|
||||
* bytes of an entry and its ancestors
|
||||
* @param[in] ancestor_count_limit The maximum allowed number of
|
||||
* transactions including the entry and its ancestors.
|
||||
*/
|
||||
void UpdateTransactionsFromBlock(const std::vector<uint256>& vHashesToUpdate) EXCLUSIVE_LOCKS_REQUIRED(cs, cs_main) LOCKS_EXCLUDED(m_epoch);
|
||||
void UpdateTransactionsFromBlock(const std::vector<uint256>& vHashesToUpdate,
|
||||
uint64_t ancestor_size_limit, uint64_t ancestor_count_limit) EXCLUSIVE_LOCKS_REQUIRED(cs, cs_main) LOCKS_EXCLUDED(m_epoch);
|
||||
|
||||
/** Try to calculate all in-mempool ancestors of entry.
|
||||
* (these are all calculated including the tx itself)
|
||||
@ -828,19 +837,38 @@ private:
|
||||
/** UpdateForDescendants is used by UpdateTransactionsFromBlock to update
|
||||
* the descendants for a single transaction that has been added to the
|
||||
* mempool but may have child transactions in the mempool, eg during a
|
||||
* chain reorg. setExclude is the set of descendant transactions in the
|
||||
* mempool that must not be accounted for (because any descendants in
|
||||
* setExclude were added to the mempool after the transaction being
|
||||
* updated and hence their state is already reflected in the parent
|
||||
* state).
|
||||
* chain reorg.
|
||||
*
|
||||
* cachedDescendants will be updated with the descendants of the transaction
|
||||
* being updated, so that future invocations don't need to walk the
|
||||
* same transaction again, if encountered in another transaction chain.
|
||||
* @pre CTxMemPool::m_children is correct for the given tx and all
|
||||
* descendants.
|
||||
* @pre cachedDescendants is an accurate cache where each entry has all
|
||||
* descendants of the corresponding key, including those that should
|
||||
* be removed for violation of ancestor limits.
|
||||
* @post if updateIt has any non-excluded descendants, cachedDescendants has
|
||||
* a new cache line for updateIt.
|
||||
* @post descendants_to_remove has a new entry for any descendant which exceeded
|
||||
* ancestor limits relative to updateIt.
|
||||
*
|
||||
* @param[in] updateIt the entry to update for its descendants
|
||||
* @param[in,out] cachedDescendants a cache where each line corresponds to all
|
||||
* descendants. It will be updated with the descendants of the transaction
|
||||
* being updated, so that future invocations don't need to walk the same
|
||||
* transaction again, if encountered in another transaction chain.
|
||||
* @param[in] setExclude the set of descendant transactions in the mempool
|
||||
* that must not be accounted for (because any descendants in setExclude
|
||||
* were added to the mempool after the transaction being updated and hence
|
||||
* their state is already reflected in the parent state).
|
||||
* @param[out] descendants_to_remove Populated with the txids of entries that
|
||||
* exceed ancestor limits. It's the responsibility of the caller to
|
||||
* removeRecursive them.
|
||||
* @param[in] ancestor_size_limit the max number of ancestral bytes allowed
|
||||
* for any descendant
|
||||
* @param[in] ancestor_count_limit the max number of ancestor transactions
|
||||
* allowed for any descendant
|
||||
*/
|
||||
void UpdateForDescendants(txiter updateIt,
|
||||
cacheMap &cachedDescendants,
|
||||
const std::set<uint256> &setExclude) EXCLUSIVE_LOCKS_REQUIRED(cs);
|
||||
void UpdateForDescendants(txiter updateIt, cacheMap& cachedDescendants,
|
||||
const std::set<uint256>& setExclude, std::set<uint256>& descendants_to_remove,
|
||||
uint64_t ancestor_size_limit, uint64_t ancestor_count_limit) EXCLUSIVE_LOCKS_REQUIRED(cs);
|
||||
/** Update ancestors of hash to add/remove it as a descendant transaction. */
|
||||
void UpdateAncestorsOf(bool add, txiter hash, setEntries &setAncestors) EXCLUSIVE_LOCKS_REQUIRED(cs);
|
||||
/** Set ancestor state for an entry */
|
||||
|
@@ -365,7 +365,9 @@ void CChainState::MaybeUpdateMempoolForReorg(
// previously-confirmed transactions back to the mempool.
// UpdateTransactionsFromBlock finds descendants of any transactions in
// the disconnectpool that were added back and cleans up the mempool state.
m_mempool->UpdateTransactionsFromBlock(vHashUpdate);
const uint64_t ancestor_count_limit = gArgs.GetArg("-limitancestorcount", DEFAULT_ANCESTOR_LIMIT);
const uint64_t ancestor_size_limit = gArgs.GetArg("-limitancestorsize", DEFAULT_ANCESTOR_SIZE_LIMIT) * 1000;
m_mempool->UpdateTransactionsFromBlock(vHashUpdate, ancestor_size_limit, ancestor_count_limit);

// Predicate to use for filtering transactions in removeForReorg.
// Checks whether the transaction is still final and, if it spends a coinbase output, mature.
@@ -238,15 +238,17 @@ public:
WalletBatch batch{m_wallet->GetDatabase()};
return m_wallet->SetAddressReceiveRequest(batch, dest, id, value);
}
void lockCoin(const COutPoint& output) override
bool lockCoin(const COutPoint& output, const bool write_to_db) override
{
LOCK(m_wallet->cs_wallet);
return m_wallet->LockCoin(output);
std::unique_ptr<WalletBatch> batch = write_to_db ? std::make_unique<WalletBatch>(m_wallet->GetDatabase()) : nullptr;
return m_wallet->LockCoin(output, batch.get());
}
void unlockCoin(const COutPoint& output) override
bool unlockCoin(const COutPoint& output) override
{
LOCK(m_wallet->cs_wallet);
return m_wallet->UnlockCoin(output);
std::unique_ptr<WalletBatch> batch = std::make_unique<WalletBatch>(m_wallet->GetDatabase());
return m_wallet->UnlockCoin(output, batch.get());
}
bool isLockedCoin(const COutPoint& output) override
{
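In the interface implementation above, persistence is opt-in: a `WalletBatch` is only constructed when `write_to_db` is true, and a null batch pointer tells the wallet to keep the lock in memory only, while unlocking always opens a batch so that a previously persisted lock is also erased from the database. The sketch below isolates that nullable-batch idiom with stand-in types; it is not the real `CWallet`/`WalletBatch` code.

```cpp
// Sketch of the nullable-batch idiom (BatchStub/WalletStub are stand-ins,
// not the real WalletBatch/CWallet classes).
#include <memory>

struct BatchStub { /* writes go to the wallet database */ };

struct WalletStub {
    bool LockCoinImpl(BatchStub* batch)
    {
        // ... update the in-memory set of locked coins ...
        if (batch) {
            // ... additionally persist the lock through the batch ...
        }
        return true;
    }
};

bool lockCoinSketch(WalletStub& wallet, bool write_to_db)
{
    // Only pay for a database batch when the caller asked for persistence.
    std::unique_ptr<BatchStub> batch = write_to_db ? std::make_unique<BatchStub>() : nullptr;
    return wallet.LockCoinImpl(batch.get());
}
```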
@ -2204,8 +2204,9 @@ static RPCHelpMan lockunspent()
                "If no transaction outputs are specified when unlocking then all current locked transaction outputs are unlocked.\n"
                "A locked transaction output will not be chosen by automatic coin selection, when spending Dash.\n"
                "Manually selected coins are automatically unlocked.\n"
                "Locks are stored in memory only. Nodes start with zero locked outputs, and the locked output list\n"
                "is always cleared (by virtue of process exit) when a node stops or fails.\n"
                "Locks are stored in memory only, unless persistent=true, in which case they will be written to the\n"
                "wallet database and loaded on node start. Unwritten (persistent=false) locks are always cleared\n"
                "(by virtue of process exit) when a node stops or fails. Unlocking will clear both persistent and not.\n"
                "Also see the listunspent call\n",
                {
                    {"unlock", RPCArg::Type::BOOL, RPCArg::Optional::NO, "Whether to unlock (true) or lock (false) the specified transactions"},

@ -2217,6 +2218,7 @@ static RPCHelpMan lockunspent()
                            {"vout", RPCArg::Type::NUM, RPCArg::Optional::NO, "The output number"},
                        },
                    },
                    {"persistent", RPCArg::Type::BOOL, RPCArg::Default{false}, "Whether to write/erase this lock in the wallet database, or keep the change in memory only. Ignored for unlocking."},
                },
            },
        },

@ -2232,6 +2234,8 @@ static RPCHelpMan lockunspent()
            + HelpExampleCli("listlockunspent", "") +
            "\nUnlock the transaction again\n"
            + HelpExampleCli("lockunspent", "true \"[{\\\"txid\\\":\\\"a08e6907dbbd3d809776dbfc5d82e371b764ed838b5655e72f463568df1aadf0\\\",\\\"vout\\\":1}]\"") +
            "\nLock the transaction persistently in the wallet database\n"
            + HelpExampleCli("lockunspent", "false \"[{\\\"txid\\\":\\\"a08e6907dbbd3d809776dbfc5d82e371b764ed838b5655e72f463568df1aadf0\\\",\\\"vout\\\":1}]\" true") +
            "\nAs a JSON-RPC call\n"
            + HelpExampleRpc("lockunspent", "false, \"[{\\\"txid\\\":\\\"a08e6907dbbd3d809776dbfc5d82e371b764ed838b5655e72f463568df1aadf0\\\",\\\"vout\\\":1}]\"")
        },

@ -2250,9 +2254,13 @@ static RPCHelpMan lockunspent()

    bool fUnlock = request.params[0].get_bool();

    const bool persistent{request.params[2].isNull() ? false : request.params[2].get_bool()};

    if (request.params[1].isNull()) {
        if (fUnlock)
            pwallet->UnlockAllCoins();
        if (fUnlock) {
            if (!pwallet->UnlockAllCoins())
                throw JSONRPCError(RPC_WALLET_ERROR, "Unlocking coins failed");
        }
        return true;
    }

@ -2303,17 +2311,24 @@ static RPCHelpMan lockunspent()
            throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected locked output");
        }

        if (!fUnlock && is_locked) {
        if (!fUnlock && is_locked && !persistent) {
            throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, output already locked");
        }

        outputs.push_back(outpt);
    }

    std::unique_ptr<WalletBatch> batch = nullptr;
    // Unlock is always persistent
    if (fUnlock || persistent) batch = std::make_unique<WalletBatch>(pwallet->GetDatabase());

    // Atomically set (un)locked status for the outputs.
    for (const COutPoint& outpt : outputs) {
        if (fUnlock) pwallet->UnlockCoin(outpt);
        else pwallet->LockCoin(outpt);
        if (fUnlock) {
            if (!pwallet->UnlockCoin(outpt, batch.get())) throw JSONRPCError(RPC_WALLET_ERROR, "Unlocking coin failed");
        } else {
            if (!pwallet->LockCoin(outpt, batch.get())) throw JSONRPCError(RPC_WALLET_ERROR, "Locking coin failed");
        }
    }

    return true;
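The help text above describes the new behaviour: a third "persistent" argument writes the lock to the wallet database so it survives restarts, while unlocking always erases any stored record. A minimal sketch of exercising this over JSON-RPC follows; the endpoint and credentials are assumptions for illustration, not values taken from this changeset.

# Illustrative JSON-RPC sketch: lock an output with persistent=true and list it.
# Assumes a local dashd with RPC enabled; the URL and credentials below are made up.
import base64
import json
import urllib.request

RPC_URL = "http://127.0.0.1:9998/"                        # assumed RPC endpoint
AUTH = base64.b64encode(b"rpcuser:rpcpassword").decode()  # assumed credentials

def rpc(method, *params):
    body = json.dumps({"jsonrpc": "1.0", "id": "lock-demo", "method": method, "params": list(params)}).encode()
    req = urllib.request.Request(RPC_URL, data=body, headers={
        "Authorization": "Basic " + AUTH,
        "Content-Type": "application/json",
    })
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read())["result"]

utxo = rpc("listunspent")[0]
outpoint = [{"txid": utxo["txid"], "vout": utxo["vout"]}]
rpc("lockunspent", False, outpoint, True)   # persistent=true: written to the wallet DB
print(rpc("listlockunspent"))               # the locked outpoint should be reported here
rpc("lockunspent", True, outpoint)          # unlocking erases the stored lock as well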
@ -625,12 +625,17 @@ bool CWallet::IsSpent(const uint256& hash, unsigned int n) const
    return false;
}

void CWallet::AddToSpends(const COutPoint& outpoint, const uint256& wtxid)
void CWallet::AddToSpends(const COutPoint& outpoint, const uint256& wtxid, WalletBatch* batch)
{
    mapTxSpends.insert(std::make_pair(outpoint, wtxid));
    setWalletUTXO.erase(outpoint);

    setLockedCoins.erase(outpoint);
    if (batch) {
        UnlockCoin(outpoint, batch);
    } else {
        WalletBatch temp_batch(GetDatabase());
        UnlockCoin(outpoint, &temp_batch);
    }

    std::pair<TxSpends::iterator, TxSpends::iterator> range;
    range = mapTxSpends.equal_range(outpoint);

@ -638,7 +643,7 @@ void CWallet::AddToSpends(const COutPoint& outpoint, const uint256& wtxid)
}

void CWallet::AddToSpends(const uint256& wtxid)
void CWallet::AddToSpends(const uint256& wtxid, WalletBatch* batch)
{
    auto it = mapWallet.find(wtxid);
    assert(it != mapWallet.end());

@ -647,7 +652,7 @@ void CWallet::AddToSpends(const uint256& wtxid)
        return;

    for (const CTxIn& txin : thisTx.tx->vin)
        AddToSpends(txin.prevout, wtxid);
        AddToSpends(txin.prevout, wtxid, batch);
}

bool CWallet::EncryptWallet(const SecureString& strWalletPassphrase)

@ -915,7 +920,7 @@ CWalletTx* CWallet::AddToWallet(CTransactionRef tx, const CWalletTx::Confirmatio
        wtx.nOrderPos = IncOrderPosNext(&batch);
        wtx.m_it_wtxOrdered = wtxOrdered.insert(std::make_pair(wtx.nOrderPos, &wtx));
        wtx.nTimeSmart = ComputeTimeSmart(wtx);
        AddToSpends(hash);
        AddToSpends(hash, &batch);

        std::vector<std::pair<const CTransactionRef&, unsigned int>> outputs;
        for(unsigned int i = 0; i < wtx.tx->vout.size(); ++i) {

@ -4444,7 +4449,7 @@ void ReserveDestination::ReturnDestination()
    address = CNoDestination();
}

void CWallet::LockCoin(const COutPoint& output)
bool CWallet::LockCoin(const COutPoint& output, WalletBatch* batch)
{
    AssertLockHeld(cs_wallet);
    setLockedCoins.insert(output);

@ -4453,23 +4458,38 @@ void CWallet::LockCoin(const COutPoint& output)

    fAnonymizableTallyCached = false;
    fAnonymizableTallyCachedNonDenom = false;
    if (batch) {
        return batch->WriteLockedUTXO(output);
    }
    return true;
}

void CWallet::UnlockCoin(const COutPoint& output)
bool CWallet::UnlockCoin(const COutPoint& output, WalletBatch* batch)
{
    AssertLockHeld(cs_wallet);
    setLockedCoins.erase(output);
    std::map<uint256, CWalletTx>::iterator it = mapWallet.find(output.hash);
    if (it != mapWallet.end()) it->second.MarkDirty(); // recalculate all credits for this tx

    fAnonymizableTallyCached = false;
    fAnonymizableTallyCachedNonDenom = false;

    bool was_locked = setLockedCoins.erase(output);
    if (batch && was_locked) {
        return batch->EraseLockedUTXO(output);
    }
    return true;
}

void CWallet::UnlockAllCoins()
bool CWallet::UnlockAllCoins()
{
    AssertLockHeld(cs_wallet);
    bool success = true;
    WalletBatch batch(GetDatabase());
    for (auto it = setLockedCoins.begin(); it != setLockedCoins.end(); ++it) {
        success &= batch.EraseLockedUTXO(*it);
    }
    setLockedCoins.clear();
    return success;
}

bool CWallet::IsLockedCoin(uint256 hash, unsigned int n) const
@ -759,8 +759,8 @@ private:
     */
    typedef std::multimap<COutPoint, uint256> TxSpends;
    TxSpends mapTxSpends GUARDED_BY(cs_wallet);
    void AddToSpends(const COutPoint& outpoint, const uint256& wtxid) EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    void AddToSpends(const uint256& wtxid) EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    void AddToSpends(const COutPoint& outpoint, const uint256& wtxid, WalletBatch* batch = nullptr) EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    void AddToSpends(const uint256& wtxid, WalletBatch* batch = nullptr) EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);

    std::set<COutPoint> setWalletUTXO;
    mutable std::map<COutPoint, int> mapOutpointRoundsCache GUARDED_BY(cs_wallet);

@ -1033,9 +1033,9 @@ public:
    std::vector<OutputGroup> GroupOutputs(const std::vector<COutput>& outputs, bool separate_coins, const CFeeRate& effective_feerate, const CFeeRate& long_term_feerate, const CoinEligibilityFilter& filter, bool positive_only) const;

    bool IsLockedCoin(uint256 hash, unsigned int n) const EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    void LockCoin(const COutPoint& output) EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    void UnlockCoin(const COutPoint& output) EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    void UnlockAllCoins() EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    bool LockCoin(const COutPoint& output, WalletBatch* batch = nullptr) EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    bool UnlockCoin(const COutPoint& output, WalletBatch* batch = nullptr) EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    bool UnlockAllCoins() EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    void ListLockedCoins(std::vector<COutPoint>& vOutpts) const EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
    void ListProTxCoins(std::vector<COutPoint>& vOutpts) const EXCLUSIVE_LOCKS_REQUIRED(cs_wallet);
@ -47,6 +47,7 @@ const std::string HDCHAIN{"hdchain"};
const std::string HDPUBKEY{"hdpubkey"};
const std::string KEYMETA{"keymeta"};
const std::string KEY{"key"};
const std::string LOCKED_UTXO{"lockedutxo"};
const std::string MASTER_KEY{"mkey"};
const std::string MINVERSION{"minversion"};
const std::string NAME{"name"};

@ -308,6 +309,16 @@ bool WalletBatch::WriteDescriptorCacheItems(const uint256& desc_id, const Descri
    return true;
}

bool WalletBatch::WriteLockedUTXO(const COutPoint& output)
{
    return WriteIC(std::make_pair(DBKeys::LOCKED_UTXO, std::make_pair(output.hash, output.n)), uint8_t{'1'});
}

bool WalletBatch::EraseLockedUTXO(const COutPoint& output)
{
    return EraseIC(std::make_pair(DBKeys::LOCKED_UTXO, std::make_pair(output.hash, output.n)));
}

class CWalletScanState {
public:
    unsigned int nKeys{0};

@ -709,6 +720,12 @@ ReadKeyValue(CWallet* pwallet, CDataStream& ssKey, CDataStream& ssValue,

            wss.m_descriptor_crypt_keys.insert(std::make_pair(std::make_pair(desc_id, pubkey.GetID()), std::make_pair(pubkey, privkey)));
            wss.fIsEncrypted = true;
        } else if (strType == DBKeys::LOCKED_UTXO) {
            uint256 hash;
            uint32_t n;
            ssKey >> hash;
            ssKey >> n;
            pwallet->LockCoin(COutPoint(hash, n));
        } else if (strType != DBKeys::BESTBLOCK && strType != DBKeys::BESTBLOCK_NOMERKLE &&
                   strType != DBKeys::MINVERSION && strType != DBKeys::ACENTRY &&
                   strType != DBKeys::VERSION && strType != DBKeys::SETTINGS &&
@ -74,6 +74,7 @@ extern const std::string HDCHAIN;
extern const std::string HDPUBKEY;
extern const std::string KEY;
extern const std::string KEYMETA;
extern const std::string LOCKED_UTXO;
extern const std::string MASTER_KEY;
extern const std::string MINVERSION;
extern const std::string NAME;

@ -219,6 +220,9 @@ public:
    bool WriteDescriptorLastHardenedCache(const CExtPubKey& xpub, const uint256& desc_id, uint32_t key_exp_index);
    bool WriteDescriptorCacheItems(const uint256& desc_id, const DescriptorCache& cache);

    bool WriteLockedUTXO(const COutPoint& output);
    bool EraseLockedUTXO(const COutPoint& output);

    /// Write destination data key,value tuple to database
    bool WriteDestData(const std::string &address, const std::string &key, const std::string &value);
    /// Erase destination data tuple from wallet database
@ -6,8 +6,11 @@

from decimal import Decimal

from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_greater_than, assert_raises_rpc_error, create_confirmed_utxos, create_lots_of_big_transactions, gen_return_txouts
from test_framework.util import assert_equal, assert_greater_than, assert_raises_rpc_error, gen_return_txouts
from test_framework.wallet import MiniWallet


class MempoolLimitTest(BitcoinTestFramework):
    def set_test_params(self):

@ -20,55 +23,59 @@ class MempoolLimitTest(BitcoinTestFramework):
        ]]
        self.supports_cli = False

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
    def send_large_txs(self, node, miniwallet, txouts, fee_rate, tx_batch_size):
        for _ in range(tx_batch_size):
            tx = miniwallet.create_self_transfer(from_node=node, fee_rate=fee_rate)['tx']
            for txout in txouts:
                tx.vout.append(txout)
            miniwallet.sendrawtransaction(from_node=node, tx_hex=tx.serialize().hex())

    def run_test(self):
        txouts = gen_return_txouts()
        relayfee = self.nodes[0].getnetworkinfo()['relayfee']
        node=self.nodes[0]
        miniwallet = MiniWallet(node)
        relayfee = node.getnetworkinfo()['relayfee']

        self.log.info('Check that mempoolminfee is minrelytxfee')
        assert_equal(self.nodes[0].getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
        assert_equal(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
        self.log.info('Check that mempoolminfee is minrelaytxfee')
        assert_equal(node.getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
        assert_equal(node.getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))

        txids = []
        utxos = create_confirmed_utxos(self, relayfee, self.nodes[0], 491)
        tx_batch_size = 25
        num_of_batches = 3
        # Generate UTXOs to flood the mempool
        # 1 to create a tx initially that will be evicted from the mempool later
        # 3 batches of multiple transactions with a fee rate much higher than the previous UTXO
        # And 1 more to verify that this tx does not get added to the mempool with a fee rate less than the mempoolminfee
        self.generate(miniwallet, 1 + (num_of_batches * tx_batch_size) + 1)

        # Mine 99 blocks so that the UTXOs are allowed to be spent
        self.generate(node, COINBASE_MATURITY - 1)

        self.log.info('Create a mempool tx that will be evicted')
        us0 = utxos.pop()
        inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
        outputs = {self.nodes[0].getnewaddress() : 0.0001}
        tx = self.nodes[0].createrawtransaction(inputs, outputs)
        self.nodes[0].settxfee(relayfee) # specifically fund this tx with low fee
        txF = self.nodes[0].fundrawtransaction(tx)
        self.nodes[0].settxfee(0) # return to automatic fee selection
        txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex'])
        txid = self.nodes[0].sendrawtransaction(txFS['hex'])
        tx_to_be_evicted_id = miniwallet.send_self_transfer(from_node=node, fee_rate=relayfee)["txid"]

        relayfee = self.nodes[0].getnetworkinfo()['relayfee']
        base_fee = relayfee*100
        for i in range (3):
            txids.append([])
            txids[i] = create_lots_of_big_transactions(self.nodes[0], txouts, utxos[30*i:30*i+30], 30, (i+1)*base_fee)
        # Increase the tx fee rate massively to give the subsequent transactions a higher priority in the mempool
        base_fee = relayfee * 1000

        self.log.info("Fill up the mempool with txs with higher fee rate")
        for batch_of_txid in range(num_of_batches):
            fee_rate=(batch_of_txid + 1) * base_fee
            self.send_large_txs(node, miniwallet, txouts, fee_rate, tx_batch_size)

        self.log.info('The tx should be evicted by now')
        assert txid not in self.nodes[0].getrawmempool()
        txdata = self.nodes[0].gettransaction(txid)
        assert txdata['confirmations'] == 0 #confirmation should still be 0
        # The number of transactions created should be greater than the ones present in the mempool
        assert_greater_than(tx_batch_size * num_of_batches, len(node.getrawmempool()))
        # Initial tx created should not be present in the mempool anymore as it had a lower fee rate
        assert tx_to_be_evicted_id not in node.getrawmempool()

        self.log.info('Check that mempoolminfee is larger than minrelytxfee')
        assert_equal(self.nodes[0].getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
        assert_greater_than(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
        self.log.info('Check that mempoolminfee is larger than minrelaytxfee')
        assert_equal(node.getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
        assert_greater_than(node.getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))

        # Deliberately try to create a tx with a fee less than the minimum mempool fee to assert that it does not get added to the mempool
        self.log.info('Create a mempool tx that will not pass mempoolminfee')
        us0 = utxos.pop()
        inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
        outputs = {self.nodes[0].getnewaddress() : 0.0001}
        tx = self.nodes[0].createrawtransaction(inputs, outputs)
        # specifically fund this tx with a fee < mempoolminfee, >= than minrelaytxfee
        txF = self.nodes[0].fundrawtransaction(tx, {'feeRate': relayfee})
        txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex'])
        assert_raises_rpc_error(-26, "mempool min fee not met", self.nodes[0].sendrawtransaction, txFS['hex'])
        assert_raises_rpc_error(-26, "mempool min fee not met", miniwallet.send_self_transfer, from_node=node, fee_rate=relayfee, mempool_valid=False)


if __name__ == '__main__':
    MempoolLimitTest().main()
@ -17,7 +17,7 @@ from test_framework.util import assert_equal
class MempoolUpdateFromBlockTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.extra_args = [['-limitdescendantsize=1000', '-limitancestorsize=1000']]
        self.extra_args = [['-limitdescendantsize=1000', '-limitancestorsize=1000', '-limitancestorcount=100']]

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
@ -3,14 +3,26 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

import random

from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
from test_framework.messages import (
    COIN,
    MAX_MONEY,
    uint256_to_string,
)
from test_framework.util import (
    assert_equal,
    assert_is_hex_string,
    assert_raises_rpc_error,
)

'''
rpc_coinjoin.py

Tests CoinJoin basic RPC
'''
# See coinjoin/options.h
COINJOIN_ROUNDS_DEFAULT = 4
COINJOIN_ROUNDS_MAX = 16
COINJOIN_ROUNDS_MIN = 2
COINJOIN_TARGET_MAX = int(MAX_MONEY / COIN)
COINJOIN_TARGET_MIN = 2

class CoinJoinTest(BitcoinTestFramework):
    def set_test_params(self):

@ -19,45 +31,128 @@ class CoinJoinTest(BitcoinTestFramework):
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def run_test(self):
        self.test_coinjoin_start_stop()
        self.test_coinjoin_setamount()
        self.test_coinjoin_setrounds()
    def setup_nodes(self):
        self.add_nodes(self.num_nodes)
        self.start_nodes()

    def test_coinjoin_start_stop(self):
        # Start Mixing
        self.nodes[0].coinjoin("start")
        # Get CoinJoin info
        cj_info = self.nodes[0].getcoinjoininfo()
        # Ensure that it started properly
    def run_test(self):
        node = self.nodes[0]

        node.createwallet(wallet_name='w1', blank=True, disable_private_keys=False)
        w1 = node.get_wallet_rpc('w1')
        self.test_salt_presence(w1)
        self.test_coinjoin_start_stop(w1)
        self.test_setcoinjoinamount(w1)
        self.test_setcoinjoinrounds(w1)
        self.test_coinjoinsalt(w1)
        w1.unloadwallet()

        node.createwallet(wallet_name='w2', blank=True, disable_private_keys=True)
        w2 = node.get_wallet_rpc('w2')
        self.test_coinjoinsalt_disabled(w2)
        w2.unloadwallet()

    def test_salt_presence(self, node):
        self.log.info('Salt should be automatically generated in new wallet')
        # Will raise exception if no salt generated
        assert_is_hex_string(node.coinjoinsalt('get'))

    def test_coinjoin_start_stop(self, node):
        self.log.info('"coinjoin" subcommands should update mixing status')
        # Start mix session and ensure it's reported
        node.coinjoin('start')
        cj_info = node.getcoinjoininfo()
        assert_equal(cj_info['enabled'], True)
        assert_equal(cj_info['running'], True)
        # Repeated start should yield error
        assert_raises_rpc_error(-32603, 'Mixing has been started already.', node.coinjoin, 'start')

        # Stop mixing
        self.nodes[0].coinjoin("stop")
        # Get CoinJoin info
        cj_info = self.nodes[0].getcoinjoininfo()
        # Ensure that it stopped properly
        # Stop mix session and ensure it's reported
        node.coinjoin('stop')
        cj_info = node.getcoinjoininfo()
        assert_equal(cj_info['enabled'], True)
        assert_equal(cj_info['running'], False)
        # Repeated stop should yield error
        assert_raises_rpc_error(-32603, 'No mix session to stop', node.coinjoin, 'stop')

    def test_coinjoin_setamount(self):
        # Try normal values
        self.nodes[0].setcoinjoinamount(50)
        cj_info = self.nodes[0].getcoinjoininfo()
        assert_equal(cj_info['max_amount'], 50)
        # Reset mix session
        assert_equal(node.coinjoin('reset'), "Mixing was reset")

        # Try large values
        self.nodes[0].setcoinjoinamount(1200000)
        cj_info = self.nodes[0].getcoinjoininfo()
        assert_equal(cj_info['max_amount'], 1200000)
    def test_setcoinjoinamount(self, node):
        self.log.info('"setcoinjoinamount" should update mixing target')
        # Test normal and large values
        for value in [COINJOIN_TARGET_MIN, 50, 1200000, COINJOIN_TARGET_MAX]:
            node.setcoinjoinamount(value)
            assert_equal(node.getcoinjoininfo()['max_amount'], value)
        # Test values below minimum and above maximum
        for value in [COINJOIN_TARGET_MIN - 1, COINJOIN_TARGET_MAX + 1]:
            assert_raises_rpc_error(-8, "Invalid amount of DASH as mixing goal amount", node.setcoinjoinamount, value)

    def test_coinjoin_setrounds(self):
        # Try normal values
        self.nodes[0].setcoinjoinrounds(5)
        cj_info = self.nodes[0].getcoinjoininfo()
        assert_equal(cj_info['max_rounds'], 5)
    def test_setcoinjoinrounds(self, node):
        self.log.info('"setcoinjoinrounds" should update mixing rounds')
        # Test acceptable values
        for value in [COINJOIN_ROUNDS_MIN, COINJOIN_ROUNDS_DEFAULT, COINJOIN_ROUNDS_MAX]:
            node.setcoinjoinrounds(value)
            assert_equal(node.getcoinjoininfo()['max_rounds'], value)
        # Test values below minimum and above maximum
        for value in [COINJOIN_ROUNDS_MIN - 1, COINJOIN_ROUNDS_MAX + 1]:
            assert_raises_rpc_error(-8, "Invalid number of rounds", node.setcoinjoinrounds, value)

    def test_coinjoinsalt(self, node):
        self.log.info('"coinjoinsalt generate" should fail if salt already present')
        assert_raises_rpc_error(-32600, 'Wallet "w1" already has set CoinJoin salt!', node.coinjoinsalt, 'generate')

        self.log.info('"coinjoinsalt" subcommands should succeed if no balance and not mixing')
        # 'coinjoinsalt generate' should return a new salt if overwrite enabled
        s1 = node.coinjoinsalt('get')
        assert_equal(node.coinjoinsalt('generate', True), True)
        s2 = node.coinjoinsalt('get')
        assert s1 != s2

        # 'coinjoinsalt get' should fetch newly generated value (i.e. new salt should persist)
        node.unloadwallet('w1')
        node.loadwallet('w1')
        node = self.nodes[0].get_wallet_rpc('w1')
        assert_equal(s2, node.coinjoinsalt('get'))

        # 'coinjoinsalt set' should work with random hashes
        s1 = uint256_to_string(random.getrandbits(256))
        node.coinjoinsalt('set', s1)
        assert_equal(s1, node.coinjoinsalt('get'))
        assert s1 != s2

        # 'coinjoinsalt set' shouldn't work with nonsense values
        s2 = format(0, '064x')
        assert_raises_rpc_error(-8, "Invalid CoinJoin salt value", node.coinjoinsalt, 'set', s2, True)
        s2 = s2[0:63] + 'h'
        assert_raises_rpc_error(-8, "salt must be hexadecimal string (not '%s')" % s2, node.coinjoinsalt, 'set', s2, True)

        self.log.info('"coinjoinsalt generate" and "coinjoinsalt set" should fail if mixing')
        # Start mix session
        node.coinjoin('start')
        assert_equal(node.getcoinjoininfo()['running'], True)

        # 'coinjoinsalt generate' and 'coinjoinsalt set' should fail when mixing
        assert_raises_rpc_error(-4, 'Wallet "w1" is currently mixing, cannot change salt!', node.coinjoinsalt, 'generate', True)
        assert_raises_rpc_error(-4, 'Wallet "w1" is currently mixing, cannot change salt!', node.coinjoinsalt, 'set', s1, True)

        # 'coinjoinsalt get' should still work
        assert_equal(node.coinjoinsalt('get'), s1)

        # Stop mix session
        node.coinjoin('stop')
        assert_equal(node.getcoinjoininfo()['running'], False)

        # 'coinjoinsalt generate' and 'coinjoinsalt set' should start working again
        assert_equal(node.coinjoinsalt('generate', True), True)
        assert_equal(node.coinjoinsalt('set', s1, True), True)

    def test_coinjoinsalt_disabled(self, node):
        self.log.info('"coinjoinsalt" subcommands should fail if private keys disabled')
        for subcommand in ['generate', 'get']:
            assert_raises_rpc_error(-32600, 'Wallet "w2" has private keys disabled, cannot perform CoinJoin!', node.coinjoinsalt, subcommand)
        s1 = uint256_to_string(random.getrandbits(256))
        assert_raises_rpc_error(-32600, 'Wallet "w2" has private keys disabled, cannot perform CoinJoin!', node.coinjoinsalt, 'set', s1)

if __name__ == '__main__':
    CoinJoinTest().main()
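As the coinjoinsalt checks above show, a salt is a 64-character hex string encoding a non-zero 256-bit value: the all-zero string is rejected as "Invalid CoinJoin salt value" and non-hex input is rejected as not hexadecimal. A standalone sketch of producing a value of that shape, outside the test framework and purely for illustration:

# Build a salt string of the form fed to 'coinjoinsalt set' in the test above:
# 64 lowercase hex characters encoding a non-zero 256-bit value.
import secrets

def make_coinjoin_salt() -> str:
    value = 0
    while value == 0:              # an all-zero salt is rejected by the RPC
        value = secrets.randbits(256)
    return format(value, '064x')   # 64 hex characters, zero-padded

print(make_coinjoin_salt())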
@ -22,9 +22,6 @@ class InvalidAddressErrorMessageTest(BitcoinTestFramework):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def test_validateaddress(self):
        node = self.nodes[0]

@ -50,7 +47,10 @@ class InvalidAddressErrorMessageTest(BitcoinTestFramework):

    def run_test(self):
        self.test_validateaddress()
        self.test_getaddressinfo()

        if self.is_wallet_compiled():
            self.init_wallet(node=0)
            self.test_getaddressinfo()


if __name__ == '__main__':
@ -473,12 +473,12 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):

    def import_deterministic_coinbase_privkeys(self):
        for i in range(len(self.nodes)):
            self.init_wallet(i)
            self.init_wallet(node=i)

    def init_wallet(self, i):
        wallet_name = self.default_wallet_name if self.wallet_names is None else self.wallet_names[i] if i < len(self.wallet_names) else False
    def init_wallet(self, *, node):
        wallet_name = self.default_wallet_name if self.wallet_names is None else self.wallet_names[node] if node < len(self.wallet_names) else False
        if wallet_name is not False:
            n = self.nodes[i]
            n = self.nodes[node]
            if wallet_name is not None:
                n.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors, load_on_startup=True)
            n.importprivkey(privkey=n.get_deterministic_priv_key().key, label='coinbase', rescan=True)
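Because init_wallet is now keyword-only, tests that opt out of the default wallet override it with the same signature, as the ListDescriptorsTest hunk further down does. A minimal sketch of such a subclass (the class name is illustrative, not part of this changeset):

# Sketch of a functional test that creates no default wallet by overriding
# the keyword-only init_wallet signature introduced above.
from test_framework.test_framework import BitcoinTestFramework


class NoDefaultWalletExample(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1

    def init_wallet(self, *, node):
        return  # do not create any wallet by default

    def run_test(self):
        assert self.nodes[0].listwallets() == []


if __name__ == '__main__':
    NoDefaultWalletExample().main()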
@ -892,8 +892,6 @@ class RPCCoverage():
        # Consider RPC generate covered, because it is overloaded in
        # test_framework/test_node.py and not seen by the coverage check.
        covered_cmds = set({'generate'})
        # TODO: implement functional tests for coinjoinsalt
        covered_cmds.add('coinjoinsalt')
        # TODO: implement functional tests for voteraw
        covered_cmds.add('voteraw')
        # TODO: implement functional tests for getmerkleblocks
@ -135,9 +135,9 @@ class WalletBackupTest(BitcoinTestFramework):
        assert os.path.exists(wallet_file)

    def init_three(self):
        self.init_wallet(0)
        self.init_wallet(1)
        self.init_wallet(2)
        self.init_wallet(node=0)
        self.init_wallet(node=1)
        self.init_wallet(node=2)

    def run_test(self):
        self.log.info("Generating initial blockchain")
@ -129,13 +129,49 @@ class WalletTest(BitcoinTestFramework):
        # Exercise locking of unspent outputs
        unspent_0 = self.nodes[2].listunspent()[0]
        unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
        # Trying to unlock an output which isn't locked should error
        assert_raises_rpc_error(-8, "Invalid parameter, expected locked output", self.nodes[2].lockunspent, True, [unspent_0])

        # Locking an already-locked output should error
        self.nodes[2].lockunspent(False, [unspent_0])
        assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0])
        assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 200)
        assert_equal([unspent_0], self.nodes[2].listlockunspent())
        self.nodes[2].lockunspent(True, [unspent_0])

        # Restarting the node should clear the lock
        self.restart_node(2)
        self.nodes[2].lockunspent(False, [unspent_0])

        # Unloading and reloating the wallet should clear the lock
        assert_equal(self.nodes[0].listwallets(), [self.default_wallet_name])
        self.nodes[2].unloadwallet(self.default_wallet_name)
        self.nodes[2].loadwallet(self.default_wallet_name)
        assert_equal(len(self.nodes[2].listlockunspent()), 0)

        # Locking non-persistently, then re-locking persistently, is allowed
        self.nodes[2].lockunspent(False, [unspent_0])
        self.nodes[2].lockunspent(False, [unspent_0], True)

        # Restarting the node with the lock written to the wallet should keep the lock
        self.restart_node(2)
        assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0])

        # Unloading and reloading the wallet with a persistent lock should keep the lock
        self.nodes[2].unloadwallet(self.default_wallet_name)
        self.nodes[2].loadwallet(self.default_wallet_name)
        assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0])

        # Locked outputs should not be used, even if they are the only available funds
        assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
        assert_equal([unspent_0], self.nodes[2].listlockunspent())

        # Unlocking should remove the persistent lock
        self.nodes[2].lockunspent(True, [unspent_0])
        self.restart_node(2)
        assert_equal(len(self.nodes[2].listlockunspent()), 0)

        # Reconnect node 2 after restarts
        self.connect_nodes(1, 2)
        self.connect_nodes(0, 2)

        assert_raises_rpc_error(-8, "txid must be of length 64 (not 34, for '0000000000000000000000000000000000')",
                                self.nodes[2].lockunspent, False,
                                [{"txid": "0000000000000000000000000000000000", "vout": 0}])
@ -23,7 +23,7 @@ class ListDescriptorsTest(BitcoinTestFramework):
        self.skip_if_no_sqlite()

    # do not create any wallet by default
    def init_wallet(self, i):
    def init_wallet(self, *, node):
        return

    def run_test(self):
@ -239,5 +239,16 @@ class UpgradeWalletTest(BitcoinTestFramework):
        desc_wallet = self.nodes[0].get_wallet_rpc("desc_upgrade")
        self.test_upgradewallet(desc_wallet, previous_version=120200, expected_version=120200)

        self.log.info("Checking that descriptor wallets without privkeys do nothing, successfully")
        self.nodes[0].createwallet(wallet_name="desc_upgrade_nopriv", descriptors=True, disable_private_keys=True)
        desc_wallet = self.nodes[0].get_wallet_rpc("desc_upgrade_nopriv")
        self.test_upgradewallet(desc_wallet, previous_version=120200, expected_version=120200)

        if self.is_bdb_compiled():
            self.log.info("Upgrading a wallet with private keys disabled")
            self.nodes[0].createwallet(wallet_name="privkeys_disabled_upgrade", disable_private_keys=True, descriptors=False)
            disabled_wallet = self.nodes[0].get_wallet_rpc("privkeys_disabled_upgrade")
            self.test_upgradewallet(disabled_wallet, previous_version=120200, expected_version=120200)


if __name__ == '__main__':
    UpgradeWalletTest().main()
@ -48,7 +48,6 @@ implicit-integer-sign-change:*/include/c++/
implicit-integer-sign-change:*/new_allocator.h
implicit-integer-sign-change:addrman.h
implicit-integer-sign-change:bech32.cpp
implicit-integer-sign-change:common/bloom.cpp
implicit-integer-sign-change:chain.cpp
implicit-integer-sign-change:chain.h
implicit-integer-sign-change:compat/stdin.cpp
@ -425,6 +425,30 @@
  "output_cmp": "txcreatedata2.json",
  "description": "Creates a new transaction with one input, one address output and one data (zero value) output (output in json)"
},
{ "exec": "./dash-tx",
  "args":
  ["-create",
   "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0:11aa"],
  "return_code": 1,
  "error_txt": "error: invalid TX sequence id '11aa'",
  "description": "Try to parse a sequence number outside the allowed range"
},
{ "exec": "./dash-tx",
  "args":
  ["-create",
   "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0:-1"],
  "return_code": 1,
  "error_txt": "error: invalid TX sequence id '-1'",
  "description": "Try to parse a sequence number outside the allowed range"
},
{ "exec": "./dash-tx",
  "args":
  ["-create",
   "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0:4294967296"],
  "return_code": 1,
  "error_txt": "error: invalid TX sequence id '4294967296'",
  "description": "Try to parse a sequence number outside the allowed range"
},
{ "exec": "./dash-tx",
  "args":
  ["-create",

@ -433,6 +457,14 @@
  "output_cmp": "txcreatedata_seq0.hex",
  "description": "Creates a new transaction with one input with sequence number and one address output"
},
{ "exec": "./dash-tx",
  "args":
  ["-create",
   "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0: 4294967293 ",
   "outaddr=0.18:XijDvbYpPmznwgpWD3DkdYNfGmRP2KoVSk"],
  "output_cmp": "txcreatedata_seq0.hex",
  "description": "Creates a new transaction with one input with sequence number (+whitespace) and one address output"
},
{ "exec": "./dash-tx",
  "args":
  ["-json",

@ -457,15 +489,27 @@
  "output_cmp": "txcreatedata_seq1.json",
  "description": "Adds a new input with sequence number to a transaction (output in json)"
},
{ "exec": "./dash-tx",
  "args": ["-create", "outmultisig=1:-2:3:02a5:021:02df", "nversion=1"],
  "return_code": 1,
  "error_txt": "error: invalid multisig required number '-2'",
  "description": "Try to parse a multisig number outside the allowed range"
},
{ "exec": "./dash-tx",
  "args": ["-create", "outmultisig=1:2:3a:02a5:021:02df", "nversion=1"],
  "return_code": 1,
  "error_txt": "error: invalid multisig total number '3a'",
  "description": "Try to parse a multisig number outside the allowed range"
},
{ "exec": "./dash-tx",
  "args": ["-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485", "nversion=1"],
  "output_cmp": "txcreatemultisig1.hex",
  "description": "Creates a new transaction with a single 2-of-3 multisig output"
},
{ "exec": "./dash-tx",
  "args": ["-json", "-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485", "nversion=1"],
  "args": ["-json", "-create", "outmultisig=1: 2: 3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485", "nversion=1"],
  "output_cmp": "txcreatemultisig1.json",
  "description": "Creates a new transaction with a single 2-of-3 multisig output (output in json)"
  "description": "Creates a new transaction with a single 2-of-3 multisig output (with whitespace, output in json)"
},
{ "exec": "./dash-tx",
  "args": ["-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485:S", "nversion=1"],
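The new vectors above can also be reproduced by hand. A small sketch that shells out to a locally built dash-tx and checks that the same out-of-range sequence id is rejected; the binary path is an assumption, while the arguments and expected return code come from the vector above.

# Re-run one of the new sequence-number vectors outside the test harness.
import subprocess

result = subprocess.run(
    ["./src/dash-tx", "-create",
     "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0:4294967296"],
    capture_output=True, text=True)
assert result.returncode == 1
assert "invalid TX sequence id '4294967296'" in (result.stdout + result.stderr)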