Mirror of https://github.com/dashpay/dash.git
Merge pull request #4437 from PastaPastaPasta/backport-sept-17
Backport 14264, 14324, 14583, 14426, 13687
Commit: d2c975dbd7
@@ -288,7 +288,7 @@ after_success:
     - stage: test
       env: >-
         HOST=x86_64-unknown-linux-gnu
-        PACKAGES="python3"
+        PACKAGES="python3-zmq"
         DEP_OPTS="NO_WALLET=1"
         GOAL="install"
         BITCOIN_CONFIG="--enable-glibc-back-compat --enable-reduce-exports"
@@ -29,29 +29,38 @@ Common `host-platform-triplets` for cross compilation are:

 No other options are needed, the paths are automatically configured.

-Install the required dependencies: Ubuntu & Debian
---------------------------------------------------
+### Install the required dependencies: Ubuntu & Debian

-For macOS cross compilation:
+#### For macOS cross compilation

     sudo apt-get install curl librsvg2-bin libtiff-tools bsdmainutils imagemagick libcap-dev libz-dev libbz2-dev python3-setuptools libtinfo5

-For Win64 cross compilation:
+#### For Win64 cross compilation

 - see [build-windows.md](../doc/build-windows.md#cross-compilation-for-ubuntu-and-windows-subsystem-for-linux)

-For linux (including i386, ARM) cross compilation:
+#### For linux (including i386, ARM) cross compilation

     sudo apt-get install curl g++-aarch64-linux-gnu g++-4.8-aarch64-linux-gnu gcc-4.8-aarch64-linux-gnu binutils-aarch64-linux-gnu g++-arm-linux-gnueabihf g++-4.8-arm-linux-gnueabihf gcc-4.8-arm-linux-gnueabihf binutils-arm-linux-gnueabihf g++-4.8-multilib gcc-4.8-multilib binutils-gold bsdmainutils

 Common linux dependencies:

     sudo apt-get install make automake cmake curl g++-multilib libtool binutils-gold bsdmainutils pkg-config python3

 For linux ARM cross compilation:

     sudo apt-get install g++-arm-linux-gnueabihf binutils-arm-linux-gnueabihf

 For linux AARCH64 cross compilation:

     sudo apt-get install g++-aarch64-linux-gnu binutils-aarch64-linux-gnu

 For linux RISC-V 64-bit cross compilation (there are no packages for 32-bit):

-    sudo apt-get install curl g++-riscv64-linux-gnu binutils-riscv64-linux-gnu
+    sudo apt-get install g++-riscv64-linux-gnu binutils-riscv64-linux-gnu

 RISC-V known issue: gcc-7.3.0 and gcc-7.3.1 result in a broken `test_dash` executable (see https://github.com/bitcoin/bitcoin/pull/13543),
 this is apparently fixed in gcc-8.1.0.

-Dependency Options:
+### Dependency Options
 The following can be set when running make: `make FOO=bar`

 <dl>
@@ -83,7 +92,7 @@ from llvm.org. Clang 8 or later is required.</dd>
 If some packages are not built, for example `make NO_WALLET=1`, the appropriate
 options will be passed to Dash Core's configure. In this case, `--disable-wallet`.

-Additional targets:
+### Additional targets

     download: run 'make download' to fetch all sources without building them
     download-osx: run 'make download-osx' to fetch all sources needed for macOS builds
@@ -20,7 +20,7 @@ Base build dependencies
 brew install automake libtool pkg-config
 ```

-If you want to build the disk image with `make deploy` (.dmg / optional), you need RSVG
+If you want to build the disk image with `make deploy` (.dmg / optional), you need RSVG:
 ```bash
 brew install librsvg
 ```
@@ -51,13 +51,13 @@ Running

 Dash Core is now available at `./src/dashd`

-Before running, you may create an empty configuration file.
+Before running, you may create an empty configuration file:

     touch "/Users/${USER}/Library/Application Support/DashCore/dash.conf"

     chmod 600 "/Users/${USER}/Library/Application Support/DashCore/dash.conf"

-The first time you run dashd, it will start downloading the blockchain. This process could take several hours.
+The first time you run dashd, it will start downloading the blockchain. This process could take many hours, or even days on slower than average systems.

 You can monitor the download process by looking at the debug.log file:
@@ -57,9 +57,7 @@ Hardening Flags:

 Hardening enables the following features:

-* Position Independent Executable
-    Build position independent code to take advantage of Address Space Layout Randomization
+* _Position Independent Executable_: Build position independent code to take advantage of Address Space Layout Randomization
 offered by some kernels. Attackers who can cause execution of code at an arbitrary memory
 location are thwarted if they don't know where anything useful is located.
 The stack and heap are randomly located by default, but this allows the code section to be

@@ -77,8 +75,7 @@ Hardening enables the following features:
     TYPE
     ET_DYN

-* Non-executable Stack
-    If the stack is executable then trivial stack-based buffer overflow exploits are possible if
+* _Non-executable Stack_: If the stack is executable then trivial stack-based buffer overflow exploits are possible if
 vulnerable buffers are found. By default, Dash Core should be built with a non-executable stack,
 but if one of the libraries it uses asks for an executable stack or someone makes a mistake
 and uses a compiler extension which requires an executable stack, it will silently build an
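The `TYPE` / `ET_DYN` lines above come from the ELF-type check the hardening documentation describes: a PIE binary reports type `ET_DYN` rather than `ET_EXEC`. As an illustration only (not part of the build docs), the same check can be scripted by reading the ELF header directly; the default path `src/dashd` and the script itself are just an example sketch:

```python
#!/usr/bin/env python3
# Illustrative check that a built binary is ET_DYN (PIE-capable) rather than ET_EXEC.
import struct
import sys

ET_EXEC, ET_DYN = 2, 3

def elf_type(path):
    with open(path, 'rb') as f:
        ident = f.read(16)                                   # e_ident
        if ident[:4] != b'\x7fELF':
            raise ValueError('not an ELF file')
        endian = '<' if ident[5] == 1 else '>'               # EI_DATA: 1 = little-endian, 2 = big-endian
        (e_type,) = struct.unpack(endian + 'H', f.read(2))   # e_type immediately follows e_ident
    return e_type

if __name__ == '__main__':
    t = elf_type(sys.argv[1] if len(sys.argv) > 1 else 'src/dashd')
    print('ET_DYN (PIE)' if t == ET_DYN else 'ET_EXEC (not PIE)' if t == ET_EXEC else 'other type: {}'.format(t))
```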
@@ -6,6 +6,7 @@
 #include <sys/file.h>
 #include <sys/utsname.h>
 #else
+#define NOMINMAX
 #include <codecvt>
 #include <windows.h>
 #endif
@@ -110,7 +111,7 @@ bool FileLock::TryLock()
         return false;
     }
     _OVERLAPPED overlapped = {0};
-    if (!LockFileEx(hFile, LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY, 0, 0, 0, &overlapped)) {
+    if (!LockFileEx(hFile, LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY, 0, std::numeric_limits<DWORD>::max(), std::numeric_limits<DWORD>::max(), &overlapped)) {
         reason = GetErrorReason();
         return false;
     }
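The old call asked `LockFileEx` to lock a zero-byte range, which does not actually exclude other processes; the replacement locks the maximum possible range, and the `#define NOMINMAX` above keeps the `windows.h` min/max macros from breaking `std::numeric_limits<DWORD>::max()`. To make the intended behaviour concrete, here is a small, self-contained sketch of an exclusive non-blocking lock on a lock file, roughly what `FileLock::TryLock` provides; this is not Dash Core code, the file name is an example, and it uses Python's standard `fcntl`/`msvcrt` modules rather than the C++ APIs in the diff:

```python
#!/usr/bin/env python3
# Illustrative sketch: exclusive, non-blocking lock on a lock file.
import os
import sys

def try_lock(path):
    fd = os.open(path, os.O_RDWR | os.O_CREAT, 0o644)
    try:
        if sys.platform == 'win32':
            import msvcrt
            # Lock one byte, non-blocking; raises OSError if another process holds it.
            msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
        else:
            import fcntl
            # Exclusive advisory lock; raises OSError if another process holds it.
            fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError:
        os.close(fd)
        return None
    return fd  # keep the descriptor open for as long as the lock is needed

if __name__ == '__main__':
    lock = try_lock('example_datadir.lock')
    print("lock acquired" if lock is not None else "already locked by another process")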
@@ -14,9 +14,6 @@ class ConfArgsTest(BitcoinTestFramework):
         self.setup_clean_chain = True
         self.num_nodes = 1

-    def skip_test_if_missing_module(self):
-        self.skip_if_no_wallet()
-
     def test_config_file_parser(self):
         # Assume node is stopped

@@ -95,19 +92,27 @@ class ConfArgsTest(BitcoinTestFramework):
         with open(conf_file, 'w', encoding='utf8') as f:
             f.write("datadir=" + new_data_dir + "\n")
             f.write(conf_file_contents)
-        self.nodes[0].assert_start_raises_init_error(['-conf=' + conf_file], 'Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.')
+
+        # Temporarily disabled, because this test would access the user's home dir (~/.bitcoin)
+        #self.nodes[0].assert_start_raises_init_error(['-conf=' + conf_file], 'Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.')

         # Create the directory and ensure the config file now works
         os.mkdir(new_data_dir)
-        self.start_node(0, ['-conf='+conf_file, '-wallet=w1'])
-        self.stop_node(0)
-        assert os.path.exists(os.path.join(new_data_dir, self.chain, 'wallets', 'w1'))
+        # Temporarily disabled, because this test would access the user's home dir (~/.bitcoin)
+        #self.start_node(0, ['-conf='+conf_file, '-wallet=w1'])
+        #self.stop_node(0)
+        #assert os.path.exists(os.path.join(new_data_dir, self.chain, 'blocks'))
+        #if self.is_wallet_compiled():
+            #assert os.path.exists(os.path.join(new_data_dir, self.chain, 'wallets', 'w1'))

         # Ensure command line argument overrides datadir in conf
         os.mkdir(new_data_dir_2)
         self.nodes[0].datadir = new_data_dir_2
         self.start_node(0, ['-datadir='+new_data_dir_2, '-conf='+conf_file, '-wallet=w2'])
-        assert os.path.exists(os.path.join(new_data_dir_2, self.chain, 'wallets', 'w2'))
+        assert os.path.exists(os.path.join(new_data_dir_2, self.chain, 'blocks'))
+        if self.is_wallet_compiled():
+            assert os.path.exists(os.path.join(new_data_dir_2, self.chain, 'wallets', 'w2'))


 if __name__ == '__main__':
     ConfArgsTest().main()
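The behaviour this hunk checks is a precedence rule: a `datadir=` entry in the file passed via `-conf` must point at an existing directory, and `-datadir` given on the command line overrides it. Purely as an illustration of that precedence (this is not Dash Core's argument parser; the function and paths are hypothetical):

```python
import os

def resolve_datadir(cli_args, conf_path, default_datadir):
    """Hypothetical sketch: -datadir on the command line > datadir= in the conf file > default."""
    cli = dict(arg.lstrip('-').split('=', 1) for arg in cli_args if '=' in arg)
    if 'datadir' in cli:
        return cli['datadir']
    if os.path.exists(conf_path):
        with open(conf_path, encoding='utf8') as f:
            for line in f:
                key, _, value = line.partition('=')
                if key.strip() == 'datadir':
                    return value.strip()
    return default_datadir

# e.g. resolve_datadir(['-datadir=/tmp/d2', '-conf=/tmp/dash.conf'], '/tmp/dash.conf', '~/.dashcore') -> '/tmp/d2'
```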
test/functional/feature_filelock.py (new executable file, 36 lines)

@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Check that it's not possible to start a second bitcoind instance using the same datadir or wallet."""
+import os
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.test_node import ErrorMatch
+
+class FilelockTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 2
+
+    def setup_network(self):
+        self.add_nodes(self.num_nodes, extra_args=None)
+        self.nodes[0].start([])
+        self.nodes[0].wait_for_rpc_connection()
+
+    def run_test(self):
+        datadir = os.path.join(self.nodes[0].datadir, 'regtest')
+        self.log.info("Using datadir {}".format(datadir))
+
+        self.log.info("Check that we can't start a second dashd instance using the same datadir")
+        expected_msg = "Error: Cannot obtain a lock on data directory {}. Dash Core is probably already running.".format(datadir)
+        self.nodes[1].assert_start_raises_init_error(extra_args=['-datadir={}'.format(self.nodes[0].datadir), '-noserver'], expected_msg=expected_msg)
+
+        if self.is_wallet_compiled():
+            wallet_dir = os.path.join(datadir, 'wallets')
+            self.log.info("Check that we can't start a second dashd instance using the same wallet")
+            expected_msg = "Error: Error initializing wallet database environment"
+            self.nodes[1].assert_start_raises_init_error(extra_args=['-walletdir={}'.format(wallet_dir), '-noserver'], expected_msg=expected_msg, match=ErrorMatch.PARTIAL_REGEX)
+
+if __name__ == '__main__':
+    FilelockTest().main()
@@ -5,17 +5,16 @@
 """Test the -alertnotify, -blocknotify and -walletnotify options."""
 import os

+from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import assert_equal, wait_until, connect_nodes_bi


 class NotificationsTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
         self.setup_clean_chain = True

-    def skip_test_if_missing_module(self):
-        self.skip_if_no_wallet()
-
     def setup_network(self):
         self.alertnotify_dir = os.path.join(self.options.tmpdir, "alertnotify")
         self.blocknotify_dir = os.path.join(self.options.tmpdir, "blocknotify")

@@ -25,7 +24,7 @@ class NotificationsTest(BitcoinTestFramework):
         os.mkdir(self.walletnotify_dir)

         # -alertnotify and -blocknotify on node0, walletnotify on node1
-        self.extra_args = [["-blockversion=2",
+        self.extra_args = [[
                             "-alertnotify=echo > {}".format(os.path.join(self.alertnotify_dir, '%s')),
                             "-blocknotify=echo > {}".format(os.path.join(self.blocknotify_dir, '%s'))],
                            ["-blockversion=211",

@@ -36,7 +35,7 @@ class NotificationsTest(BitcoinTestFramework):
     def run_test(self):
         self.log.info("test -blocknotify")
         block_count = 10
-        blocks = self.nodes[1].generate(block_count)
+        blocks = self.nodes[1].generatetoaddress(block_count, self.nodes[1].getnewaddress() if self.is_wallet_compiled() else ADDRESS_BCRT1_UNSPENDABLE)

         # wait at most 10 seconds for expected number of files before reading the content
         wait_until(lambda: len(os.listdir(self.blocknotify_dir)) == block_count, timeout=10)

@@ -44,27 +43,29 @@ class NotificationsTest(BitcoinTestFramework):
         # directory content should equal the generated blocks hashes
         assert_equal(sorted(blocks), sorted(os.listdir(self.blocknotify_dir)))

-        self.log.info("test -walletnotify")
-        # wait at most 10 seconds for expected number of files before reading the content
-        wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
+        if self.is_wallet_compiled():
+            self.log.info("test -walletnotify")
+            # wait at most 10 seconds for expected number of files before reading the content
+            wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)

-        # directory content should equal the generated transaction hashes
-        txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
-        assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
-        self.stop_node(1)
-        for tx_file in os.listdir(self.walletnotify_dir):
-            os.remove(os.path.join(self.walletnotify_dir, tx_file))
+            # directory content should equal the generated transaction hashes
+            txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
+            assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
+            self.stop_node(1)

-        self.log.info("test -walletnotify after rescan")
-        # restart node to rescan to force wallet notifications
-        self.start_node(1)
-        connect_nodes_bi(self.nodes, 0, 1)
+            for tx_file in os.listdir(self.walletnotify_dir):
+                os.remove(os.path.join(self.walletnotify_dir, tx_file))

-        wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
+            self.log.info("test -walletnotify after rescan")
+            # restart node to rescan to force wallet notifications
+            self.start_node(1)
+            connect_nodes_bi(self.nodes, 0, 1)

-        # directory content should equal the generated transaction hashes
-        txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
-        assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
+            wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
+
+            # directory content should equal the generated transaction hashes
+            txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
+            assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))

         # TODO: add test for `-alertnotify` large fork notifications
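The `-blocknotify`/`-walletnotify` options run an arbitrary command with `%s` replaced by the block or transaction hash; the test uses `echo > <dir>/%s` so every notification leaves a file behind that the assertions can count. As a purely illustrative sketch of what a real consumer might look like, a standalone handler wired up as, say, `-blocknotify=/path/to/notify.py %s` could be as simple as the following (the script path and log file are hypothetical):

```python
#!/usr/bin/env python3
# Hypothetical -blocknotify handler: dashd invokes it with the block hash
# substituted for %s as the first command-line argument.
import sys
import time

def main():
    if len(sys.argv) < 2:
        sys.exit("usage: notify.py <blockhash>")
    block_hash = sys.argv[1]
    # Append the hash with a timestamp; /tmp/blocknotify.log is just an example path.
    with open("/tmp/blocknotify.log", "a", encoding="utf8") as f:
        f.write("{} {}\n".format(int(time.time()), block_hash))

if __name__ == "__main__":
    main()
```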
@@ -12,9 +12,6 @@ class TestBitcoinCli(BitcoinTestFramework):
         self.setup_clean_chain = True
         self.num_nodes = 1

-    def skip_test_if_missing_module(self):
-        self.skip_if_no_wallet()
-
     def run_test(self):
         """Main test logic"""

@@ -22,9 +19,10 @@ class TestBitcoinCli(BitcoinTestFramework):
         assert "Dash Core RPC client version" in cli_response

         self.log.info("Compare responses from getwalletinfo RPC and `dash-cli getwalletinfo`")
-        cli_response = self.nodes[0].cli.getwalletinfo()
-        rpc_response = self.nodes[0].getwalletinfo()
-        assert_equal(cli_response, rpc_response)
+        if self.is_wallet_compiled():
+            cli_response = self.nodes[0].cli.getwalletinfo()
+            rpc_response = self.nodes[0].getwalletinfo()
+            assert_equal(cli_response, rpc_response)

         self.log.info("Compare responses from getblockchaininfo RPC and `dash-cli getblockchaininfo`")
         cli_response = self.nodes[0].cli.getblockchaininfo()

@@ -52,27 +50,31 @@ class TestBitcoinCli(BitcoinTestFramework):
         self.log.info("Compare responses from `dash-cli -getinfo` and the RPCs data is retrieved from.")
         cli_get_info = self.nodes[0].cli('-getinfo').send_cli()
-        wallet_info = self.nodes[0].getwalletinfo()
+        if self.is_wallet_compiled():
+            wallet_info = self.nodes[0].getwalletinfo()
         network_info = self.nodes[0].getnetworkinfo()
         blockchain_info = self.nodes[0].getblockchaininfo()

         assert_equal(cli_get_info['version'], network_info['version'])
         assert_equal(cli_get_info['protocolversion'], network_info['protocolversion'])
-        assert_equal(cli_get_info['walletversion'], wallet_info['walletversion'])
-        assert_equal(cli_get_info['balance'], wallet_info['balance'])
-        assert_equal(cli_get_info['coinjoin_balance'], wallet_info['coinjoin_balance'])
+        if self.is_wallet_compiled():
+            assert_equal(cli_get_info['walletversion'], wallet_info['walletversion'])
+            assert_equal(cli_get_info['balance'], wallet_info['balance'])
+            assert_equal(cli_get_info['coinjoin_balance'], wallet_info['coinjoin_balance'])
         assert_equal(cli_get_info['blocks'], blockchain_info['blocks'])
         assert_equal(cli_get_info['timeoffset'], network_info['timeoffset'])
         assert_equal(cli_get_info['connections'], network_info['connections'])
         assert_equal(cli_get_info['proxy'], network_info['networks'][0]['proxy'])
         assert_equal(cli_get_info['difficulty'], blockchain_info['difficulty'])
         assert_equal(cli_get_info['testnet'], blockchain_info['chain'] == "test")
-        assert_equal(cli_get_info['balance'], wallet_info['balance'])
-        assert_equal(cli_get_info['keypoololdest'], wallet_info['keypoololdest'])
-        assert_equal(cli_get_info['keypoolsize'], wallet_info['keypoolsize'])
-        assert_equal(cli_get_info['paytxfee'], wallet_info['paytxfee'])
-        assert_equal(cli_get_info['relayfee'], network_info['relayfee'])
-        # unlocked_until is not tested because the wallet is not encrypted
+        if self.is_wallet_compiled():
+            assert_equal(cli_get_info['balance'], wallet_info['balance'])
+            assert_equal(cli_get_info['keypoololdest'], wallet_info['keypoololdest'])
+            assert_equal(cli_get_info['keypoolsize'], wallet_info['keypoolsize'])
+            assert_equal(cli_get_info['paytxfee'], wallet_info['paytxfee'])
+            assert_equal(cli_get_info['relayfee'], network_info['relayfee'])
+            # unlocked_until is not tested because the wallet is not encrypted


 if __name__ == '__main__':
     TestBitcoinCli().main()
@@ -7,6 +7,7 @@ import struct

 from codecs import encode

+from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.messages import dashhash
 from test_framework.util import (

@@ -46,7 +47,6 @@ class ZMQTest (BitcoinTestFramework):
     def skip_test_if_missing_module(self):
         self.skip_if_no_py3_zmq()
         self.skip_if_no_bitcoind_zmq()
-        self.skip_if_no_wallet()

     def setup_nodes(self):
         import zmq

@@ -85,7 +85,7 @@ class ZMQTest (BitcoinTestFramework):
     def _zmq_test(self):
         num_blocks = 5
         self.log.info("Generate %(n)d blocks (and %(n)d coinbase txes)" % {"n": num_blocks})
-        genhashes = self.nodes[0].generate(num_blocks)
+        genhashes = self.nodes[0].generatetoaddress(num_blocks, ADDRESS_BCRT1_UNSPENDABLE)
         self.sync_all()

         for x in range(num_blocks):

@@ -106,17 +106,19 @@ class ZMQTest (BitcoinTestFramework):
             block = self.rawblock.receive()
             assert_equal(genhashes[x], dashhash_helper(block[:80]))

-        self.log.info("Wait for tx from second node")
-        payment_txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
-        self.sync_all()
+        if self.is_wallet_compiled():
+            self.log.info("Wait for tx from second node")
+            payment_txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
+            self.sync_all()

-        # Should receive the broadcasted txid.
-        txid = self.hashtx.receive()
-        assert_equal(payment_txid, txid.hex())
+            # Should receive the broadcasted txid.
+            txid = self.hashtx.receive()
+            assert_equal(payment_txid, txid.hex())

-        # Should receive the broadcasted raw transaction.
-        hex = self.rawtx.receive()
-        assert_equal(payment_txid, hash256(hex).hex())
+            # Should receive the broadcasted raw transaction.
+            hex = self.rawtx.receive()
+            assert_equal(payment_txid, hash256(hex).hex())

         self.log.info("Test the getzmqnotifications RPC")
         assert_equal(self.nodes[0].getzmqnotifications(), [
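For readers unfamiliar with the ZMQ interface these test changes touch: dashd publishes notifications on topics such as `hashblock` and `rawtx`, and each notification arrives as a three-part message (topic, payload, little-endian sequence number). A minimal, hypothetical subscriber sketch using pyzmq follows; it assumes a node started with something like `-zmqpubhashblock=tcp://127.0.0.1:28332` (the address is an example, not something the test mandates):

```python
#!/usr/bin/env python3
# Minimal sketch of a ZMQ listener for dashd block notifications.
# Assumes the node was started with -zmqpubhashblock=tcp://127.0.0.1:28332.
import zmq

def main():
    ctx = zmq.Context()
    sock = ctx.socket(zmq.SUB)
    sock.setsockopt_string(zmq.SUBSCRIBE, "hashblock")
    sock.connect("tcp://127.0.0.1:28332")
    while True:
        # Each notification is a multipart message: topic, body, sequence number.
        topic, body, seq = sock.recv_multipart()
        print(topic.decode(), body.hex(), int.from_bytes(seq, "little"))

if __name__ == "__main__":
    main()
```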
@@ -12,8 +12,12 @@
 from .script import hash256, hash160, CScript
 from .util import hex_str_to_bytes

+# Note unlike in bitcoin, this address isn't bech32 since we don't (at this time) support bech32.
+ADDRESS_BCRT1_UNSPENDABLE = 'yVg3NBUHNEhgDceqwVUjsZHreC5PBHnUo9'
+
 chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'


 def byte_to_base58(b, version):
     result = ''
     str = b.hex()
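`byte_to_base58` is only shown truncated in this hunk. For context, base58check (the encoding behind addresses like the `ADDRESS_BCRT1_UNSPENDABLE` constant above) prepends a version byte, appends the first four bytes of a double-SHA256 checksum, and base58-encodes the result. The following is a self-contained sketch of that scheme, independent of the test framework's own helper; the example version byte 140 is what I understand to be Dash's testnet/regtest P2PKH prefix:

```python
import hashlib

B58_CHARS = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def base58check_encode(payload: bytes, version: int) -> str:
    """Sketch of base58check: version byte + payload + 4-byte double-SHA256 checksum."""
    data = bytes([version]) + payload
    checksum = hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]
    data += checksum
    # Treat the whole byte string as a big integer and repeatedly divide by 58.
    n = int.from_bytes(data, 'big')
    encoded = ''
    while n > 0:
        n, rem = divmod(n, 58)
        encoded = B58_CHARS[rem] + encoded
    # Each leading zero byte is represented by the first base58 character ('1').
    leading_zeros = len(data) - len(data.lstrip(b'\x00'))
    return B58_CHARS[0] * leading_zeros + encoded

# Example: a 20-byte all-zero pubkey hash with version byte 140 should yield a 'y...'-prefixed address.
print(base58check_encode(bytes(20), 140))
```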
@@ -206,6 +206,7 @@ BASE_SCRIPTS = [
     'rpc_platform_filter.py',
     'feature_dip0020_activation.py',
     'feature_uacomment.py',
+    'feature_filelock.py',
     'p2p_unrequested_blocks.py',
     'p2p_blockfilters.py',
     'feature_asmap.py',