Merge remote-tracking branch 'bitcoin/0.12' into mergebtc12, fix/dashify

NOTE: .travis.yml was mostly preserved, probably needs additional attention

commit 6af7a58d92
@@ -7,8 +7,7 @@
# IPv6 support
sudo: false
dist: precise
group: legacy
dist: trusty

os: linux
language: cpp
@@ -93,6 +92,7 @@ matrix:
- compiler: gcc
before_install:
- git clone https://github.com/dashpay/dash_hash
- export PATH=$(echo $PATH | tr ':' "\n" | sed '/\/opt\/python/d' | tr "\n" ":" | sed "s|::|:|g")
install:
- cd dash_hash && python setup.py install --user && cd ..
- if [ "$PYZMQ" = "true" ]; then pip install pyzmq --user ; fi
@@ -107,7 +107,6 @@ script:
- OUTDIR=$BASE_OUTDIR/$TRAVIS_PULL_REQUEST/$TRAVIS_JOB_NUMBER-$HOST
- BITCOIN_CONFIG_ALL="--disable-dependency-tracking --prefix=$TRAVIS_BUILD_DIR/depends/$HOST --bindir=$OUTDIR/bin --libdir=$OUTDIR/lib"
- depends/$HOST/native/bin/ccache --max-size=$CCACHE_SIZE
- if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then export CCACHE_READONLY=1; fi
- test -n "$USE_SHELL" && eval '"$USE_SHELL" -c "./autogen.sh"' || ./autogen.sh
- ./configure --cache-file=config.cache $BITCOIN_CONFIG_ALL $BITCOIN_CONFIG || ( cat config.log && false)
- make distdir PACKAGE=dash VERSION=$HOST
@@ -50,10 +50,10 @@ submit new unit tests for old code. Unit tests can be compiled and run

There are also [regression and integration tests](/qa) of the RPC interface, written
in Python, that are run automatically on the build server.
These tests can be run with: `qa/pull-tester/rpc-tests.py`
These tests can be run (if the [test dependencies](/qa) are installed) with: `qa/pull-tester/rpc-tests.py`

The Travis CI system makes sure that every pull request is built for Windows
and Linux, OSX, and that unit and sanity tests are automatically run.
and Linux, OS X, and that unit and sanity tests are automatically run.

### Manual Quality Assurance (QA) Testing
@@ -384,7 +384,7 @@ AC_DEFUN([_BITCOIN_QT_FIND_LIBS_WITH_PKGCONFIG],[

dnl qt version is set to 'auto' and the preferred version wasn't found. Now try the other.
if test x$have_qt = xno && test x$bitcoin_qt_want_version = xauto; then
if test x$auto_priority_version = x$qt5; then
if test x$auto_priority_version = xqt5; then
PKG_CHECK_MODULES([QT], [$qt4_modules], [QT_INCLUDES="$QT_CFLAGS"; have_qt=yes; QT_LIB_PREFIX=Qt; bitcoin_qt_got_major_vers=4], [have_qt=no])
else
PKG_CHECK_MODULES([QT], [$qt5_modules], [QT_INCLUDES="$QT_CFLAGS"; have_qt=yes; QT_LIB_PREFIX=Qt5; bitcoin_qt_got_major_vers=5], [have_qt=no])
21 configure.ac
@@ -252,7 +252,7 @@ case $host in
fi

CPPFLAGS="$CPPFLAGS -D_MT -DWIN32 -D_WINDOWS -DBOOST_THREAD_USE_LIB"
LEVELDB_TARGET_FLAGS="TARGET_OS=OS_WINDOWS_CROSSCOMPILE"
LEVELDB_TARGET_FLAGS="-DOS_WINDOWS"
if test "x$CXXFLAGS_overridden" = "xno"; then
CXXFLAGS="$CXXFLAGS -w"
fi
@@ -274,7 +274,7 @@ case $host in
;;
*darwin*)
TARGET_OS=darwin
LEVELDB_TARGET_FLAGS="TARGET_OS=Darwin"
LEVELDB_TARGET_FLAGS="-DOS_MACOSX"
if test x$cross_compiling != xyes; then
BUILD_OS=darwin
AC_CHECK_PROG([PORT],port, port)
@@ -337,8 +337,11 @@ case $host in
;;
*linux*)
TARGET_OS=linux
LEVELDB_TARGET_FLAGS="-DOS_LINUX"
;;
*)
OTHER_OS=`echo ${host_os} | awk '{print toupper($0)}'`
LEVELDB_TARGET_FLAGS="-DOS_${OTHER_OS}"
;;
esac

@@ -523,6 +526,18 @@ if test x$use_reduce_exports = xyes; then
[AC_MSG_ERROR([Cannot set default symbol visibility. Use --disable-reduce-exports.])])
fi

dnl This can go away when we require c++11
TEMP_CXXFLAGS="$CXXFLAGS"
CXXFLAGS="$CXXFLAGS -std=c++0x"
AC_MSG_CHECKING(for c++11 atomics)
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
#include <atomic>
]],[[]])],
[ AC_MSG_RESULT(yes); LEVELDB_ATOMIC_CPPFLAGS="-DLEVELDB_ATOMIC_PRESENT"; LEVELDB_ATOMIC_CXXFLAGS="-std=c++0x"],
[ AC_MSG_RESULT(no)]
)
CXXFLAGS="$TEMP_CXXFLAGS"

LEVELDB_CPPFLAGS=
LIBLEVELDB=
LIBMEMENV=
@@ -940,6 +955,8 @@ AC_SUBST(TESTDEFS)
AC_SUBST(LEVELDB_TARGET_FLAGS)
AC_SUBST(MINIUPNPC_CPPFLAGS)
AC_SUBST(MINIUPNPC_LIBS)
AC_SUBST(LEVELDB_ATOMIC_CPPFLAGS)
AC_SUBST(LEVELDB_ATOMIC_CXXFLAGS)
AC_CONFIG_FILES([Makefile src/Makefile share/setup.nsi share/qt/Info.plist src/test/buildenv.py])
AC_CONFIG_FILES([qa/pull-tester/run-bitcoind-for-test.sh],[chmod +x qa/pull-tester/run-bitcoind-for-test.sh])
AC_CONFIG_FILES([qa/pull-tester/tests_config.py],[chmod +x qa/pull-tester/tests_config.py])
@@ -33,5 +33,5 @@ script: |

tar -xf ${UNSIGNED}
./detached-sig-apply.sh ${UNSIGNED} signature/osx
${WRAP_DIR}/genisoimage -no-cache-inodes -D -l -probe -V "Dash-Core" -no-pad -r -apple -o uncompressed.dmg signed-app
${WRAP_DIR}/genisoimage -no-cache-inodes -D -l -probe -V "Dash-Core" -no-pad -r -dir-mode 0755 -apple -o uncompressed.dmg signed-app
${WRAP_DIR}/dmg dmg uncompressed.dmg ${OUTDIR}/${SIGNED}
@@ -9,16 +9,10 @@ The following are some helpful notes on how to run Dash on your native platform.

### Unix

You need the Qt4 run-time libraries to run Dash-Qt. On Debian or Ubuntu:

    sudo apt-get install libqtgui4

Unpack the files into a directory and run:

- bin/32/dash-qt (GUI, 32-bit) or bin/32/dashd (headless, 32-bit)
- bin/64/dash-qt (GUI, 64-bit) or bin/64/dashd (headless, 64-bit)

- `bin/bitcoin-qt` (GUI) or
- `bin/bitcoind` (headless)

### Windows
@@ -2,8 +2,6 @@ Dash Core version 0.12.1 is now available from:

<https://www.dash.org/downloads/>

This is a new minor version release, including the BIP9, BIP68 and BIP112
softfork, various bugfixes and updated translations.

@@ -28,109 +26,10 @@ earlier.
Notable changes
===============

First version bits BIP9 softfork deployment
-------------------------------------------
Example item
---------------------------------------

This release includes a soft fork deployment to enforce [BIP68][],
[BIP112][] and [BIP113][] using the [BIP9][] deployment mechanism.

The deployment sets the block version number to 0x20000001 between
midnight 1st May 2016 and midnight 1st May 2017 to signal readiness for
deployment. The version number consists of 0x20000000 to indicate version
bits together with setting bit 0 to indicate support for this combined
deployment, shown as "csv" in the `getblockchaininfo` RPC call.
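
For illustration, the deployment status can be watched from a script. This is a minimal sketch, assuming the third-party `python-bitcoinrpc` package, placeholder RPC credentials and port, and the `bip9_softforks` field of `getblockchaininfo`:

    from bitcoinrpc.authproxy import AuthServiceProxy  # third-party python-bitcoinrpc

    # Placeholder credentials/port; match your node's rpcuser, rpcpassword and rpcport.
    rpc = AuthServiceProxy("http://rpcuser:rpcpassword@127.0.0.1:9998")
    info = rpc.getblockchaininfo()

    # Version-bits deployments are listed under "bip9_softforks"; the combined
    # BIP68/BIP112/BIP113 deployment is reported under the name "csv".
    csv = info.get("bip9_softforks", {}).get("csv", {})
    print("csv deployment status:", csv.get("status", "unknown"))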

For more information about the soft forking change, please see
<https://github.com/bitcoin/bitcoin/pull/7648>

This specific backport pull-request can be viewed at
<https://github.com/bitcoin/bitcoin/pull/7543>

[BIP9]: https://github.com/bitcoin/bips/blob/master/bip-0009.mediawiki
[BIP68]: https://github.com/bitcoin/bips/blob/master/bip-0068.mediawiki
[BIP112]: https://github.com/bitcoin/bips/blob/master/bip-0112.mediawiki
[BIP113]: https://github.com/bitcoin/bips/blob/master/bip-0113.mediawiki

BIP68 soft fork to enforce sequence locks for relative locktime
---------------------------------------------------------------

[BIP68][] introduces relative lock-time consensus-enforced semantics of
the sequence number field to enable a signed transaction input to remain
invalid for a defined period of time after confirmation of its corresponding
outpoint.
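
As a worked example of the encoding defined by [BIP68][], the relative lock is carried in the low bits of `nSequence`, with bit 22 switching between block-based and 512-second-based units (a sketch; the constants follow the BIP, the helper names are made up):

    # Relative lock-time encoding per BIP68; only applies to transactions with nVersion >= 2.
    SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31  # bit 31 set: relative lock-time disabled
    SEQUENCE_LOCKTIME_TYPE_FLAG    = 1 << 22  # bit 22 set: value counts 512-second units
    SEQUENCE_LOCKTIME_MASK         = 0x0000ffff

    def sequence_for_blocks(n_blocks):
        """nSequence requiring the spent outpoint to have n_blocks confirmations."""
        return n_blocks & SEQUENCE_LOCKTIME_MASK

    def sequence_for_seconds(n_seconds):
        """nSequence requiring the spent outpoint to be n_seconds old (512 s granularity)."""
        return SEQUENCE_LOCKTIME_TYPE_FLAG | ((n_seconds >> 9) & SEQUENCE_LOCKTIME_MASK)

    print(hex(sequence_for_blocks(144)), hex(sequence_for_seconds(86400)))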

For more information about the implementation, see
<https://github.com/bitcoin/bitcoin/pull/7184>

BIP112 soft fork to enforce OP_CHECKSEQUENCEVERIFY
--------------------------------------------------

[BIP112][] redefines the existing OP_NOP3 as OP_CHECKSEQUENCEVERIFY (CSV)
for a new opcode in the Bitcoin scripting system that in combination with
[BIP68][] allows execution pathways of a script to be restricted based
on the age of the output being spent.

For more information about the implementation, see
<https://github.com/bitcoin/bitcoin/pull/7524>

BIP113 locktime enforcement soft fork
-------------------------------------

Bitcoin Core 0.11.2 previously introduced mempool-only locktime
enforcement using GetMedianTimePast(). This release seeks to
consensus enforce the rule.

Bitcoin transactions currently may specify a locktime indicating when
they may be added to a valid block. Current consensus rules require
that blocks have a block header time greater than the locktime specified
in any transaction in that block.

Miners get to choose what time they use for their header time, with the
consensus rule being that no node will accept a block whose time is more
than two hours in the future. This creates an incentive for miners to
set their header times to future values in order to include locktimed
transactions which weren't supposed to be included for up to two more
hours.

The consensus rules also specify that valid blocks must have a header
time greater than the median of the 11 previous blocks. This
GetMedianTimePast() time has a key feature we generally associate with
time: it can't go backwards.

[BIP113][] specifies a soft fork enforced in this release that
weakens this perverse incentive for individual miners to use a future
time by requiring that valid blocks have a computed GetMedianTimePast()
greater than the locktime specified in any transaction in that block.

Mempool inclusion rules currently require transactions to be valid for
immediate inclusion in a block in order to be accepted into the mempool.
This release begins applying the BIP113 rule to received transactions,
so transactions whose lock time is greater than GetMedianTimePast() will
no longer be accepted into the mempool.

**Implication for miners:** you will begin rejecting transactions that
would not be valid under BIP113, which will prevent you from producing
invalid blocks when BIP113 is enforced on the network. Any
transactions which are valid under the current rules but not yet valid
under the BIP113 rules will either be mined by other miners or delayed
until they are valid under BIP113. Note, however, that time-based
locktime transactions are more or less unseen on the network currently.

**Implication for users:** GetMedianTimePast() always trails behind the
current time, so a transaction locktime set to the present time will be
rejected by nodes running this release until the median time moves
forward. To compensate, subtract one hour (3,600 seconds) from your
locktimes to allow those transactions to be included in mempools at
approximately the expected time.
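
For illustration, the suggested compensation is a simple offset on the Unix timestamp used as the lock time (a minimal sketch; the helper name is made up):

    import time

    # Back a time-based nLockTime off by one hour so that nodes enforcing the
    # BIP113 median-time-past rule will still accept the transaction now.
    MEDIAN_TIME_COMPENSATION = 3600  # seconds, as recommended above

    def locktime_for_now():
        return int(time.time()) - MEDIAN_TIME_COMPENSATION

    print(locktime_for_now())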

For more information about the implementation, see
<https://github.com/bitcoin/bitcoin/pull/6566>

Miscellaneous
-------------

The p2p alert system is off by default. To turn on, use `-alert` with
startup configuration.
Example text.

0.12.1 Change log
=================
@@ -140,57 +39,48 @@ behavior, not code moves, refactors and string updates. For convenience in locating
the code changes and accompanying discussion, both the pull request and
git merge commit are mentioned.

### RPC and other APIs
- #7739 `7ffc2bd` Add abandoned status to listtransactions (jonasschnelli)
### RPC and REST

Asm script outputs replacements for OP_NOP2 and OP_NOP3
-------------------------------------------------------

OP_NOP2 has been renamed to OP_CHECKLOCKTIMEVERIFY by [BIP
65](https://github.com/bitcoin/bips/blob/master/bip-0065.mediawiki)

OP_NOP3 has been renamed to OP_CHECKSEQUENCEVERIFY by [BIP
112](https://github.com/bitcoin/bips/blob/master/bip-0112.mediawiki)

The following outputs are affected by this change:
- RPC `getrawtransaction` (in verbose mode)
- RPC `decoderawtransaction`
- RPC `decodescript`
- REST `/rest/tx/` (JSON format)
- REST `/rest/block/` (JSON format when including extended tx details)
- `bitcoin-tx -json`

### Configuration and command-line options

### Block and transaction handling
- #7543 `834aaef` Backport BIP9, BIP68 and BIP112 with softfork (btcdrak)

### P2P protocol and network code
- #7804 `90f1d24` Track block download times per individual block (sipa)
- #7832 `4c3a00d` Reduce block timeout to 10 minutes (laanwj)

### Validation
- #7821 `4226aac` init: allow shutdown during 'Activating best chain...' (laanwj)
- #7835 `46898e7` Version 2 transactions remain non-standard until CSV activates (sdaftuar)

### Build system
- #7487 `00d57b4` Workaround Travis-side CI issues (luke-jr)
- #7606 `a10da9a` No need to set -L and --location for curl (MarcoFalke)
- #7614 `ca8f160` Add curl to packages (now needed for depends) (luke-jr)
- #7776 `a784675` Remove unnecessary executables from gitian release (laanwj)

### Wallet
- #7715 `19866c1` Fix calculation of balances and available coins. (morcos)

### GUI

### Tests and QA

### Miscellaneous
- #7617 `f04f4fd` Fix markdown syntax and line terminate LogPrint (MarcoFalke)
- #7747 `4d035bc` added depends cross compile info (accraze)
- #7741 `a0cea89` Mark p2p alert system as deprecated (btcdrak)
- #7780 `c5f94f6` Disable bad-chain alert (btcdrak)

Credits
=======

Thanks to everyone who directly contributed to this release:

- accraze
- Alex Morcos
- BtcDrak
- Jonas Schnelli
- Luke Dashjr
- MarcoFalke
- Mark Friedenbach
- NicolasDorier
- Pieter Wuille
- Suhas Daftuar
- Wladimir J. van der Laan

As well as everyone that helped translating on [Transifex](https://www.transifex.com/projects/p/bitcoin/).

@@ -213,3 +103,4 @@ These release are considered obsolete. Old changelogs can be found here:
- [v0.11.0](release-notes/dash/release-notes-0.11.0.md) released Jan/15/2015
- [v0.10.x](release-notes/dash/release-notes-0.10.0.md) released Sep/25/2014
- [v0.9.x](release-notes/dash/release-notes-0.9.0.md) released Mar/13/2014
891 doc/release-notes/release-notes-0.12.0.md (new file)
@@ -0,0 +1,891 @@
|
||||
Bitcoin Core version 0.12.0 is now available from:
|
||||
|
||||
<https://bitcoin.org/bin/bitcoin-core-0.12.0/>
|
||||
|
||||
This is a new major version release, bringing new features and other improvements.
|
||||
|
||||
Please report bugs using the issue tracker at github:
|
||||
|
||||
<https://github.com/bitcoin/bitcoin/issues>
|
||||
|
||||
Upgrading and downgrading
|
||||
=========================
|
||||
|
||||
How to Upgrade
|
||||
--------------
|
||||
|
||||
If you are running an older version, shut it down. Wait until it has completely
|
||||
shut down (which might take a few minutes for older versions), then run the
|
||||
installer (on Windows) or just copy over /Applications/Bitcoin-Qt (on Mac) or
|
||||
bitcoind/bitcoin-qt (on Linux).
|
||||
|
||||
Downgrade warning
|
||||
-----------------
|
||||
|
||||
### Downgrade to a version < 0.10.0
|
||||
|
||||
Because release 0.10.0 and later makes use of headers-first synchronization and
|
||||
parallel block download (see further), the block files and databases are not
|
||||
backwards-compatible with pre-0.10 versions of Bitcoin Core or other software:
|
||||
|
||||
* Blocks will be stored on disk out of order (in the order they are
|
||||
received, really), which makes it incompatible with some tools or
|
||||
other programs. Reindexing using earlier versions will also not work
|
||||
anymore as a result of this.
|
||||
|
||||
* The block index database will now hold headers for which no block is
|
||||
stored on disk, which earlier versions won't support.
|
||||
|
||||
If you want to be able to downgrade smoothly, make a backup of your entire data
|
||||
directory. Without this your node will need start syncing (or importing from
|
||||
bootstrap.dat) anew afterwards. It is possible that the data from a completely
|
||||
synchronised 0.10 node may be usable in older versions as-is, but this is not
|
||||
supported and may break as soon as the older version attempts to reindex.
|
||||
|
||||
This does not affect wallet forward or backward compatibility.
|
||||
|
||||
### Downgrade to a version < 0.12.0
|
||||
|
||||
Because release 0.12.0 and later will obfuscate the chainstate on every
|
||||
fresh sync or reindex, the chainstate is not backwards-compatible with
|
||||
pre-0.12 versions of Bitcoin Core or other software.
|
||||
|
||||
If you want to downgrade after you have done a reindex with 0.12.0 or later,
|
||||
you will need to reindex when you first start Bitcoin Core version 0.11 or
|
||||
earlier.
|
||||
|
||||
Notable changes
|
||||
===============
|
||||
|
||||
Signature validation using libsecp256k1
|
||||
---------------------------------------
|
||||
|
||||
ECDSA signatures inside Bitcoin transactions now use validation using
|
||||
[libsecp256k1](https://github.com/bitcoin-core/secp256k1) instead of OpenSSL.
|
||||
|
||||
Depending on the platform, this means a significant speedup for raw signature
|
||||
validation speed. The advantage is largest on x86_64, where validation is over
|
||||
five times faster. In practice, this translates to a raw reindexing and new
|
||||
block validation times that are less than half of what it was before.
|
||||
|
||||
Libsecp256k1 has undergone very extensive testing and validation.
|
||||
|
||||
A side effect of this change is that libconsensus no longer depends on OpenSSL.
|
||||
|
||||
Reduce upload traffic
|
||||
---------------------
|
||||
|
||||
A major part of the outbound traffic is caused by serving historic blocks to
|
||||
other nodes in initial block download state.
|
||||
|
||||
It is now possible to reduce the total upload traffic via the `-maxuploadtarget`
|
||||
parameter. This is *not* a hard limit but a threshold to minimize the outbound
|
||||
traffic. When the limit is about to be reached, the uploaded data is cut by not
|
||||
serving historic blocks (blocks older than one week).
|
||||
Moreover, any SPV peer is disconnected when they request a filtered block.
|
||||
|
||||
This option can be specified in MiB per day and is turned off by default
|
||||
(`-maxuploadtarget=0`).
|
||||
The recommended minimum is 144 * MAX_BLOCK_SIZE (currently 144MB) per day.
|
||||
|
||||
Whitelisted peers will never be disconnected, although their traffic counts for
|
||||
calculating the target.
|
||||
|
||||
A more detailed documentation about keeping traffic low can be found in
|
||||
[/doc/reduce-traffic.md](/doc/reduce-traffic.md).
|
||||
|
||||
Direct headers announcement (BIP 130)
|
||||
-------------------------------------
|
||||
|
||||
Between compatible peers, [BIP 130](https://github.com/bitcoin/bips/blob/master/bip-0130.mediawiki)
|
||||
direct headers announcement is used. This means that blocks are advertised by
|
||||
announcing their headers directly, instead of just announcing the hash. In a
|
||||
reorganization, all new headers are sent, instead of just the new tip. This
|
||||
can often prevent an extra roundtrip before the actual block is downloaded.
|
||||
|
||||
With this change, pruning nodes are now able to relay new blocks to compatible
|
||||
peers.
|
||||
|
||||
Memory pool limiting
|
||||
--------------------
|
||||
|
||||
Previous versions of Bitcoin Core had their mempool limited by checking
|
||||
a transaction's fees against the node's minimum relay fee. There was no
|
||||
upper bound on the size of the mempool and attackers could send a large
|
||||
number of transactions paying just slightly more than the default minimum
|
||||
relay fee to crash nodes with relatively low RAM. A temporary workaround
|
||||
for previous versions of Bitcoin Core was to raise the default minimum
|
||||
relay fee.
|
||||
|
||||
Bitcoin Core 0.12 will have a strict maximum size on the mempool. The
|
||||
default value is 300 MB and can be configured with the `-maxmempool`
|
||||
parameter. Whenever a transaction would cause the mempool to exceed
|
||||
its maximum size, the transaction that (along with in-mempool descendants) has
|
||||
the lowest total feerate (as a package) will be evicted and the node's effective
|
||||
minimum relay feerate will be increased to match this feerate plus the initial
|
||||
minimum relay feerate. The initial minimum relay feerate is set to
|
||||
1000 satoshis per kB.
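
The eviction rule can be illustrated with a small toy model (this is not Bitcoin Core's actual code or data structures; names and units are simplified for the sketch):

    # Toy model of mempool limiting: each entry is (txid, package_fee_satoshi, package_size_bytes).
    INCREMENTAL_FEERATE = 1000  # satoshis per kB, the initial minimum relay feerate

    def trim_mempool(entries, max_bytes, min_relay_feerate):
        """Evict the lowest-feerate packages until the pool fits, bumping the node's
        effective minimum relay feerate to the evicted feerate plus the increment."""
        used = sum(size for _, _, size in entries)
        entries = sorted(entries, key=lambda e: e[1] * 1000.0 / e[2])  # sat/kB, ascending
        while used > max_bytes and entries:
            _txid, fee, size = entries.pop(0)
            used -= size
            min_relay_feerate = max(min_relay_feerate, fee * 1000.0 / size + INCREMENTAL_FEERATE)
        return entries, min_relay_feerate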
|
||||
|
||||
Bitcoin Core 0.12 also introduces new default policy limits on the length and
|
||||
size of unconfirmed transaction chains that are allowed in the mempool
|
||||
(generally limiting the length of unconfirmed chains to 25 transactions, with a
|
||||
total size of 101 KB). These limits can be overridden using command line
|
||||
arguments; see the extended help (`--help -help-debug`) for more information.
|
||||
|
||||
Opt-in Replace-by-fee transactions
|
||||
----------------------------------
|
||||
|
||||
It is now possible to replace transactions in the transaction memory pool of
|
||||
Bitcoin Core 0.12 nodes. Bitcoin Core will only allow replacement of
|
||||
transactions which have any of their inputs' `nSequence` number set to less
|
||||
than `0xffffffff - 1`. Moreover, a replacement transaction may only be
accepted when it pays sufficient fee, as described in
[BIP 125](https://github.com/bitcoin/bips/blob/master/bip-0125.mediawiki).
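
For illustration, the signaling rule can be checked against the JSON form of a transaction as returned by the `decoderawtransaction` or verbose `getrawtransaction` RPCs (a sketch that tests direct signaling only, not signaling inherited from unconfirmed ancestors):

    # A transaction opts in to BIP125 replacement if any input's sequence number is
    # less than 0xffffffff - 1, i.e. at most 0xffffffff - 2.
    MAX_BIP125_RBF_SEQUENCE = 0xffffffff - 2

    def signals_rbf(decoded_tx):
        return any(vin["sequence"] <= MAX_BIP125_RBF_SEQUENCE for vin in decoded_tx["vin"])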
|
||||
|
||||
Transaction replacement can be disabled with a new command line option,
|
||||
`-mempoolreplacement=0`. Transactions signaling replacement under BIP125 will
|
||||
still be allowed into the mempool in this configuration, but replacements will
|
||||
be rejected. This option is intended for miners who want to continue the
|
||||
transaction selection behavior of previous releases.
|
||||
|
||||
The `-mempoolreplacement` option is *not recommended* for wallet users seeking
|
||||
to avoid receipt of unconfirmed opt-in transactions, because this option does
|
||||
not prevent transactions which are replaceable under BIP 125 from being accepted
|
||||
(only subsequent replacements, which other nodes on the network that implement
|
||||
BIP 125 are likely to relay and mine). Wallet users wishing to detect whether
|
||||
a transaction is subject to replacement under BIP 125 should instead use the
|
||||
updated RPC calls `gettransaction` and `listtransactions`, which now have an
|
||||
additional field in the output indicating if a transaction is replaceable under
|
||||
BIP125 ("bip125-replaceable").
|
||||
|
||||
Note that the wallet in Bitcoin Core 0.12 does not yet have support for
|
||||
creating transactions that would be replaceable under BIP 125.
|
||||
|
||||
|
||||
RPC: Random-cookie RPC authentication
|
||||
-------------------------------------
|
||||
|
||||
When no `-rpcpassword` is specified, the daemon now uses a special 'cookie'
|
||||
file for authentication. This file is generated with random content when the
|
||||
daemon starts, and deleted when it exits. Its contents are used as
|
||||
authentication token. Read access to this file controls who can access through
|
||||
RPC. By default it is stored in the data directory but its location can be
|
||||
overridden with the option `-rpccookiefile`.
|
||||
|
||||
This is similar to Tor's CookieAuthentication: see
|
||||
https://www.torproject.org/docs/tor-manual.html.en
|
||||
|
||||
This allows running bitcoind without having to do any manual configuration.
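
A minimal sketch of using the cookie from a script, with only the Python standard library; the datadir path and port are assumptions for a default mainnet node on Linux:

    import base64, json, os, urllib.request

    # The cookie file holds "username:password" and is regenerated on every start.
    with open(os.path.expanduser("~/.bitcoin/.cookie")) as f:
        cookie = f.read().strip()

    request = urllib.request.Request(
        "http://127.0.0.1:8332/",
        data=json.dumps({"jsonrpc": "1.0", "id": "doc",
                         "method": "getblockcount", "params": []}).encode(),
        headers={"Authorization": "Basic " + base64.b64encode(cookie.encode()).decode(),
                 "Content-Type": "application/json"},
    )
    print(json.loads(urllib.request.urlopen(request).read())["result"])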
|
||||
|
||||
Relay: Any sequence of pushdatas in OP_RETURN outputs now allowed
|
||||
-----------------------------------------------------------------
|
||||
|
||||
Previously OP_RETURN outputs with a payload were only relayed and mined if they
|
||||
had a single pushdata. This restriction has been lifted to allow any
|
||||
combination of data pushes and numeric constant opcodes (OP_1 to OP_16) after
|
||||
the OP_RETURN. The limit on OP_RETURN output size is now applied to the entire
serialized scriptPubKey, 83 bytes by default (the previous 80-byte default plus
three bytes of overhead).
|
||||
|
||||
Relay and Mining: Priority transactions
|
||||
---------------------------------------
|
||||
|
||||
Bitcoin Core has a heuristic 'priority' based on coin value and age. This
|
||||
calculation is used for relaying of transactions which do not pay the
|
||||
minimum relay fee, and can be used as an alternative way of sorting
|
||||
transactions for mined blocks. Bitcoin Core will relay transactions with
|
||||
insufficient fees depending on the setting of `-limitfreerelay=<r>` (default:
|
||||
`r=15` kB per minute) and `-blockprioritysize=<s>`.
|
||||
|
||||
In Bitcoin Core 0.12, when mempool limit has been reached a higher minimum
|
||||
relay fee takes effect to limit memory usage. Transactions which do not meet
|
||||
this higher effective minimum relay fee will not be relayed or mined even if
|
||||
they rank highly according to the priority heuristic.
|
||||
|
||||
The mining of transactions based on their priority is also now disabled by
|
||||
default. To re-enable it, simply set `-blockprioritysize=<n>` where `<n>` is the size
|
||||
in bytes of your blocks to reserve for these transactions. The old default was
|
||||
50k, so to retain approximately the same policy, you would set
|
||||
`-blockprioritysize=50000`.
|
||||
|
||||
Additionally, as a result of computational simplifications, the priority value
|
||||
used for transactions received with unconfirmed inputs is lower than in prior
|
||||
versions due to avoiding recomputing the amounts as input transactions confirm.
|
||||
|
||||
External miner policy set via the `prioritisetransaction` RPC to rank
|
||||
transactions already in the mempool continues to work as it has previously.
|
||||
Note, however, that if mining priority transactions is left disabled, the
|
||||
priority delta will be ignored and only the fee metric will be effective.
|
||||
|
||||
This internal automatic prioritization handling is being considered for removal
|
||||
entirely in Bitcoin Core 0.13, and it is at this time undecided whether the
|
||||
more accurate priority calculation for chained unconfirmed transactions will be
|
||||
restored. Community direction on this topic is particularly requested to help
|
||||
set project priorities.
|
||||
|
||||
Automatically use Tor hidden services
|
||||
-------------------------------------
|
||||
|
||||
Starting with Tor version 0.2.7.1 it is possible, through Tor's control socket
|
||||
API, to create and destroy 'ephemeral' hidden services programmatically.
|
||||
Bitcoin Core has been updated to make use of this.
|
||||
|
||||
This means that if Tor is running (and proper authorization is available),
|
||||
Bitcoin Core automatically creates a hidden service to listen on, without
|
||||
manual configuration. Bitcoin Core will also use Tor automatically to connect
|
||||
to other .onion nodes if the control socket can be successfully opened. This
|
||||
will positively affect the number of available .onion nodes and their usage.
|
||||
|
||||
This new feature is enabled by default if Bitcoin Core is listening, and
|
||||
a connection to Tor can be made. It can be configured with the `-listenonion`,
|
||||
`-torcontrol` and `-torpassword` settings. To show verbose debugging
|
||||
information, pass `-debug=tor`.
|
||||
|
||||
Notifications through ZMQ
|
||||
-------------------------
|
||||
|
||||
Bitcoind can now (optionally) asynchronously notify clients through a
|
||||
ZMQ-based PUB socket of the arrival of new transactions and blocks.
|
||||
This feature requires installation of the ZMQ C API library 4.x and
|
||||
configuring its use through the command line or configuration file.
|
||||
Please see [docs/zmq.md](/doc/zmq.md) for details of operation.
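
A minimal subscriber sketch using `pyzmq`, assuming the daemon was started with `-zmqpubhashblock=tcp://127.0.0.1:28332` (the topic names and option are documented in doc/zmq.md):

    import zmq  # pyzmq, a Python binding for the ZMQ C API

    ctx = zmq.Context()
    sub = ctx.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, b"hashblock")  # other topics: hashtx, rawblock, rawtx
    sub.connect("tcp://127.0.0.1:28332")

    while True:
        parts = sub.recv_multipart()          # [topic, payload, ...]
        topic, payload = parts[0], parts[1]   # payload is the raw block hash bytes
        print(topic.decode(), payload.hex())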
|
||||
|
||||
Wallet: Transaction fees
|
||||
------------------------
|
||||
|
||||
Various improvements have been made to how the wallet calculates
|
||||
transaction fees.
|
||||
|
||||
Users can decide to pay a predefined fee rate by setting `-paytxfee=<n>`
|
||||
(or `settxfee <n>` rpc during runtime). A value of `n=0` signals Bitcoin
|
||||
Core to use floating fees. By default, Bitcoin Core will use floating
|
||||
fees.
|
||||
|
||||
Based on past transaction data, floating fees approximate the fees
|
||||
required to get into the `m`th block from now. This is configurable
|
||||
with `-txconfirmtarget=<m>` (default: `2`).
|
||||
|
||||
Sometimes, it is not possible to give good estimates, or an estimate
|
||||
at all. Therefore, a fallback value can be set with `-fallbackfee=<f>`
|
||||
(default: `0.0002` BTC/kB).
|
||||
|
||||
At all times, Bitcoin Core will cap fees at `-maxtxfee=<x>` (default:
|
||||
0.10) BTC.
|
||||
Furthermore, Bitcoin Core will never create transactions paying less than
|
||||
the current minimum relay fee.
|
||||
Finally, a user can set the minimum fee rate for all transactions with
|
||||
`-mintxfee=<i>`, which defaults to 1000 satoshis per kB.
|
||||
|
||||
Wallet: Negative confirmations and conflict detection
|
||||
-----------------------------------------------------
|
||||
|
||||
The wallet will now report a negative number for confirmations that indicates
|
||||
how deep in the block chain the conflict is found. For example, if a transaction
|
||||
A has 5 confirmations and spends the same input as a wallet transaction B, B
|
||||
will be reported as having -5 confirmations. If another wallet transaction C
|
||||
spends an output from B, it will also be reported as having -5 confirmations.
|
||||
To detect conflicts with historical transactions in the chain a one-time
|
||||
`-rescan` may be needed.
|
||||
|
||||
Unlike earlier versions, unconfirmed but non-conflicting transactions will never
|
||||
get a negative confirmation count. They are not treated as spendable unless
|
||||
they're coming from ourself (change) and accepted into our local mempool,
|
||||
however. The new "trusted" field in the `listtransactions` RPC output
|
||||
indicates whether outputs of an unconfirmed transaction are considered
|
||||
spendable.
|
||||
|
||||
Wallet: Merkle branches removed
|
||||
-------------------------------
|
||||
|
||||
Previously, every wallet transaction stored a Merkle branch to prove its
|
||||
presence in blocks. This wasn't being used for more than an expensive
|
||||
sanity check. Since 0.12, these are no longer stored. When loading a
|
||||
0.12 wallet into an older version, it will automatically rescan to avoid
|
||||
failed checks.
|
||||
|
||||
Wallet: Pruning
|
||||
---------------
|
||||
|
||||
With 0.12 it is possible to use wallet functionality in pruned mode.
|
||||
This can reduce the disk usage from currently around 60 GB to
|
||||
around 2 GB.
|
||||
|
||||
However, rescans as well as the RPCs `importwallet`, `importaddress`,
|
||||
`importprivkey` are disabled.
|
||||
|
||||
To enable block pruning set `prune=<N>` on the command line or in
|
||||
`bitcoin.conf`, where `N` is the number of MiB to allot for
|
||||
raw block & undo data.
|
||||
|
||||
A value of 0 disables pruning. The minimal value above 0 is 550. Your
|
||||
wallet is as secure with high values as it is with low ones. Higher
|
||||
values merely ensure that your node will not shut down upon blockchain
|
||||
reorganizations of more than 2 days - which are unlikely to happen in
|
||||
practice. In future releases, a higher value may also help the network
|
||||
as a whole: stored blocks could be served to other nodes.
|
||||
|
||||
For further information about pruning, you may also consult the [release
|
||||
notes of v0.11.0](https://github.com/bitcoin/bitcoin/blob/v0.11.0/doc/release-notes.md#block-file-pruning).
|
||||
|
||||
`NODE_BLOOM` service bit
|
||||
------------------------
|
||||
|
||||
Support for the `NODE_BLOOM` service bit, as described in [BIP
|
||||
111](https://github.com/bitcoin/bips/blob/master/bip-0111.mediawiki), has been
|
||||
added to the P2P protocol code.
|
||||
|
||||
BIP 111 defines a service bit to allow peers to advertise that they support
|
||||
bloom filters (such as used by SPV clients) explicitly. It also bumps the protocol
|
||||
version to allow peers to identify old nodes which allow bloom filtering of the
|
||||
connection despite lacking the new service bit.
|
||||
|
||||
In this version, it is only enforced for peers that send protocol versions
|
||||
`>=70011`. For the next major version it is planned that this restriction will be
|
||||
removed. It is recommended to update SPV clients to check for the `NODE_BLOOM`
|
||||
service bit for nodes that report versions newer than 70011.
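
A small sketch of the check such a client could make against one entry of the `getpeerinfo` RPC output, where `services` is a hex-encoded bitmask (`NODE_BLOOM` is bit 2, per BIP 111):

    NODE_BLOOM = 1 << 2  # service flag defined by BIP 111

    def peer_serves_bloom(peer):
        """True if the peer advertises NODE_BLOOM, or predates the flag entirely."""
        if int(peer["services"], 16) & NODE_BLOOM:
            return True
        # Nodes older than protocol 70011 lack the bit but still serve bloom filters.
        return peer.get("version", 0) < 70011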
|
||||
|
||||
Option parsing behavior
|
||||
-----------------------
|
||||
|
||||
Command line options are now parsed strictly in the order in which they are
|
||||
specified. It used to be the case that `-X -noX` ends up, unintuitively, with X
|
||||
set, as `-X` had precedence over `-noX`. This is no longer the case. Like for
|
||||
other software, the last specified value for an option will hold.
|
||||
|
||||
RPC: Low-level API changes
|
||||
--------------------------
|
||||
|
||||
- Monetary amounts can be provided as strings. This means that for example the
|
||||
argument to sendtoaddress can be "0.0001" instead of 0.0001. This can be an
|
||||
advantage if a JSON library insists on using a lossy floating point type for
|
||||
numbers, which would be dangerous for monetary amounts.
|
||||
|
||||
* The `asm` property of each scriptSig now contains the decoded signature hash
|
||||
type for each signature that provides a valid defined hash type.
|
||||
|
||||
* OP_NOP2 has been renamed to OP_CHECKLOCKTIMEVERIFY by [BIP 65](https://github.com/bitcoin/bips/blob/master/bip-0065.mediawiki)
|
||||
|
||||
The following items contain assembly representations of scriptSig signatures
|
||||
and are affected by this change:
|
||||
|
||||
- RPC `getrawtransaction`
|
||||
- RPC `decoderawtransaction`
|
||||
- RPC `decodescript`
|
||||
- REST `/rest/tx/` (JSON format)
|
||||
- REST `/rest/block/` (JSON format when including extended tx details)
|
||||
- `bitcoin-tx -json`
|
||||
|
||||
For example, the `scriptSig.asm` property of a transaction input that
|
||||
previously showed an assembly representation of:
|
||||
|
||||
304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001 400000 OP_NOP2
|
||||
|
||||
now shows as:
|
||||
|
||||
304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c5090[ALL] 400000 OP_CHECKLOCKTIMEVERIFY
|
||||
|
||||
Note that the output of the RPC `decodescript` did not change because it is
|
||||
configured specifically to process scriptPubKey and not scriptSig scripts.
|
||||
|
||||
RPC: SSL support dropped
|
||||
------------------------
|
||||
|
||||
SSL support for RPC, previously enabled by the option `rpcssl` has been dropped
|
||||
from both the client and the server. This was done in preparation for removing
|
||||
the dependency on OpenSSL for the daemon completely.
|
||||
|
||||
Trying to use `rpcssl` will result in an error:
|
||||
|
||||
Error: SSL mode for RPC (-rpcssl) is no longer supported.
|
||||
|
||||
If you are one of the few people that relies on this feature, a flexible
|
||||
migration path is to use `stunnel`. This is a utility that can tunnel
|
||||
arbitrary TCP connections inside SSL. On e.g. Ubuntu it can be installed with:
|
||||
|
||||
sudo apt-get install stunnel4
|
||||
|
||||
Then, to tunnel an SSL connection on 28332 to an RPC server bound on localhost on port 18332 do:
|
||||
|
||||
stunnel -d 28332 -r 127.0.0.1:18332 -p stunnel.pem -P ''
|
||||
|
||||
It can also be set up system-wide in inetd style.
|
||||
|
||||
Another way to re-attain SSL would be to set up an httpd reverse proxy. This solution
would allow the use of different authentication, load balancing, on-the-fly compression and
|
||||
caching. A sample config for apache2 could look like:
|
||||
|
||||
Listen 443
|
||||
|
||||
NameVirtualHost *:443
|
||||
<VirtualHost *:443>
|
||||
|
||||
SSLEngine On
|
||||
SSLCertificateFile /etc/apache2/ssl/server.crt
|
||||
SSLCertificateKeyFile /etc/apache2/ssl/server.key
|
||||
|
||||
<Location /bitcoinrpc>
|
||||
ProxyPass http://127.0.0.1:8332/
|
||||
ProxyPassReverse http://127.0.0.1:8332/
|
||||
# optional enable digest auth
|
||||
# AuthType Digest
|
||||
# ...
|
||||
|
||||
# optional bypass bitcoind rpc basic auth
|
||||
# RequestHeader set Authorization "Basic <hash>"
|
||||
# get the <hash> from the shell with: base64 <<< bitcoinrpc:<password>
|
||||
</Location>
|
||||
|
||||
# Or, balance the load:
|
||||
# ProxyPass / balancer://balancer_cluster_name
|
||||
|
||||
</VirtualHost>
|
||||
|
||||
Mining Code Changes
|
||||
-------------------
|
||||
|
||||
The mining code in 0.12 has been optimized to be significantly faster and use less
|
||||
memory. As part of these changes, consensus critical calculations are cached on a
|
||||
transaction's acceptance into the mempool and the mining code now relies on the
|
||||
consistency of the mempool to assemble blocks. However, all blocks are still tested
|
||||
for validity after assembly.
|
||||
|
||||
Other P2P Changes
|
||||
-----------------
|
||||
|
||||
The list of banned peers is now stored on disk rather than in memory.
|
||||
Restarting bitcoind will no longer clear out the list of banned peers; instead
|
||||
a new RPC call (`clearbanned`) can be used to manually clear the list. The new
|
||||
`setban` RPC call can also be used to manually ban or unban a peer.
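
For illustration, a short sketch of the new calls, again assuming the third-party `python-bitcoinrpc` package and placeholder credentials:

    from bitcoinrpc.authproxy import AuthServiceProxy  # third-party python-bitcoinrpc

    rpc = AuthServiceProxy("http://rpcuser:rpcpassword@127.0.0.1:8332")
    rpc.setban("192.168.0.2", "add", 86400)  # ban an address for 24 hours
    print(rpc.listbanned())                  # the ban list now survives restarts
    rpc.clearbanned()                        # wipe the on-disk ban list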
|
||||
|
||||
0.12.0 Change log
|
||||
=================
|
||||
|
||||
Detailed release notes follow. This overview includes changes that affect
|
||||
behavior, not code moves, refactors and string updates. For convenience in locating
|
||||
the code changes and accompanying discussion, both the pull request and
|
||||
git merge commit are mentioned.
|
||||
|
||||
### RPC and REST
|
||||
|
||||
- #6121 `466f0ea` Convert entire source tree from json_spirit to UniValue (Jonas Schnelli)
|
||||
- #6234 `d38cd47` fix rpcmining/getblocktemplate univalue transition logic error (Jonas Schnelli)
|
||||
- #6239 `643114f` Don't go through double in AmountFromValue and ValueFromAmount (Wladimir J. van der Laan)
|
||||
- #6266 `ebab5d3` Fix univalue handling of \u0000 characters. (Daniel Kraft)
|
||||
- #6276 `f3d4dbb` Fix getbalance * 0 (Tom Harding)
|
||||
- #6257 `5ebe7db` Add `paytxfee` and `errors` JSON fields where appropriate (Stephen)
|
||||
- #6271 `754aae5` New RPC command disconnectnode (Alex van der Peet)
|
||||
- #6158 `0abfa8a` Add setban/listbanned RPC commands (Jonas Schnelli)
|
||||
- #6307 `7ecdcd9` rpcban fixes (Jonas Schnelli)
|
||||
- #6290 `5753988` rpc: make `gettxoutsettinfo` run lock-free (Wladimir J. van der Laan)
|
||||
- #6262 `247b914` Return all available information via RPC call "validateaddress" (dexX7)
|
||||
- #6339 `c3f0490` UniValue: don't escape solidus, keep escaping of reverse solidus (Jonas Schnelli)
|
||||
- #6353 `6bcb0a2` Show softfork status in getblockchaininfo (Wladimir J. van der Laan)
|
||||
- #6247 `726e286` Add getblockheader RPC call (Peter Todd)
|
||||
- #6362 `d6db115` Fix null id in RPC response during startup (Forrest Voight)
|
||||
- #5486 `943b322` [REST] JSON support for /rest/headers (Jonas Schnelli)
|
||||
- #6379 `c52e8b3` rpc: Accept scientific notation for monetary amounts in JSON (Wladimir J. van der Laan)
|
||||
- #6388 `fd5dfda` rpc: Implement random-cookie based authentication (Wladimir J. van der Laan)
|
||||
- #6457 `3c923e8` Include pruned state in chaininfo.json (Simon Males)
|
||||
- #6456 `bfd807f` rpc: Avoid unnecessary parsing roundtrip in number formatting, fix locale issue (Wladimir J. van der Laan)
|
||||
- #6380 `240b30e` rpc: Accept strings in AmountFromValue (Wladimir J. van der Laan)
|
||||
- #6346 `6bb2805` Add OP_RETURN support in createrawtransaction RPC call, add tests. (paveljanik)
|
||||
- #6013 `6feeec1` [REST] Add memory pool API (paveljanik)
|
||||
- #6576 `da9beb2` Stop parsing JSON after first finished construct. (Daniel Kraft)
|
||||
- #5677 `9aa9099` libevent-based http server (Wladimir J. van der Laan)
|
||||
- #6633 `bbc2b39` Report minimum ping time in getpeerinfo (Matt Corallo)
|
||||
- #6648 `cd381d7` Simplify logic of REST request suffix parsing. (Daniel Kraft)
|
||||
- #6695 `5e21388` libevent http fixes (Wladimir J. van der Laan)
|
||||
- #5264 `48efbdb` show scriptSig signature hash types in transaction decodes. fixes #3166 (mruddy)
|
||||
- #6719 `1a9f19a` Make HTTP server shutdown more graceful (Wladimir J. van der Laan)
|
||||
- #6859 `0fbfc51` http: Restrict maximum size of http + headers (Wladimir J. van der Laan)
|
||||
- #5936 `bf7c195` [RPC] Add optional locktime to createrawtransaction (Tom Harding)
|
||||
- #6877 `26f5b34` rpc: Add maxmempool and effective min fee to getmempoolinfo (Wladimir J. van der Laan)
|
||||
- #6970 `92701b3` Fix crash in validateaddress with -disablewallet (Wladimir J. van der Laan)
|
||||
- #5574 `755b4ba` Expose GUI labels in RPC as comments (Luke-Jr)
|
||||
- #6990 `dbd2c13` http: speed up shutdown (Wladimir J. van der Laan)
|
||||
- #7013 `36baa9f` Remove LOCK(cs_main) from decodescript (Peter Todd)
|
||||
- #6999 `972bf9c` add (max)uploadtarget infos to getnettotals RPC help (Jonas Schnelli)
|
||||
- #7011 `31de241` Add mediantime to getblockchaininfo (Peter Todd)
|
||||
- #7065 `f91e29f` http: add Boost 1.49 compatibility (Wladimir J. van der Laan)
|
||||
- #7087 `be281d8` [Net]Add -enforcenodebloom option (Patrick Strateman)
|
||||
- #7044 `438ee59` RPC: Added additional config option for multiple RPC users. (Gregory Sanders)
|
||||
- #7072 `c143c49` [RPC] Add transaction size to JSON output (Nikita Zhavoronkov)
|
||||
- #7022 `9afbd96` Change default block priority size to 0 (Alex Morcos)
|
||||
- #7141 `c0c08c7` rpc: Don't translate warning messages (Wladimir J. van der Laan)
|
||||
- #7312 `fd4bd50` Add RPC call abandontransaction (Alex Morcos)
|
||||
- #7222 `e25b158` RPC: indicate which transactions are replaceable (Suhas Daftuar)
|
||||
- #7472 `b2f2b85` rpc: Add WWW-Authenticate header to 401 response (Wladimir J. van der Laan)
|
||||
- #7469 `9cb31e6` net.h fix spelling: misbeha{b,v}ing (Matt)
|
||||
|
||||
### Configuration and command-line options
|
||||
|
||||
- #6164 `8d05ec7` Allow user to use -debug=1 to enable all debugging (lpescher)
|
||||
- #5288 `4452205` Added `-whiteconnections=<n>` option (Josh Lehan)
|
||||
- #6284 `10ac38e` Fix argument parsing oddity with -noX (Wladimir J. van der Laan)
|
||||
- #6489 `c9c017a` Give a better error message if system clock is bad (Casey Rodarmor)
|
||||
- #6462 `c384800` implement uacomment config parameter which can add comments to user agent as per BIP-0014 (Pavol Rusnak)
|
||||
- #6647 `a3babc8` Sanitize uacomment (MarcoFalke)
|
||||
- #6742 `3b2d37c` Changed logging to make -logtimestamps to work also for -printtoconsole (arnuschky)
|
||||
- #6846 `2cd020d` alias -h for -help (Daniel Cousens)
|
||||
- #6622 `7939164` Introduce -maxuploadtarget (Jonas Schnelli)
|
||||
- #6881 `2b62551` Debug: Add option for microsecond precision in debug.log (Suhas Daftuar)
|
||||
- #6776 `e06c14f` Support -checkmempool=N, which runs checks once every N transactions (Pieter Wuille)
|
||||
- #6896 `d482c0a` Make -checkmempool=1 not fail through int32 overflow (Pieter Wuille)
|
||||
- #6993 `b632145` Add -blocksonly option (Patrick Strateman)
|
||||
- #7323 `a344880` 0.12: Backport -bytespersigop option (Luke-Jr)
|
||||
- #7386 `da83ecd` Add option `-permitrbf` to set transaction replacement policy (Wladimir J. van der Laan)
|
||||
- #7290 `b16b5bc` Add missing options help (MarcoFalke)
|
||||
- #7440 `c76bfff` Rename permitrbf to mempoolreplacement and provide minimal string-list forward compatibility (Luke-Jr)
|
||||
|
||||
### Block and transaction handling
|
||||
|
||||
- #6203 `f00b623` Remove P2SH coinbase flag, no longer interesting (Luke-Jr)
|
||||
- #6222 `9c93ee5` Explicitly set tx.nVersion for the genesis block and mining tests (Mark Friedenbach)
|
||||
- #5985 `3a1d3e8` Fix removing of orphan transactions (Alex Morcos)
|
||||
- #6221 `dd8fe82` Prune: Support noncontiguous block files (Adam Weiss)
|
||||
- #6124 `41076aa` Mempool only CHECKLOCKTIMEVERIFY (BIP65) verification, unparameterized version (Peter Todd)
|
||||
- #6329 `d0a10c1` acceptnonstdtxn option to skip (most) "non-standard transaction" checks, for testnet/regtest only (Luke-Jr)
|
||||
- #6410 `7cdefb9` Implement accurate memory accounting for mempool (Pieter Wuille)
|
||||
- #6444 `24ce77d` Exempt unspendable transaction outputs from dust checks (dexX7)
|
||||
- #5913 `a0625b8` Add absurdly high fee message to validation state (Shaul Kfir)
|
||||
- #6177 `2f746c6` Prevent block.nTime from decreasing (Mark Friedenbach)
|
||||
- #6377 `e545371` Handle no chain tip available in InvalidChainFound() (Ross Nicoll)
|
||||
- #6551 `39ddaeb` Handle leveldb::DestroyDB() errors on wipe failure (Adam Weiss)
|
||||
- #6654 `b0ce450` Mempool package tracking (Suhas Daftuar)
|
||||
- #6715 `82d2aef` Fix mempool packages (Suhas Daftuar)
|
||||
- #6680 `4f44530` use CBlockIndex instead of uint256 for UpdatedBlockTip signal (Jonas Schnelli)
|
||||
- #6650 `4fac576` Obfuscate chainstate (James O'Beirne)
|
||||
- #6777 `9caaf6e` Unobfuscate chainstate data in CCoinsViewDB::GetStats (James O'Beirne)
|
||||
- #6722 `3b20e23` Limit mempool by throwing away the cheapest txn and setting min relay fee to it (Matt Corallo)
|
||||
- #6889 `38369dd` fix locking issue with new mempool limiting (Jonas Schnelli)
|
||||
- #6464 `8f3b3cd` Always clean up manual transaction prioritization (Casey Rodarmor)
|
||||
- #6865 `d0badb9` Fix chainstate serialized_size computation (Pieter Wuille)
|
||||
- #6566 `ff057f4` BIP-113: Mempool-only median time-past as endpoint for lock-time calculations (Mark Friedenbach)
|
||||
- #6934 `3038eb6` Restores mempool only BIP113 enforcement (Gregory Maxwell)
|
||||
- #6965 `de7d459` Benchmark sanity checks and fork checks in ConnectBlock (Matt Corallo)
|
||||
- #6918 `eb6172a` Make sigcache faster, more efficient, larger (Pieter Wuille)
|
||||
- #6771 `38ed190` Policy: Lower default limits for tx chains (Alex Morcos)
|
||||
- #6932 `73fa5e6` ModifyNewCoins saves database lookups (Alex Morcos)
|
||||
- #5967 `05d5918` Alter assumptions in CCoinsViewCache::BatchWrite (Alex Morcos)
|
||||
- #6871 `0e93586` nSequence-based Full-RBF opt-in (Peter Todd)
|
||||
- #7008 `eb77416` Lower bound priority (Alex Morcos)
|
||||
- #6915 `2ef5ffa` [Mempool] Improve removal of invalid transactions after reorgs (Suhas Daftuar)
|
||||
- #6898 `4077ad2` Rewrite CreateNewBlock (Alex Morcos)
|
||||
- #6872 `bdda4d5` Remove UTXO cache entries when the tx they were added for is removed/does not enter mempool (Matt Corallo)
|
||||
- #7062 `12c469b` [Mempool] Fix mempool limiting and replace-by-fee for PrioritiseTransaction (Suhas Daftuar)
|
||||
- #7276 `76de36f` Report non-mandatory script failures correctly (Pieter Wuille)
|
||||
- #7217 `e08b7cb` Mark blocks with too many sigops as failed (Suhas Daftuar)
|
||||
- #7387 `f4b2ce8` Get rid of inaccurate ScriptSigArgsExpected (Pieter Wuille)
|
||||
|
||||
### P2P protocol and network code
|
||||
|
||||
- #6172 `88a7ead` Ignore getheaders requests when not synced (Suhas Daftuar)
|
||||
- #5875 `9d60602` Be stricter in processing unrequested blocks (Suhas Daftuar)
|
||||
- #6256 `8ccc07c` Use best header chain timestamps to detect partitioning (Gavin Andresen)
|
||||
- #6283 `a903ad7` make CAddrMan::size() return the correct type of size_t (Diapolo)
|
||||
- #6272 `40400d5` Improve proxy initialization (continues #4871) (Wladimir J. van der Laan, Diapolo)
|
||||
- #6310 `66e5465` banlist.dat: store banlist on disk (Jonas Schnelli)
|
||||
- #6412 `1a2de32` Test whether created sockets are select()able (Pieter Wuille)
|
||||
- #6498 `219b916` Keep track of recently rejected transactions with a rolling bloom filter (cont'd) (Peter Todd)
|
||||
- #6556 `70ec975` Fix masking of irrelevant bits in address groups. (Alex Morcos)
|
||||
- #6530 `ea19c2b` Improve addrman Select() performance when buckets are nearly empty (Pieter Wuille)
|
||||
- #6583 `af9305a` add support for miniupnpc api version 14 (Pavel Vasin)
|
||||
- #6374 `69dc5b5` Connection slot exhaustion DoS mitigation (Patrick Strateman)
|
||||
- #6636 `536207f` net: correctly initialize nMinPingUsecTime (Wladimir J. van der Laan)
|
||||
- #6579 `0c27795` Add NODE_BLOOM service bit and bump protocol version (Matt Corallo)
|
||||
- #6148 `999c8be` Relay blocks when pruning (Suhas Daftuar)
|
||||
- #6588 `cf9bb11` In (strCommand == "tx"), return if AlreadyHave() (Tom Harding)
|
||||
- #6974 `2f71b07` Always allow getheaders from whitelisted peers (Wladimir J. van der Laan)
|
||||
- #6639 `bd629d7` net: Automatically create hidden service, listen on Tor (Wladimir J. van der Laan)
|
||||
- #6984 `9ffc687` don't enforce maxuploadtarget's disconnect for whitelisted peers (Jonas Schnelli)
|
||||
- #7046 `c322652` Net: Improve blocks only mode. (Patrick Strateman)
|
||||
- #7090 `d6454f6` Connect to Tor hidden services by default (when listening on Tor) (Peter Todd)
|
||||
- #7106 `c894fbb` Fix and improve relay from whitelisted peers (Pieter Wuille)
|
||||
- #7129 `5d5ef3a` Direct headers announcement (rebase of #6494) (Pieter Wuille)
|
||||
- #7079 `1b5118b` Prevent peer flooding inv request queue (redux) (redux) (Gregory Maxwell)
|
||||
- #7166 `6ba25d2` Disconnect on mempool requests from peers when over the upload limit. (Gregory Maxwell)
|
||||
- #7133 `f31955d` Replace setInventoryKnown with a rolling bloom filter (rebase of #7100) (Pieter Wuille)
|
||||
- #7174 `82aff88` Don't do mempool lookups for "mempool" command without a filter (Matt Corallo)
|
||||
- #7179 `44fef99` net: Fix sent reject messages for blocks and transactions (Wladimir J. van der Laan)
|
||||
- #7181 `8fc174a` net: Add and document network messages in protocol.h (Wladimir J. van der Laan)
|
||||
- #7125 `10b88be` Replace global trickle node with random delays (Pieter Wuille)
|
||||
- #7415 `cb83beb` net: Hardcoded seeds update January 2016 (Wladimir J. van der Laan)
|
||||
- #7438 `e2d9a58` Do not absolutely protect local peers; decide group ties based on time (Gregory Maxwell)
|
||||
- #7439 `86755bc` Add whitelistforcerelay to control forced relaying. [#7099 redux] (Gregory Maxwell)
|
||||
- #7482 `e16f5b4` Ensure headers count is correct (Suhas Daftuar)
|
||||
|
||||
### Validation
|
||||
|
||||
- #5927 `8d9f0a6` Reduce checkpoints' effect on consensus. (Pieter Wuille)
|
||||
- #6299 `24f2489` Bugfix: Don't check the genesis block header before accepting it (Jorge Timón)
|
||||
- #6361 `d7ada03` Use real number of cores for default -par, ignore virtual cores (Wladimir J. van der Laan)
|
||||
- #6519 `87f37e2` Make logging for validation optional (Wladimir J. van der Laan)
|
||||
- #6351 `2a1090d` CHECKLOCKTIMEVERIFY (BIP65) IsSuperMajority() soft-fork (Peter Todd)
|
||||
- #6931 `54e8bfe` Skip BIP 30 verification where not necessary (Alex Morcos)
|
||||
- #6954 `e54ebbf` Switch to libsecp256k1-based ECDSA validation (Pieter Wuille)
|
||||
- #6508 `61457c2` Switch to a constant-space Merkle root/branch algorithm. (Pieter Wuille)
|
||||
- #6914 `327291a` Add pre-allocated vector type and use it for CScript (Pieter Wuille)
|
||||
- #7500 `889e5b3` Correctly report high-S violations (Pieter Wuille)
|
||||
|
||||
|
||||
### Build system
|
||||
|
||||
- #6210 `0e4f2a0` build: disable optional use of gmp in internal secp256k1 build (Wladimir J. van der Laan)
|
||||
- #6214 `87406aa` [OSX] revert renaming of Bitcoin-Qt.app and use CFBundleDisplayName (partial revert of #6116) (Jonas Schnelli)
|
||||
- #6218 `9d67b10` build/gitian misc updates (Cory Fields)
|
||||
- #6269 `d4565b6` gitian: Use the new bitcoin-detached-sigs git repo for OSX signatures (Cory Fields)
|
||||
- #6418 `d4a910c` Add autogen.sh to source tarball. (randy-waterhouse)
|
||||
- #6373 `1ae3196` depends: non-qt bumps for 0.12 (Cory Fields)
|
||||
- #6434 `059b352` Preserve user-passed CXXFLAGS with --enable-debug (Gavin Andresen)
|
||||
- #6501 `fee6554` Misc build fixes (Cory Fields)
|
||||
- #6600 `ef4945f` Include bitcoin-tx binary on Debian/Ubuntu (Zak Wilcox)
|
||||
- #6619 `4862708` depends: bump miniupnpc and ccache (Michael Ford)
|
||||
- #6801 `ae69a75` [depends] Latest config.guess and config.sub (Michael Ford)
|
||||
- #6938 `193f7b5` build: If both Qt4 and Qt5 are installed, use Qt5 (Wladimir J. van der Laan)
|
||||
- #7092 `348b281` build: Set osx permissions in the dmg to make Gatekeeper happy (Cory Fields)
|
||||
- #6980 `eccd671` [Depends] Bump Boost, miniupnpc, ccache & zeromq (Michael Ford)
|
||||
- #7424 `aa26ee0` Add security/export checks to gitian and fix current failures (Cory Fields)
|
||||
|
||||
### Wallet
|
||||
|
||||
- #6183 `87550ee` Fix off-by-one error w/ nLockTime in the wallet (Peter Todd)
|
||||
- #6057 `ac5476e` re-enable wallet in autoprune (Jonas Schnelli)
|
||||
- #6356 `9e6c33b` Delay initial pruning until after wallet init (Adam Weiss)
|
||||
- #6088 `91389e5` fundrawtransaction (Matt Corallo)
|
||||
- #6415 `ddd8d80` Implement watchonly support in fundrawtransaction (Matt Corallo)
|
||||
- #6567 `0f0f323` Fix crash when mining with empty keypool. (Daniel Kraft)
|
||||
- #6688 `4939eab` Fix locking in GetTransaction. (Alex Morcos)
|
||||
- #6645 `4dbd43e` Enable wallet key imports without rescan in pruned mode. (Gregory Maxwell)
|
||||
- #6550 `5b77244` Do not store Merkle branches in the wallet. (Pieter Wuille)
|
||||
- #5924 `12a7712` Clean up change computation in CreateTransaction. (Daniel Kraft)
|
||||
- #6906 `48b5b84` Reject invalid pubkeys when reading ckey items from the wallet. (Gregory Maxwell)
|
||||
- #7010 `e0a5ef8` Fix fundrawtransaction handling of includeWatching (Peter Todd)
|
||||
- #6851 `616d61b` Optimisation: Store transaction list order in memory rather than compute it every need (Luke-Jr)
|
||||
- #6134 `e92377f` Improve usage of fee estimation code (Alex Morcos)
|
||||
- #7103 `a775182` [wallet, rpc tests] Fix settxfee, paytxfee (MarcoFalke)
|
||||
- #7105 `30c2d8c` Keep track of explicit wallet conflicts instead of using mempool (Pieter Wuille)
|
||||
- #7096 `9490bd7` [Wallet] Improve minimum absolute fee GUI options (Jonas Schnelli)
|
||||
- #6216 `83f06ca` Take the training wheels off anti-fee-sniping (Peter Todd)
|
||||
- #4906 `96e8d12` Issue#1643: Coinselection prunes extraneous inputs from ApproximateBestSubset (Murch)
|
||||
- #7200 `06c6a58` Checks for null data transaction before issuing error to debug.log (Andy Craze)
|
||||
- #7296 `a36d79b` Add sane fallback for fee estimation (Alex Morcos)
|
||||
- #7293 `ff9b610` Add regression test for vValue sort order (MarcoFalke)
|
||||
- #7306 `4707797` Make sure conflicted wallet tx's update balances (Alex Morcos)
|
||||
- #7381 `621bbd8` [walletdb] Fix syntax error in key parser (MarcoFalke)
|
||||
- #7491 `00ec73e` wallet: Ignore MarkConflict if block hash is not known (Wladimir J. van der Laan)
|
||||
- #7502 `1329963` Update the wallet best block marker before pruning (Pieter Wuille)
|
||||
|
||||
### GUI
|
||||
|
||||
- #6217 `c57e12a` disconnect peers from peers tab via context menu (Diapolo)
|
||||
- #6209 `ab0ec67` extend rpc console peers tab (Diapolo)
|
||||
- #6484 `1369d69` use CHashWriter also in SignVerifyMessageDialog (Pavel Vasin)
|
||||
- #6487 `9848d42` Introduce PlatformStyle (Wladimir J. van der Laan)
|
||||
- #6505 `100c9d3` cleanup icons (MarcoFalke)
|
||||
- #4587 `0c465f5` allow users to set -onion via GUI (Diapolo)
|
||||
- #6529 `c0f66ce` show client user agent in debug window (Diapolo)
|
||||
- #6594 `878ea69` Disallow duplicate windows. (Casey Rodarmor)
|
||||
- #5665 `6f55cdd` add verifySize() function to PaymentServer (Diapolo)
|
||||
- #6317 `ca5e2a1` minor optimisations in peertablemodel (Diapolo)
|
||||
- #6315 `e59d2a8` allow banning and unbanning over UI->peers table (Jonas Schnelli)
|
||||
- #6653 `e04b2fa` Pop debug window in foreground when opened twice (MarcoFalke)
|
||||
- #6864 `c702521` Use monospace font (MarcoFalke)
|
||||
- #6887 `3694b74` Update coin control and smartfee labels (MarcoFalke)
|
||||
- #7000 `814697c` add shortcurts for debug-/console-window (Jonas Schnelli)
|
||||
- #6951 `03403d8` Use maxTxFee instead of 10000000 (MarcoFalke)
|
||||
- #7051 `a190777` ui: Add "Copy raw transaction data" to transaction list context menu (Wladimir J. van der Laan)
|
||||
- #6979 `776848a` simple mempool info in debug window (Jonas Schnelli)
|
||||
- #7006 `26af1ac` add startup option to reset Qt settings (Jonas Schnelli)
|
||||
- #6780 `2a94cd6` Call init's parameter interaction before we create the UI options model (Jonas Schnelli)
|
||||
- #7112 `96b8025` reduce cs_main locks during tip update, more fluently update UI (Jonas Schnelli)
|
||||
- #7206 `f43c2f9` Add "NODE_BLOOM" to guiutil so that peers don't get UNKNOWN[4] (Matt Corallo)
|
||||
- #7282 `5cadf3e` fix coincontrol update issue when deleting a send coins entry (Jonas Schnelli)
|
||||
- #7319 `1320300` Intro: Display required space (MarcoFalke)
|
||||
- #7318 `9265e89` quickfix for RPC timer interface problem (Jonas Schnelli)
|
||||
- #7327 `b16b5bc` [Wallet] Transaction View: LastMonth calculation fixed (crowning-)
|
||||
- #7364 `7726c48` [qt] Windows: Make rpcconsole monospace font larger (MarcoFalke)
|
||||
- #7384 `294f432` [qt] Peertable: Increase SUBVERSION_COLUMN_WIDTH (MarcoFalke)
|
||||
|
||||
### Tests and QA
|
||||
|
||||
- #6305 `9005c91` build: comparison tool swap (Cory Fields)
|
||||
- #6318 `e307e13` build: comparison tool NPE fix (Cory Fields)
|
||||
- #6337 `0564c5b` Testing infrastructure: mocktime fixes (Gavin Andresen)
|
||||
- #6350 `60abba1` add unit tests for the decodescript rpc (mruddy)
|
||||
- #5881 `3203a08` Fix and improve txn_doublespend.py test (Tom Harding)
|
||||
- #6390 `6a73d66` tests: Fix bitcoin-tx signing test case (Wladimir J. van der Laan)
|
||||
- #6368 `7fc25c2` CLTV: Add more tests to improve coverage (Esteban Ordano)
|
||||
- #6414 `5121c68` Fix intermittent test failure, reduce test time (Tom Harding)
|
||||
- #6417 `44fa82d` [QA] fix possible reorg issue in (fund)rawtransaction(s).py RPC test (Jonas Schnelli)
|
||||
- #6398 `3d9362d` rpc: Remove chain-specific RequireRPCPassword (Wladimir J. van der Laan)
|
||||
- #6428 `bb59e78` tests: Remove old sh-based test framework (Wladimir J. van der Laan)
|
||||
- #5515 `d946e9a` RFC: Assert on probable deadlocks if the second lock isnt try_lock (Matt Corallo)
|
||||
- #6287 `d2464df` Clang lock debug (Cory Fields)
|
||||
- #6465 `410fd74` Don't share objects between TestInstances (Casey Rodarmor)
|
||||
- #6534 `6c1c7fd` Fix test locking issues and un-revert the probable-deadlines assertions commit (Cory Fields)
|
||||
- #6509 `bb4faee` Fix race condition on test node shutdown (Casey Rodarmor)
|
||||
- #6523 `561f8af` Add p2p-fullblocktest.py (Casey Rodarmor)
|
||||
- #6590 `981fd92` Fix stale socket rebinding and re-enable python tests for Windows (Cory Fields)
|
||||
- #6730 `cb4d6d0` build: Remove dependency of bitcoin-cli on secp256k1 (Wladimir J. van der Laan)
|
||||
- #6616 `5ab5dca` Regression Tests: Migrated rpc-tests.sh to all Python rpc-tests.py (Peter Tschipper)
|
||||
- #6720 `d479311` Creates unittests for addrman, makes addrman more testable. (Ethan Heilman)
|
||||
- #6853 `c834f56` Added fPowNoRetargeting field to Consensus::Params (Eric Lombrozo)
|
||||
- #6827 `87e5539` [rpc-tests] Check return code (MarcoFalke)
|
||||
- #6848 `f2c869a` Add DERSIG transaction test cases (Ross Nicoll)
|
||||
- #6813 `5242bb3` Support gathering code coverage data for RPC tests with lcov (dexX7)
|
||||
- #6888 `c8322ff` Clear strMiscWarning before running PartitionAlert (Eric Lombrozo)
|
||||
- #6894 `2675276` [Tests] Fix BIP65 p2p test (Suhas Daftuar)
|
||||
- #6863 `725539e` [Test Suite] Fix test for null tx input (Daniel Kraft)
|
||||
- #6926 `a6d0d62` tests: Initialize networking on windows (Wladimir J. van der Laan)
|
||||
- #6822 `9fa54a1` [tests] Be more strict checking dust (MarcoFalke)
|
||||
- #6804 `5fcc14e` [tests] Add basic coverage reporting for RPC tests (James O'Beirne)
|
||||
- #7045 `72dccfc` Bugfix: Use unique autostart filenames on Linux for testnet/regtest (Luke-Jr)
|
||||
- #7095 `d8368a0` Replace scriptnum_test's normative ScriptNum implementation (Wladimir J. van der Laan)
|
||||
- #7063 `6abf6eb` [Tests] Add prioritisetransaction RPC test (Suhas Daftuar)
|
||||
- #7137 `16f4a6e` Tests: Explicitly set chain limits in replace-by-fee test (Suhas Daftuar)
|
||||
- #7216 `9572e49` Removed offline testnet DNSSeed 'alexykot.me'. (tnull)
|
||||
- #7209 `f3ad812` test: don't override BITCOIND and BITCOINCLI if they're set (Wladimir J. van der Laan)
|
||||
- #7226 `301f16a` Tests: Add more tests to p2p-fullblocktest (Suhas Daftuar)
|
||||
- #7153 `9ef7c54` [Tests] Add mempool_limit.py test (Jonas Schnelli)
|
||||
- #7170 `453c567` tests: Disable Tor interaction (Wladimir J. van der Laan)
|
||||
- #7229 `1ed938b` [qa] wallet: Check if maintenance changes the balance (MarcoFalke)
|
||||
- #7308 `d513405` [Tests] Eliminate intermittent failures in sendheaders.py (Suhas Daftuar)
|
||||
- #7468 `947c4ff` [rpc-tests] Change solve() to use rehash (Brad Andrews)
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
- #6213 `e54ff2f` [init] add -blockversion help and extend -upnp help (Diapolo)
|
||||
- #5975 `1fea667` Consensus: Decouple ContextualCheckBlockHeader from checkpoints (Jorge Timón)
|
||||
- #6061 `eba2f06` Separate Consensus::CheckTxInputs and GetSpendHeight in CheckInputs (Jorge Timón)
|
||||
- #5994 `786ed11` detach wallet from miner (Jonas Schnelli)
|
||||
- #6387 `11576a5` [bitcoin-cli] improve error output (Jonas Schnelli)
|
||||
- #6401 `6db53b4` Add BITCOIND_SIGTERM_TIMEOUT to OpenRC init scripts (Florian Schmaus)
|
||||
- #6430 `b01981e` doc: add documentation for shared library libbitcoinconsensus (Braydon Fuller)
|
||||
- #6372 `dcc495e` Update Linearize tool to support Windows paths; fix variable scope; update README and example configuration (Paul Georgiou)
|
||||
- #6453 `8fe5cce` Separate core memory usage computation in core_memusage.h (Pieter Wuille)
|
||||
- #6149 `633fe10` Buffer log messages and explicitly open logs (Adam Weiss)
|
||||
- #6488 `7cbed7f` Avoid leaking file descriptors in RegisterLoad (Casey Rodarmor)
|
||||
- #6497 `a2bf40d` Make sure LogPrintf strings are line-terminated (Wladimir J. van der Laan)
|
||||
- #6504 `b6fee6b` Rationalize currency unit to "BTC" (Ross Nicoll)
|
||||
- #6507 `9bb4dd8` Removed contrib/bitrpc (Casey Rodarmor)
|
||||
- #6527 `41d650f` Use unique name for AlertNotify tempfile (Casey Rodarmor)
|
||||
- #6561 `e08a7d9` limitedmap fixes and tests (Casey Rodarmor)
|
||||
- #6565 `a6f2aff` Make sure we re-acquire lock if a task throws (Casey Rodarmor)
|
||||
- #6599 `f4d88c4` Make sure LogPrint strings are line-terminated (Ross Nicoll)
|
||||
- #6630 `195942d` Replace boost::reverse_lock with our own (Casey Rodarmor)
|
||||
- #6103 `13b8282` Add ZeroMQ notifications (João Barbosa)
|
||||
- #6692 `d5d1d2e` devtools: don't push if signing fails in github-merge (Wladimir J. van der Laan)
|
||||
- #6728 `2b0567b` timedata: Prevent warning overkill (Wladimir J. van der Laan)
|
||||
- #6713 `f6ce59c` SanitizeString: Allow hypen char (MarcoFalke)
|
||||
- #5987 `4899a04` Bugfix: Fix testnet-in-a-box use case (Luke-Jr)
|
||||
- #6733 `b7d78fd` Simple benchmarking framework (Gavin Andresen)
|
||||
- #6854 `a092970` devtools: Add security-check.py (Wladimir J. van der Laan)
|
||||
- #6790 `fa1d252` devtools: add clang-format.py (MarcoFalke)
|
||||
- #7114 `f3d0fdd` util: Don't set strMiscWarning on every exception (Wladimir J. van der Laan)
|
||||
- #7078 `93e0514` uint256::GetCheapHash bigendian compatibility (arowser)
|
||||
- #7094 `34e02e0` Assert now > 0 in GetTime GetTimeMillis GetTimeMicros (Patrick Strateman)
|
||||
|
||||
Credits
=======

Thanks to everyone who directly contributed to this release:

- accraze
- Adam Weiss
- Alex Morcos
- Alex van der Peet
- AlSzacrel
- Altoidnerd
- Andriy Voskoboinyk
- antonio-fr
- Arne Brutschy
- Ashley Holman
- Bob McElrath
- Braydon Fuller
- BtcDrak
- Casey Rodarmor
- centaur1
- Chris Kleeschulte
- Christian Decker
- Cory Fields
- crowning-
- daniel
- Daniel Cousens
- Daniel Kraft
- David Hill
- dexX7
- Diego Viola
- Elias Rohrer
- Eric Lombrozo
- Erik Mossberg
- Esteban Ordano
- EthanHeilman
- Florian Schmaus
- Forrest Voight
- Gavin Andresen
- Gregory Maxwell
- Gregory Sanders / instagibbs
- Ian T
- Irving Ruan
- Jacob Welsh
- James O'Beirne
- Jeff Garzik
- Johnathan Corgan
- Jonas Schnelli
- Jonathan Cross
- João Barbosa
- Jorge Timón
- Josh Lehan
- J Ross Nicoll
- kazcw
- Kevin Cooper
- lpescher
- Luke Dashjr
- MarcoFalke
- Mark Friedenbach
- Matt
- Matt Bogosian
- Matt Corallo
- Matt Quinn
- Micha
- Michael
- Michael Ford / fanquake
- Midnight Magic
- Mitchell Cash
- mrbandrews
- mruddy
- Nick
- Patrick Strateman
- Paul Georgiou
- Paul Rabahy
- Pavel Janík / paveljanik
- Pavel Vasin
- Pavol Rusnak
- Peter Josling
- Peter Todd
- Philip Kaufmann
- Pieter Wuille
- ptschip
- randy-waterhouse
- rion
- Ross Nicoll
- Ryan Havar
- Shaul Kfir
- Simon Males
- Stephen
- Suhas Daftuar
- tailsjoin
- Thomas Kerin
- Tom Harding
- tulip
- unsystemizer
- Veres Lajos
- Wladimir J. van der Laan
- xor-freenet
- Zak Wilcox
- zathras-crypto

As well as everyone that helped translating on [Transifex](https://www.transifex.com/projects/p/bitcoin/).

198
doc/release-notes/release-notes-0.12.1.md
Normal file
198
doc/release-notes/release-notes-0.12.1.md
Normal file
@ -0,0 +1,198 @@
Bitcoin Core version 0.12.1 is now available from:

<https://bitcoin.org/bin/bitcoin-core-0.12.1/>

This is a new minor version release, including the BIP9, BIP68 and BIP112
soft fork, various bug fixes and updated translations.

Please report bugs using the issue tracker at GitHub:

<https://github.com/bitcoin/bitcoin/issues>
Upgrading and downgrading
=========================

How to Upgrade
--------------

If you are running an older version, shut it down. Wait until it has completely
shut down (which might take a few minutes for older versions), then run the
installer (on Windows) or just copy over /Applications/Bitcoin-Qt (on Mac) or
bitcoind/bitcoin-qt (on Linux).

Downgrade warning
-----------------

### Downgrade to a version < 0.12.0

Because release 0.12.0 and later will obfuscate the chainstate on every
fresh sync or reindex, the chainstate is not backwards-compatible with
pre-0.12 versions of Bitcoin Core or other software.

If you want to downgrade after you have done a reindex with 0.12.0 or later,
you will need to reindex when you first start Bitcoin Core version 0.11 or
earlier.
Notable changes
===============

First version bits BIP9 softfork deployment
-------------------------------------------

This release includes a soft fork deployment to enforce [BIP68][],
[BIP112][] and [BIP113][] using the [BIP9][] deployment mechanism.

The deployment sets the block version number to 0x20000001 between
midnight 1st May 2016 and midnight 1st May 2017 to signal readiness for
deployment. The version number consists of 0x20000000, which indicates use of
version bits, with bit 0 set to indicate support for this combined
deployment, shown as "csv" in the `getblockchaininfo` RPC call.
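For illustration only, a minimal sketch of the arithmetic behind that version
number (the constant names follow BIP9 and are merely illustrative, not code
from this release):

```
VERSIONBITS_TOP_BITS = 0x20000000  # "001" top bits mark a version-bits header (BIP9)
VERSIONBITS_TOP_MASK = 0xE0000000  # mask selecting those top three bits
CSV_BIT = 0                        # bit 0 signals the combined BIP68/112/113 ("csv") deployment

def signalling_version():
    # Version a miner sets to signal readiness: 0x20000000 | (1 << 0) == 0x20000001
    return VERSIONBITS_TOP_BITS | (1 << CSV_BIT)

def signals_csv(block_version):
    # A header signals "csv" only if it is a version-bits header with bit 0 set
    uses_version_bits = (block_version & VERSIONBITS_TOP_MASK) == VERSIONBITS_TOP_BITS
    return uses_version_bits and bool(block_version & (1 << CSV_BIT))

assert signalling_version() == 0x20000001
assert signals_csv(0x20000001) and not signals_csv(0x20000000)
```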
For more information about the soft forking change, please see
<https://github.com/bitcoin/bitcoin/pull/7648>

This specific backport pull request can be viewed at
<https://github.com/bitcoin/bitcoin/pull/7543>

[BIP9]: https://github.com/bitcoin/bips/blob/master/bip-0009.mediawiki
[BIP68]: https://github.com/bitcoin/bips/blob/master/bip-0068.mediawiki
[BIP112]: https://github.com/bitcoin/bips/blob/master/bip-0112.mediawiki
[BIP113]: https://github.com/bitcoin/bips/blob/master/bip-0113.mediawiki
BIP68 soft fork to enforce sequence locks for relative locktime
---------------------------------------------------------------

[BIP68][] introduces consensus-enforced relative lock-time semantics for
the sequence number field, enabling a signed transaction input to remain
invalid for a defined period of time after confirmation of its corresponding
outpoint.
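For orientation, a minimal sketch of the nSequence encoding BIP68 defines
follows. The flag constants match those used in the bip68/bip112 test scripts
touched by this merge; the decoder itself is illustrative only:

```
SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31   # if set, nSequence carries no relative locktime
SEQUENCE_LOCKTIME_TYPE_FLAG = 1 << 22      # if set, the value is in 512-second units, else blocks
SEQUENCE_LOCKTIME_GRANULARITY = 9          # 2**9 = 512 seconds per time unit
SEQUENCE_LOCKTIME_MASK = 0x0000ffff        # low 16 bits hold the locktime value (per BIP68)

def decode_relative_locktime(nSequence):
    # Return ('disabled', None), ('time', seconds) or ('height', blocks)
    if nSequence & SEQUENCE_LOCKTIME_DISABLE_FLAG:
        return ('disabled', None)
    value = nSequence & SEQUENCE_LOCKTIME_MASK
    if nSequence & SEQUENCE_LOCKTIME_TYPE_FLAG:
        return ('time', value << SEQUENCE_LOCKTIME_GRANULARITY)  # seconds
    return ('height', value)  # blocks

# e.g. an input that must wait 10 * 512 seconds after its outpoint confirms:
assert decode_relative_locktime(SEQUENCE_LOCKTIME_TYPE_FLAG | 10) == ('time', 5120)
```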
For more information about the implementation, see
<https://github.com/bitcoin/bitcoin/pull/7184>
BIP112 soft fork to enforce OP_CHECKSEQUENCEVERIFY
--------------------------------------------------

[BIP112][] redefines the existing OP_NOP3 as a new opcode, OP_CHECKSEQUENCEVERIFY
(CSV), in the Bitcoin scripting system. In combination with [BIP68][] it allows
execution pathways of a script to be restricted based on the age of the output
being spent.
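The RPC tests updated in this merge exercise the new opcode by prepending it to
an existing scriptSig; a condensed sketch of that pattern is below. It assumes
the repository's qa/rpc-tests/test_framework modules are importable, and the
helper name is invented for illustration:

```
from test_framework.script import CScript, OP_CHECKSEQUENCEVERIFY, OP_DROP

def prepend_csv(script_sig, relative_locktime):
    # Make the input's script fail CSV unless the given relative locktime is
    # satisfied; OP_DROP then removes the locktime so the original scriptSig
    # continues to evaluate as before.
    return CScript([relative_locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] +
                   list(CScript(script_sig)))

# e.g. signtx.vin[0].scriptSig = prepend_csv(signtx.vin[0].scriptSig, base_relative_locktime)
```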
For more information about the implementation, see
<https://github.com/bitcoin/bitcoin/pull/7524>
BIP113 locktime enforcement soft fork
-------------------------------------

Bitcoin Core 0.11.2 previously introduced mempool-only locktime
enforcement using GetMedianTimePast(). This release seeks to
enforce the same rule by consensus.

Bitcoin transactions currently may specify a locktime indicating when
they may be added to a valid block. Current consensus rules require
that blocks have a block header time greater than the locktime specified
in any transaction in that block.

Miners get to choose what time they use for their header time, with the
consensus rule being that no node will accept a block whose time is more
than two hours in the future. This creates an incentive for miners to
set their header times to future values in order to include locktimed
transactions which weren't supposed to be included for up to two more
hours.

The consensus rules also specify that valid blocks must have a header
time greater than that of the median of the 11 previous blocks. This
GetMedianTimePast() time has a key feature we generally associate with
time: it can't go backwards.

[BIP113][] specifies a soft fork enforced in this release that
weakens this perverse incentive for individual miners to use a future
time by requiring that valid blocks have a computed GetMedianTimePast()
greater than the locktime specified in any transaction in that block.

Mempool inclusion rules currently require transactions to be valid for
immediate inclusion in a block in order to be accepted into the mempool.
This release begins applying the BIP113 rule to received transactions,
so transactions whose locktime is greater than the current GetMedianTimePast()
will no longer be accepted into the mempool.

**Implication for miners:** you will begin rejecting transactions that
would not be valid under BIP113, which will prevent you from producing
invalid blocks when BIP113 is enforced on the network. Any
transactions which are valid under the current rules but not yet valid
under the BIP113 rules will either be mined by other miners or delayed
until they are valid under BIP113. Note, however, that time-based
locktime transactions are more or less unseen on the network currently.

**Implication for users:** GetMedianTimePast() always trails behind the
current time, so a transaction locktime set to the present time will be
rejected by nodes running this release until the median time moves
forward. To compensate, subtract one hour (3,600 seconds) from your
locktimes to allow those transactions to be included in mempools at
approximately the expected time.
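As a minimal illustration of that workaround (not part of the release; the
helper and constant names are invented), the adjustment is just a subtraction:

```
import time

MTP_LAG_ALLOWANCE = 3600  # seconds; the "subtract one hour" rule of thumb described above

def bip113_safe_locktime(target_unix_time):
    # Locktime to set so the transaction should be accepted into the mempools of
    # BIP113-enforcing nodes at approximately target_unix_time.
    return int(target_unix_time) - MTP_LAG_ALLOWANCE

# A transaction intended to become mineable roughly "now":
nLockTime = bip113_safe_locktime(time.time())
```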
For more information about the implementation, see
<https://github.com/bitcoin/bitcoin/pull/6566>
Miscellaneous
-------------

The p2p alert system is off by default. To turn it on, use the `-alert`
option at startup.

0.12.1 Change log
=================

Detailed release notes follow. This overview includes changes that affect
behavior, not code moves, refactors and string updates. For convenience in locating
the code changes and accompanying discussion, both the pull request and
git merge commit are mentioned.
### RPC and other APIs
- #7739 `7ffc2bd` Add abandoned status to listtransactions (jonasschnelli)

### Block and transaction handling
- #7543 `834aaef` Backport BIP9, BIP68 and BIP112 with softfork (btcdrak)

### P2P protocol and network code
- #7804 `90f1d24` Track block download times per individual block (sipa)
- #7832 `4c3a00d` Reduce block timeout to 10 minutes (laanwj)

### Validation
- #7821 `4226aac` init: allow shutdown during 'Activating best chain...' (laanwj)
- #7835 `46898e7` Version 2 transactions remain non-standard until CSV activates (sdaftuar)

### Build system
- #7487 `00d57b4` Workaround Travis-side CI issues (luke-jr)
- #7606 `a10da9a` No need to set -L and --location for curl (MarcoFalke)
- #7614 `ca8f160` Add curl to packages (now needed for depends) (luke-jr)
- #7776 `a784675` Remove unnecessary executables from gitian release (laanwj)

### Wallet
- #7715 `19866c1` Fix calculation of balances and available coins. (morcos)

### Miscellaneous
- #7617 `f04f4fd` Fix markdown syntax and line terminate LogPrint (MarcoFalke)
- #7747 `4d035bc` added depends cross compile info (accraze)
- #7741 `a0cea89` Mark p2p alert system as deprecated (btcdrak)
- #7780 `c5f94f6` Disable bad-chain alert (btcdrak)
Credits
=======

Thanks to everyone who directly contributed to this release:

- accraze
- Alex Morcos
- BtcDrak
- Jonas Schnelli
- Luke Dashjr
- MarcoFalke
- Mark Friedenbach
- NicolasDorier
- Pieter Wuille
- Suhas Daftuar
- Wladimir J. van der Laan

As well as everyone that helped translating on [Transifex](https://www.transifex.com/projects/p/bitcoin/).

@ -62,7 +62,7 @@ your dashd's P2P listen port (9999 by default).
|
||||
this option, and this can be a .onion address. Given the above
|
||||
configuration, you can find your onion address in
|
||||
/var/lib/tor/dashcore-service/hostname. Onion addresses are given
|
||||
preference for your node to advertize itself with, for connections
|
||||
preference for your node to advertise itself with, for connections
|
||||
coming from unroutable addresses (such as 127.0.0.1, where the
|
||||
Tor proxy typically runs).
|
||||
|
||||
|
11
qa/README.md
11
qa/README.md
@ -5,6 +5,17 @@ Every pull request to the Dash Core repository is built and run through
|
||||
the regression test suite. You can also run all or only individual
|
||||
tests locally.
|
||||
|
||||
Test dependencies
|
||||
=================
|
||||
Before running the tests, the following must be installed.
|
||||
|
||||
Unix
|
||||
----
|
||||
The python-zmq library is required. On Ubuntu or Debian it can be installed via:
|
||||
```
|
||||
sudo apt-get install python-zmq
|
||||
```
|
||||
|
||||
Running tests
|
||||
=============
|
||||
|
||||
|
@ -32,13 +32,13 @@ import re
|
||||
from tests_config import *
|
||||
|
||||
#If imported values are not defined then set to zero (or disabled)
|
||||
if not vars().has_key('ENABLE_WALLET'):
|
||||
if 'ENABLE_WALLET' not in vars():
|
||||
ENABLE_WALLET=0
|
||||
if not vars().has_key('ENABLE_BITCOIND'):
|
||||
if 'ENABLE_BITCOIND' not in vars():
|
||||
ENABLE_BITCOIND=0
|
||||
if not vars().has_key('ENABLE_UTILS'):
|
||||
if 'ENABLE_UTILS' not in vars():
|
||||
ENABLE_UTILS=0
|
||||
if not vars().has_key('ENABLE_ZMQ'):
|
||||
if 'ENABLE_ZMQ' not in vars():
|
||||
ENABLE_ZMQ=0
|
||||
|
||||
ENABLE_COVERAGE=0
|
||||
@ -67,11 +67,25 @@ if "DASHD" not in os.environ:
|
||||
if "DASHCLI" not in os.environ:
|
||||
os.environ["DASHCLI"] = buildDir + '/src/dash-cli' + EXEEXT
|
||||
|
||||
#Disable Windows tests by default
|
||||
if EXEEXT == ".exe" and "-win" not in opts:
|
||||
print "Win tests currently disabled. Use -win option to enable"
|
||||
# https://github.com/bitcoin/bitcoin/commit/d52802551752140cf41f0d9a225a43e84404d3e9
|
||||
# https://github.com/bitcoin/bitcoin/pull/5677#issuecomment-136646964
|
||||
print "Win tests currently disabled by default. Use -win option to enable"
|
||||
sys.exit(0)
|
||||
|
||||
if not (ENABLE_WALLET == 1 and ENABLE_UTILS == 1 and ENABLE_BITCOIND == 1):
|
||||
print "No rpc tests to run. Wallet, utils, and bitcoind must all be enabled"
|
||||
sys.exit(0)
|
||||
|
||||
# python-zmq may not be installed. Handle this gracefully and with some helpful info
|
||||
if ENABLE_ZMQ:
|
||||
try:
|
||||
import zmq
|
||||
except ImportError as e:
|
||||
print("ERROR: \"import zmq\" failed. Set ENABLE_ZMQ=0 or " \
|
||||
"to run zmq tests, see dependency info in /qa/README.md.")
|
||||
raise e
|
||||
|
||||
#Tests
|
||||
testScripts = [
|
||||
'bip68-112-113-p2p.py',
|
||||
@ -112,6 +126,9 @@ testScripts = [
|
||||
'abandonconflict.py',
|
||||
'p2p-versionbits-warning.py',
|
||||
]
|
||||
if ENABLE_ZMQ:
|
||||
testScripts.append('zmq_test.py')
|
||||
|
||||
testScriptsExt = [
|
||||
'bip9-softforks.py',
|
||||
'bip65-cltv.py',
|
||||
@ -135,11 +152,6 @@ testScriptsExt = [
|
||||
# 'replace-by-fee.py', # RBF is disabled in Dash Core
|
||||
]
|
||||
|
||||
#Enable ZMQ tests
|
||||
if ENABLE_ZMQ == 1:
|
||||
testScripts.append('zmq_test.py')
|
||||
|
||||
|
||||
def runtests():
|
||||
coverage = None
|
||||
|
||||
@ -147,53 +159,49 @@ def runtests():
|
||||
coverage = RPCCoverage()
|
||||
print("Initializing coverage directory at %s\n" % coverage.dir)
|
||||
|
||||
if(ENABLE_WALLET == 1 and ENABLE_UTILS == 1 and ENABLE_BITCOIND == 1):
|
||||
rpcTestDir = buildDir + '/qa/rpc-tests/'
|
||||
run_extended = '-extended' in opts
|
||||
cov_flag = coverage.flag if coverage else ''
|
||||
flags = " --srcdir %s/src %s %s" % (buildDir, cov_flag, passOn)
|
||||
rpcTestDir = buildDir + '/qa/rpc-tests/'
|
||||
run_extended = '-extended' in opts
|
||||
cov_flag = coverage.flag if coverage else ''
|
||||
flags = " --srcdir %s/src %s %s" % (buildDir, cov_flag, passOn)
|
||||
|
||||
#Run Tests
|
||||
for i in range(len(testScripts)):
|
||||
if (len(opts) == 0
|
||||
or (len(opts) == 1 and "-win" in opts )
|
||||
or run_extended
|
||||
or testScripts[i] in opts
|
||||
or re.sub(".py$", "", testScripts[i]) in opts ):
|
||||
#Run Tests
|
||||
for i in range(len(testScripts)):
|
||||
if (len(opts) == 0
|
||||
or (len(opts) == 1 and "-win" in opts )
|
||||
or run_extended
|
||||
or testScripts[i] in opts
|
||||
or re.sub(".py$", "", testScripts[i]) in opts ):
|
||||
|
||||
print("Running testscript %s%s%s ..." % (bold[1], testScripts[i], bold[0]))
|
||||
time0 = time.time()
|
||||
subprocess.check_call(
|
||||
rpcTestDir + testScripts[i] + flags, shell=True)
|
||||
print("Duration: %s s\n" % (int(time.time() - time0)))
|
||||
print("Running testscript %s%s%s ..." % (bold[1], testScripts[i], bold[0]))
|
||||
time0 = time.time()
|
||||
subprocess.check_call(
|
||||
rpcTestDir + testScripts[i] + flags, shell=True)
|
||||
print("Duration: %s s\n" % (int(time.time() - time0)))
|
||||
|
||||
# exit if help is called so we print just one set of
|
||||
# instructions
|
||||
p = re.compile(" -h| --help")
|
||||
if p.match(passOn):
|
||||
sys.exit(0)
|
||||
# exit if help is called so we print just one set of
|
||||
# instructions
|
||||
p = re.compile(" -h| --help")
|
||||
if p.match(passOn):
|
||||
sys.exit(0)
|
||||
|
||||
# Run Extended Tests
|
||||
for i in range(len(testScriptsExt)):
|
||||
if (run_extended or testScriptsExt[i] in opts
|
||||
or re.sub(".py$", "", testScriptsExt[i]) in opts):
|
||||
# Run Extended Tests
|
||||
for i in range(len(testScriptsExt)):
|
||||
if (run_extended or testScriptsExt[i] in opts
|
||||
or re.sub(".py$", "", testScriptsExt[i]) in opts):
|
||||
|
||||
print(
|
||||
"Running 2nd level testscript "
|
||||
+ "%s%s%s ..." % (bold[1], testScriptsExt[i], bold[0]))
|
||||
time0 = time.time()
|
||||
subprocess.check_call(
|
||||
rpcTestDir + testScriptsExt[i] + flags, shell=True)
|
||||
print("Duration: %s s\n" % (int(time.time() - time0)))
|
||||
print(
|
||||
"Running 2nd level testscript "
|
||||
+ "%s%s%s ..." % (bold[1], testScriptsExt[i], bold[0]))
|
||||
time0 = time.time()
|
||||
subprocess.check_call(
|
||||
rpcTestDir + testScriptsExt[i] + flags, shell=True)
|
||||
print("Duration: %s s\n" % (int(time.time() - time0)))
|
||||
|
||||
if coverage:
|
||||
coverage.report_rpc_coverage()
|
||||
if coverage:
|
||||
coverage.report_rpc_coverage()
|
||||
|
||||
print("Cleaning up coverage data")
|
||||
coverage.cleanup()
|
||||
|
||||
else:
|
||||
print "No rpc tests to run. Wallet, utils, and bitcoind must all be enabled"
|
||||
print("Cleaning up coverage data")
|
||||
coverage.cleanup()
|
||||
|
||||
|
||||
class RPCCoverage(object):
|
||||
|
@ -10,8 +10,7 @@ from test_framework.mininode import CTransaction, NetworkThread
|
||||
from test_framework.blocktools import create_coinbase, create_block
|
||||
from test_framework.comptool import TestInstance, TestManager
|
||||
from test_framework.script import CScript, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP
|
||||
from binascii import unhexlify
|
||||
import cStringIO
|
||||
from io import BytesIO
|
||||
import time
|
||||
|
||||
def cltv_invalidate(tx):
|
||||
@ -60,7 +59,7 @@ class BIP65Test(ComparisonTestFramework):
|
||||
rawtx = node.createrawtransaction(inputs, outputs)
|
||||
signresult = node.signrawtransaction(rawtx)
|
||||
tx = CTransaction()
|
||||
f = cStringIO.StringIO(unhexlify(signresult['hex']))
|
||||
f = BytesIO(hex_str_to_bytes(signresult['hex']))
|
||||
tx.deserialize(f)
|
||||
return tx
|
||||
|
||||
@ -70,7 +69,7 @@ class BIP65Test(ComparisonTestFramework):
|
||||
height = 3 # height of the next block to build
|
||||
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
|
||||
self.nodeaddress = self.nodes[0].getnewaddress()
|
||||
self.last_block_time = time.time()
|
||||
self.last_block_time = int(time.time())
|
||||
|
||||
''' 98 more version 3 blocks '''
|
||||
test_blocks = []
|
||||
|
@ -10,8 +10,7 @@ from test_framework.mininode import ToHex, CTransaction, NetworkThread
|
||||
from test_framework.blocktools import create_coinbase, create_block
|
||||
from test_framework.comptool import TestInstance, TestManager
|
||||
from test_framework.script import *
|
||||
from binascii import unhexlify
|
||||
import cStringIO
|
||||
from io import BytesIO
|
||||
import time
|
||||
|
||||
'''
|
||||
@ -119,7 +118,7 @@ class BIP68_112_113Test(ComparisonTestFramework):
|
||||
outputs = { to_address : amount }
|
||||
rawtx = node.createrawtransaction(inputs, outputs)
|
||||
tx = CTransaction()
|
||||
f = cStringIO.StringIO(unhexlify(rawtx))
|
||||
f = BytesIO(hex_str_to_bytes(rawtx))
|
||||
tx.deserialize(f)
|
||||
return tx
|
||||
|
||||
@ -127,7 +126,7 @@ class BIP68_112_113Test(ComparisonTestFramework):
|
||||
rawtx = ToHex(unsignedtx)
|
||||
signresult = node.signrawtransaction(rawtx)
|
||||
tx = CTransaction()
|
||||
f = cStringIO.StringIO(unhexlify(signresult['hex']))
|
||||
f = BytesIO(hex_str_to_bytes(signresult['hex']))
|
||||
tx.deserialize(f)
|
||||
return tx
|
||||
|
||||
@ -174,7 +173,7 @@ class BIP68_112_113Test(ComparisonTestFramework):
|
||||
tx = self.create_transaction(self.nodes[0], input, self.nodeaddress, Decimal("499.98"))
|
||||
tx.nVersion = txversion
|
||||
signtx = self.sign_transaction(self.nodes[0], tx)
|
||||
signtx.vin[0].scriptSig = CScript([-1, OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
|
||||
signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
|
||||
return signtx
|
||||
|
||||
def create_bip112txs(self, bip112inputs, varyOP_CSV, txversion, locktime_delta = 0):
|
||||
@ -197,9 +196,9 @@ class BIP68_112_113Test(ComparisonTestFramework):
|
||||
tx.nVersion = txversion
|
||||
signtx = self.sign_transaction(self.nodes[0], tx)
|
||||
if (varyOP_CSV):
|
||||
signtx.vin[0].scriptSig = CScript([relative_locktimes[b31][b25][b22][b18], OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
|
||||
signtx.vin[0].scriptSig = CScript([relative_locktimes[b31][b25][b22][b18], OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
|
||||
else:
|
||||
signtx.vin[0].scriptSig = CScript([base_relative_locktime, OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
|
||||
signtx.vin[0].scriptSig = CScript([base_relative_locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
|
||||
b18txs.append(signtx)
|
||||
b22txs.append(b18txs)
|
||||
b25txs.append(b22txs)
|
||||
|
@ -13,7 +13,6 @@ from test_framework.script import *
|
||||
from test_framework.mininode import *
|
||||
from test_framework.blocktools import *
|
||||
|
||||
COIN = 100000000
|
||||
SEQUENCE_LOCKTIME_DISABLE_FLAG = (1<<31)
|
||||
SEQUENCE_LOCKTIME_TYPE_FLAG = (1<<22) # this means use time (0 means height)
|
||||
SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift
|
||||
|
@ -9,9 +9,8 @@ from test_framework.util import *
|
||||
from test_framework.mininode import CTransaction, NetworkThread
|
||||
from test_framework.blocktools import create_coinbase, create_block
|
||||
from test_framework.comptool import TestInstance, TestManager
|
||||
from test_framework.script import CScript, OP_1NEGATE, OP_NOP3, OP_DROP
|
||||
from binascii import hexlify, unhexlify
|
||||
import cStringIO
|
||||
from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP
|
||||
from io import BytesIO
|
||||
import time
|
||||
import itertools
|
||||
|
||||
@ -30,7 +29,6 @@ test that enforcement has triggered
|
||||
'''
|
||||
|
||||
|
||||
|
||||
class BIP9SoftForksTest(ComparisonTestFramework):
|
||||
|
||||
def __init__(self):
|
||||
@ -53,15 +51,15 @@ class BIP9SoftForksTest(ComparisonTestFramework):
|
||||
outputs = { to_address : amount }
|
||||
rawtx = node.createrawtransaction(inputs, outputs)
|
||||
tx = CTransaction()
|
||||
f = cStringIO.StringIO(unhexlify(rawtx))
|
||||
f = BytesIO(hex_str_to_bytes(rawtx))
|
||||
tx.deserialize(f)
|
||||
tx.nVersion = 2
|
||||
return tx
|
||||
|
||||
def sign_transaction(self, node, tx):
|
||||
signresult = node.signrawtransaction(hexlify(tx.serialize()))
|
||||
signresult = node.signrawtransaction(bytes_to_hex_str(tx.serialize()))
|
||||
tx = CTransaction()
|
||||
f = cStringIO.StringIO(unhexlify(signresult['hex']))
|
||||
f = BytesIO(hex_str_to_bytes(signresult['hex']))
|
||||
tx.deserialize(f)
|
||||
return tx
|
||||
|
||||
@ -184,7 +182,6 @@ class BIP9SoftForksTest(ComparisonTestFramework):
|
||||
NetworkThread().start() # Start up network handling in another thread
|
||||
|
||||
|
||||
|
||||
def get_tests(self):
|
||||
for test in itertools.chain(
|
||||
self.test_BIP('csv', 536870913, self.sequence_lock_invalidate, self.donothing),
|
||||
@ -200,7 +197,7 @@ class BIP9SoftForksTest(ComparisonTestFramework):
|
||||
'''Modify the signature in vin 0 of the tx to fail CSV
|
||||
Prepends -1 CSV DROP in the scriptSig itself.
|
||||
'''
|
||||
tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP3, OP_DROP] +
|
||||
tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP] +
|
||||
list(CScript(tx.vin[0].scriptSig)))
|
||||
|
||||
def sequence_lock_invalidate(self, tx):
|
||||
@ -217,4 +214,4 @@ class BIP9SoftForksTest(ComparisonTestFramework):
|
||||
tx.nLockTime = self.last_block_time
|
||||
|
||||
if __name__ == '__main__':
|
||||
BIP9SoftForksTest().main()
|
||||
BIP9SoftForksTest().main()
|
||||
|
@ -10,8 +10,7 @@ from test_framework.mininode import CTransaction, NetworkThread
|
||||
from test_framework.blocktools import create_coinbase, create_block
|
||||
from test_framework.comptool import TestInstance, TestManager
|
||||
from test_framework.script import CScript
|
||||
from binascii import unhexlify
|
||||
import cStringIO
|
||||
from io import BytesIO
|
||||
import time
|
||||
|
||||
# A canonical signature consists of:
|
||||
@ -25,7 +24,7 @@ def unDERify(tx):
|
||||
newscript = []
|
||||
for i in scriptSig:
|
||||
if (len(newscript) == 0):
|
||||
newscript.append(i[0:-1] + '\0' + i[-1])
|
||||
newscript.append(i[0:-1] + b'\0' + i[-1:])
|
||||
else:
|
||||
newscript.append(i)
|
||||
tx.vin[0].scriptSig = CScript(newscript)
|
||||
@ -68,7 +67,7 @@ class BIP66Test(ComparisonTestFramework):
|
||||
rawtx = node.createrawtransaction(inputs, outputs)
|
||||
signresult = node.signrawtransaction(rawtx)
|
||||
tx = CTransaction()
|
||||
f = cStringIO.StringIO(unhexlify(signresult['hex']))
|
||||
f = BytesIO(hex_str_to_bytes(signresult['hex']))
|
||||
tx.deserialize(f)
|
||||
return tx
|
||||
|
||||
@ -78,7 +77,7 @@ class BIP66Test(ComparisonTestFramework):
|
||||
height = 3 # height of the next block to build
|
||||
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
|
||||
self.nodeaddress = self.nodes[0].getnewaddress()
|
||||
self.last_block_time = time.time()
|
||||
self.last_block_time = int(time.time())
|
||||
|
||||
''' 98 more version 2 blocks '''
|
||||
test_blocks = []
|
||||
|
@ -28,6 +28,7 @@ class BlockchainTest(BitcoinTestFramework):
|
||||
Test blockchain-related RPC calls:
|
||||
|
||||
- gettxoutsetinfo
|
||||
- verifychain
|
||||
|
||||
"""
|
||||
|
||||
@ -44,6 +45,7 @@ class BlockchainTest(BitcoinTestFramework):
|
||||
def run_test(self):
|
||||
self._test_gettxoutsetinfo()
|
||||
self._test_getblockheader()
|
||||
self.nodes[0].verifychain(4, 0)
|
||||
|
||||
def _test_gettxoutsetinfo(self):
|
||||
node = self.nodes[0]
|
||||
|
@ -6,8 +6,7 @@
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.mininode import *
|
||||
from binascii import hexlify, unhexlify
|
||||
from cStringIO import StringIO
|
||||
from io import BytesIO
|
||||
|
||||
class DecodeScriptTest(BitcoinTestFramework):
|
||||
"""Tests decoding scripts via RPC command "decodescript"."""
|
||||
@ -131,7 +130,7 @@ class DecodeScriptTest(BitcoinTestFramework):
|
||||
assert_equal('OP_DUP OP_HASH160 dc863734a218bfe83ef770ee9d41a27f824a6e56 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
|
||||
assert_equal('OP_HASH160 2a5edea39971049a540474c6a99edf0aa4074c58 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
|
||||
txSave = CTransaction()
|
||||
txSave.deserialize(StringIO(unhexlify(tx)))
|
||||
txSave.deserialize(BytesIO(hex_str_to_bytes(tx)))
|
||||
|
||||
# make sure that a specifically crafted op_return value will not pass all the IsDERSignature checks and then get decoded as a sighash type
|
||||
tx = '01000000015ded05872fdbda629c7d3d02b194763ce3b9b1535ea884e3c8e765d42e316724020000006b48304502204c10d4064885c42638cbff3585915b322de33762598321145ba033fc796971e2022100bb153ad3baa8b757e30a2175bd32852d2e1cb9080f84d7e32fcdfd667934ef1b012103163c0ff73511ea1743fb5b98384a2ff09dd06949488028fd819f4d83f56264efffffffff0200000000000000000b6a0930060201000201000180380100000000001976a9141cabd296e753837c086da7a45a6c2fe0d49d7b7b88ac00000000'
|
||||
@ -147,7 +146,7 @@ class DecodeScriptTest(BitcoinTestFramework):
|
||||
# some more full transaction tests of varying specific scriptSigs. used instead of
|
||||
# tests in decodescript_script_sig because the decodescript RPC is specifically
|
||||
# for working on scriptPubKeys (argh!).
|
||||
push_signature = hexlify(txSave.vin[0].scriptSig)[2:(0x48*2+4)]
|
||||
push_signature = bytes_to_hex_str(txSave.vin[0].scriptSig)[2:(0x48*2+4)]
|
||||
signature = push_signature[2:]
|
||||
der_signature = signature[:-2]
|
||||
signature_sighash_decoded = der_signature + '[ALL]'
|
||||
@ -156,25 +155,24 @@ class DecodeScriptTest(BitcoinTestFramework):
|
||||
signature_2_sighash_decoded = der_signature + '[NONE|ANYONECANPAY]'
|
||||
|
||||
# 1) P2PK scriptSig
|
||||
txSave.vin[0].scriptSig = unhexlify(push_signature)
|
||||
rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
|
||||
txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature)
|
||||
rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
|
||||
assert_equal(signature_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
|
||||
|
||||
# make sure that the sighash decodes come out correctly for a more complex / lesser used case.
|
||||
txSave.vin[0].scriptSig = unhexlify(push_signature_2)
|
||||
rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
|
||||
txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature_2)
|
||||
rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
|
||||
assert_equal(signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
|
||||
|
||||
# 2) multisig scriptSig
|
||||
txSave.vin[0].scriptSig = unhexlify('00' + push_signature + push_signature_2)
|
||||
rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
|
||||
txSave.vin[0].scriptSig = hex_str_to_bytes('00' + push_signature + push_signature_2)
|
||||
rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
|
||||
assert_equal('0 ' + signature_sighash_decoded + ' ' + signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
|
||||
|
||||
# 3) test a scriptSig that contains more than push operations.
|
||||
# in fact, it contains an OP_RETURN with data specially crafted to cause improper decode if the code does not catch it.
|
||||
txSave.vin[0].scriptSig = unhexlify('6a143011020701010101010101020601010101010101')
|
||||
rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
|
||||
print(hexlify('636174'))
|
||||
txSave.vin[0].scriptSig = hex_str_to_bytes('6a143011020701010101010101020601010101010101')
|
||||
rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
|
||||
assert_equal('OP_RETURN 3011020701010101010101020601010101010101', rpc_result['vin'][0]['scriptSig']['asm'])
|
||||
|
||||
def run_test(self):
|
||||
|
@ -48,7 +48,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
|
||||
watchonly_address = self.nodes[0].getnewaddress()
|
||||
watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
|
||||
watchonly_amount = 2000
|
||||
watchonly_amount = Decimal(2000)
|
||||
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
|
||||
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
|
||||
self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
|
||||
@ -71,7 +71,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
|
||||
fee = rawtxfund['fee']
|
||||
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
|
||||
assert_equal(len(dec_tx['vin']) > 0, True) #test if we have enought inputs
|
||||
assert(len(dec_tx['vin']) > 0) #test if we have enought inputs
|
||||
|
||||
##############################
|
||||
# simple test with two coins #
|
||||
@ -84,7 +84,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
|
||||
fee = rawtxfund['fee']
|
||||
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
|
||||
assert_equal(len(dec_tx['vin']) > 0, True) #test if we have enough inputs
|
||||
assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
|
||||
|
||||
##############################
|
||||
# simple test with two coins #
|
||||
@ -97,7 +97,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
|
||||
fee = rawtxfund['fee']
|
||||
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
|
||||
assert_equal(len(dec_tx['vin']) > 0, True)
|
||||
assert(len(dec_tx['vin']) > 0)
|
||||
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
|
||||
|
||||
|
||||
@ -116,7 +116,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
for out in dec_tx['vout']:
|
||||
totalOut += out['value']
|
||||
|
||||
assert_equal(len(dec_tx['vin']) > 0, True)
|
||||
assert(len(dec_tx['vin']) > 0)
|
||||
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
|
||||
|
||||
|
||||
@ -130,7 +130,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
utx = aUtx
|
||||
break
|
||||
|
||||
assert_equal(utx!=False, True)
|
||||
assert(utx!=False)
|
||||
|
||||
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
|
||||
outputs = { self.nodes[0].getnewaddress() : 10 }
|
||||
@ -148,7 +148,6 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
|
||||
|
||||
|
||||
|
||||
#####################################################################
|
||||
# test a fundrawtransaction with which will not get a change output #
|
||||
#####################################################################
|
||||
@ -159,7 +158,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
utx = aUtx
|
||||
break
|
||||
|
||||
assert_equal(utx!=False, True)
|
||||
assert(utx!=False)
|
||||
|
||||
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
|
||||
outputs = { self.nodes[0].getnewaddress() : Decimal(50) - fee - feeTolerance }
|
||||
@ -178,7 +177,6 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
|
||||
|
||||
|
||||
|
||||
#########################################################################
|
||||
# test a fundrawtransaction with a VIN smaller than the required amount #
|
||||
#########################################################################
|
||||
@ -189,7 +187,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
utx = aUtx
|
||||
break
|
||||
|
||||
assert_equal(utx!=False, True)
|
||||
assert(utx!=False)
|
||||
|
||||
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
|
||||
outputs = { self.nodes[0].getnewaddress() : 10 }
|
||||
@ -209,7 +207,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
matchingOuts = 0
|
||||
for i, out in enumerate(dec_tx['vout']):
|
||||
totalOut += out['value']
|
||||
if outputs.has_key(out['scriptPubKey']['addresses'][0]):
|
||||
if out['scriptPubKey']['addresses'][0] in outputs:
|
||||
matchingOuts+=1
|
||||
else:
|
||||
assert_equal(i, rawtxfund['changepos'])
|
||||
@ -234,7 +232,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
utx2 = aUtx
|
||||
|
||||
|
||||
assert_equal(utx!=False, True)
|
||||
assert(utx!=False)
|
||||
|
||||
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
|
||||
outputs = { self.nodes[0].getnewaddress() : 60 }
|
||||
@ -249,7 +247,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
matchingOuts = 0
|
||||
for out in dec_tx['vout']:
|
||||
totalOut += out['value']
|
||||
if outputs.has_key(out['scriptPubKey']['addresses'][0]):
|
||||
if out['scriptPubKey']['addresses'][0] in outputs:
|
||||
matchingOuts+=1
|
||||
|
||||
assert_equal(matchingOuts, 1)
|
||||
@ -276,7 +274,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
utx2 = aUtx
|
||||
|
||||
|
||||
assert_equal(utx!=False, True)
|
||||
assert(utx!=False)
|
||||
|
||||
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
|
||||
outputs = { self.nodes[0].getnewaddress() : 60, self.nodes[0].getnewaddress() : 10 }
|
||||
@ -291,7 +289,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
matchingOuts = 0
|
||||
for out in dec_tx['vout']:
|
||||
totalOut += out['value']
|
||||
if outputs.has_key(out['scriptPubKey']['addresses'][0]):
|
||||
if out['scriptPubKey']['addresses'][0] in outputs:
|
||||
matchingOuts+=1
|
||||
|
||||
assert_equal(matchingOuts, 2)
|
||||
@ -306,14 +304,11 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
|
||||
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
|
||||
|
||||
errorString = ""
|
||||
try:
|
||||
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
|
||||
assert("Insufficient" in errorString)
|
||||
|
||||
raise AssertionError("Spent more than available")
|
||||
except JSONRPCException as e:
|
||||
assert("Insufficient" in e.error['message'])
|
||||
|
||||
|
||||
############################################################
|
||||
@ -462,12 +457,11 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
self.is_network_split=False
|
||||
self.sync_all()
|
||||
|
||||
error = False
|
||||
try:
|
||||
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 12)
|
||||
except:
|
||||
error = True
|
||||
assert(error)
|
||||
raise AssertionError("Wallet unlocked without passphrase")
|
||||
except JSONRPCException as e:
|
||||
assert('walletpassphrase' in e.error['message'])
|
||||
|
||||
oldBalance = self.nodes[0].getbalance()
|
||||
|
||||
@ -488,7 +482,6 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
assert_equal(oldBalance+Decimal('511.0000000'), self.nodes[0].getbalance())
|
||||
|
||||
|
||||
|
||||
###############################################
|
||||
# multiple (~19) inputs tx test | Compare fee #
|
||||
###############################################
|
||||
@ -580,7 +573,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
assert_equal(len(res_dec["vin"]), 1)
|
||||
assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
|
||||
|
||||
assert_equal("fee" in result.keys(), True)
|
||||
assert("fee" in result.keys())
|
||||
assert_greater_than(result["changepos"], -1)
|
||||
|
||||
###############################################################
|
||||
|
@ -6,28 +6,6 @@
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
|
||||
|
||||
def check_array_result(object_array, to_match, expected):
|
||||
"""
|
||||
Pass in array of JSON objects, a dictionary with key/value pairs
|
||||
to match against, and another dictionary with expected key/value
|
||||
pairs.
|
||||
"""
|
||||
num_matched = 0
|
||||
for item in object_array:
|
||||
all_match = True
|
||||
for key,value in to_match.items():
|
||||
if item[key] != value:
|
||||
all_match = False
|
||||
if not all_match:
|
||||
continue
|
||||
for key,value in expected.items():
|
||||
if item[key] != value:
|
||||
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
|
||||
num_matched = num_matched+1
|
||||
if num_matched == 0:
|
||||
raise AssertionError("No objects matched %s"%(str(to_match)))
|
||||
|
||||
import threading
|
||||
|
||||
class LongpollThread(threading.Thread):
|
||||
|
@ -10,28 +10,6 @@ from binascii import a2b_hex, b2a_hex
|
||||
from hashlib import sha256
|
||||
from struct import pack
|
||||
|
||||
|
||||
def check_array_result(object_array, to_match, expected):
|
||||
"""
|
||||
Pass in array of JSON objects, a dictionary with key/value pairs
|
||||
to match against, and another dictionary with expected key/value
|
||||
pairs.
|
||||
"""
|
||||
num_matched = 0
|
||||
for item in object_array:
|
||||
all_match = True
|
||||
for key,value in to_match.items():
|
||||
if item[key] != value:
|
||||
all_match = False
|
||||
if not all_match:
|
||||
continue
|
||||
for key,value in expected.items():
|
||||
if item[key] != value:
|
||||
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
|
||||
num_matched = num_matched+1
|
||||
if num_matched == 0:
|
||||
raise AssertionError("No objects matched %s"%(str(to_match)))
|
||||
|
||||
def b2x(b):
|
||||
return b2a_hex(b).decode('ascii')
|
||||
|
||||
@ -120,10 +98,7 @@ class GetBlockTemplateProposalTest(BitcoinTestFramework):
|
||||
|
||||
# Test 3: Truncated final tx
|
||||
lastbyte = txlist[-1].pop()
|
||||
try:
|
||||
assert_template(node, tmpl, txlist, 'n/a')
|
||||
except JSONRPCException:
|
||||
pass # Expected
|
||||
assert_raises(JSONRPCException, assert_template, node, tmpl, txlist, 'n/a')
|
||||
txlist[-1].append(lastbyte)
|
||||
|
||||
# Test 4: Add an invalid tx to the end (duplicate of gen tx)
|
||||
@ -133,7 +108,7 @@ class GetBlockTemplateProposalTest(BitcoinTestFramework):
|
||||
|
||||
# Test 5: Add an invalid tx to the end (non-duplicate)
|
||||
txlist.append(bytearray(txlist[0]))
|
||||
txlist[-1][4+1] = b'\xff'
|
||||
txlist[-1][4+1] = 0xff
|
||||
assert_template(node, tmpl, txlist, 'bad-txns-inputs-missingorspent')
|
||||
txlist.pop()
|
||||
|
||||
@ -144,10 +119,7 @@ class GetBlockTemplateProposalTest(BitcoinTestFramework):
|
||||
|
||||
# Test 7: Bad tx count
|
||||
txlist.append(b'')
|
||||
try:
|
||||
assert_template(node, tmpl, txlist, 'n/a')
|
||||
except JSONRPCException:
|
||||
pass # Expected
|
||||
assert_raises(JSONRPCException, assert_template, node, tmpl, txlist, 'n/a')
|
||||
txlist.pop()
|
||||
|
||||
# Test 8: Bad bits
|
||||
|
@ -9,7 +9,6 @@
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
import base64
|
||||
|
||||
try:
|
||||
import http.client as httplib
|
||||
@ -31,71 +30,71 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
#################################################
|
||||
url = urlparse.urlparse(self.nodes[0].url)
|
||||
authpair = url.username + ':' + url.password
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpair)}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
|
||||
out1 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True)
|
||||
assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open!
|
||||
assert(b'"error":null' in out1)
|
||||
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
|
||||
|
||||
#send 2nd request without closing connection
|
||||
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
|
||||
out2 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True) #must also response with a correct json-rpc message
|
||||
assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open!
|
||||
out1 = conn.getresponse().read()
|
||||
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
|
||||
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
|
||||
conn.close()
|
||||
|
||||
#same should be if we add keep-alive because this should be the std. behaviour
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpair), "Connection": "keep-alive"}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
|
||||
out1 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True)
|
||||
assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open!
|
||||
assert(b'"error":null' in out1)
|
||||
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
|
||||
|
||||
#send 2nd request without closing connection
|
||||
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
|
||||
out2 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True) #must also response with a correct json-rpc message
|
||||
assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open!
|
||||
out1 = conn.getresponse().read()
|
||||
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
|
||||
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
|
||||
conn.close()
|
||||
|
||||
#now do the same with "Connection: close"
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpair), "Connection":"close"}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
|
||||
out1 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True)
|
||||
assert_equal(conn.sock!=None, False) #now the connection must be closed after the response
|
||||
assert(b'"error":null' in out1)
|
||||
assert(conn.sock==None) #now the connection must be closed after the response
|
||||
|
||||
#node1 (2nd node) is running with disabled keep-alive option
|
||||
urlNode1 = urlparse.urlparse(self.nodes[1].url)
|
||||
authpair = urlNode1.username + ':' + urlNode1.password
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpair)}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
|
||||
|
||||
conn = httplib.HTTPConnection(urlNode1.hostname, urlNode1.port)
|
||||
conn.connect()
|
||||
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
|
||||
out1 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True)
|
||||
assert(b'"error":null' in out1)
|
||||
|
||||
#node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
|
||||
urlNode2 = urlparse.urlparse(self.nodes[2].url)
|
||||
authpair = urlNode2.username + ':' + urlNode2.password
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpair)}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
|
||||
|
||||
conn = httplib.HTTPConnection(urlNode2.hostname, urlNode2.port)
|
||||
conn.connect()
|
||||
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
|
||||
out1 = conn.getresponse().read()
|
||||
assert_equal('"error":null' in out1, True)
|
||||
assert_equal(conn.sock!=None, True) #connection must be closed because bitcoind should use keep-alive by default
|
||||
assert(b'"error":null' in out1)
|
||||
assert(conn.sock!=None) #connection must be closed because bitcoind should use keep-alive by default
|
||||
|
||||
# Check excessive request size
|
||||
conn = httplib.HTTPConnection(urlNode2.hostname, urlNode2.port)
|
||||
|
@@ -78,9 +78,9 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
block2 = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1

# chr(81) is OP_TRUE
tx1 = create_transaction(self.block1.vtx[0], 0, chr(81), 50*100000000)
tx2 = create_transaction(tx1, 0, chr(81), 50*100000000)
# b'0x51' is OP_TRUE
tx1 = create_transaction(self.block1.vtx[0], 0, b'\x51', 50 * COIN)
tx2 = create_transaction(tx1, 0, b'\x51', 50 * COIN)

block2.vtx.extend([tx1, tx2])
block2.hashMerkleRoot = block2.calc_merkle_root()
@@ -96,7 +96,7 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
assert(block2_orig.vtx != block2.vtx)

self.tip = block2.sha256
yield TestInstance([[block2, RejectResult(16,'bad-txns-duplicate')], [block2_orig, True]])
yield TestInstance([[block2, RejectResult(16, b'bad-txns-duplicate')], [block2_orig, True]])
height += 1

'''
@@ -104,14 +104,14 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
'''
block3 = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
block3.vtx[0].vout[0].nValue = 1000*100000000 # Too high!
block3.vtx[0].vout[0].nValue = 1000 * COIN # Too high!
block3.vtx[0].sha256=None
block3.vtx[0].calc_sha256()
block3.hashMerkleRoot = block3.calc_merkle_root()
block3.rehash()
block3.solve()

yield TestInstance([[block3, RejectResult(16,'bad-cb-amount')]])
yield TestInstance([[block3, RejectResult(16, b'bad-cb-amount')]])


if __name__ == '__main__':
@@ -61,10 +61,10 @@ class InvalidTxRequestTest(ComparisonTestFramework):
height += 1
yield test

# chr(100) is OP_NOTIF
# b'\x64' is OP_NOTIF
# Transaction will be rejected with code 16 (REJECT_INVALID)
tx1 = create_transaction(self.block1.vtx[0], 0, chr(100), 50*100000000)
yield TestInstance([[tx1, RejectResult(16, 'mandatory-script-verify-flag-failed')]])
tx1 = create_transaction(self.block1.vtx[0], 0, b'\x64', 50 * COIN)
yield TestInstance([[tx1, RejectResult(16, b'mandatory-script-verify-flag-failed')]])

# TODO: test further transactions...
@@ -10,28 +10,6 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *


def check_array_result(object_array, to_match, expected):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
"""
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0:
raise AssertionError("No objects matched %s"%(str(to_match)))

class KeyPoolTest(BitcoinTestFramework):

def run_test(self):
@@ -46,7 +24,7 @@ class KeyPoolTest(BitcoinTestFramework):
try:
addr = nodes[0].getnewaddress()
raise AssertionError('Keypool should be exhausted after one address')
except JSONRPCException,e:
except JSONRPCException as e:
assert(e.error['code']==-12)

# put three new keys in the keypool
@@ -66,13 +44,15 @@ class KeyPoolTest(BitcoinTestFramework):
try:
addr = nodes[0].getrawchangeaddress()
raise AssertionError('Keypool should be exhausted after three addresses')
except JSONRPCException,e:
except JSONRPCException as e:
assert(e.error['code']==-12)

# refill keypool with three new addresses
nodes[0].walletpassphrase('test', 12000)
nodes[0].walletpassphrase('test', 1)
nodes[0].keypoolrefill(3)
nodes[0].walletlock()
# test walletpassphrase timeout
time.sleep(1.1)
assert_equal(nodes[0].getwalletinfo()["unlocked_until"], 0)

# drain them by mining
nodes[0].generate(1)
@@ -82,7 +62,7 @@ class KeyPoolTest(BitcoinTestFramework):
try:
nodes[0].generate(1)
raise AssertionError('Keypool should be exhausted after three addesses')
except JSONRPCException,e:
except JSONRPCException as e:
assert(e.error['code']==-12)

def setup_chain(self):
@@ -7,65 +7,48 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction
import cStringIO
import binascii
from test_framework.mininode import CTransaction, COIN
from io import BytesIO

def txFromHex(hexstring):
tx = CTransaction()
f = cStringIO.StringIO(binascii.unhexlify(hexstring))
f = BytesIO(hex_str_to_bytes(hexstring))
tx.deserialize(f)
return tx

def check_array_result(object_array, to_match, expected):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
"""
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0:
raise AssertionError("No objects matched %s"%(str(to_match)))

class ListTransactionsTest(BitcoinTestFramework):

def setup_nodes(self):
#This test requires mocktime
enable_mocktime()
return start_nodes(4, self.options.tmpdir)

def run_test(self):
# Simple send, 0 to 1:
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
check_array_result(self.nodes[0].listtransactions(),
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
check_array_result(self.nodes[1].listtransactions(),
assert_array_result(self.nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})
# mine a block, confirmations should change:
self.nodes[0].generate(1)
self.sync_all()
check_array_result(self.nodes[0].listtransactions(),
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
check_array_result(self.nodes[1].listtransactions(),
assert_array_result(self.nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})

# send-to-self:
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
check_array_result(self.nodes[0].listtransactions(),
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid, "category":"send"},
{"amount":Decimal("-0.2")})
check_array_result(self.nodes[0].listtransactions(),
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid, "category":"receive"},
{"amount":Decimal("0.2")})
@@ -76,28 +59,28 @@ class ListTransactionsTest(BitcoinTestFramework):
self.nodes[1].getaccountaddress("toself") : 0.44 }
txid = self.nodes[1].sendmany("", send_to)
self.sync_all()
check_array_result(self.nodes[1].listtransactions(),
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.11")},
{"txid":txid} )
check_array_result(self.nodes[0].listtransactions(),
assert_array_result(self.nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.11")},
{"txid":txid} )
check_array_result(self.nodes[1].listtransactions(),
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.22")},
{"txid":txid} )
check_array_result(self.nodes[1].listtransactions(),
assert_array_result(self.nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.22")},
{"txid":txid} )
check_array_result(self.nodes[1].listtransactions(),
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.33")},
{"txid":txid} )
check_array_result(self.nodes[0].listtransactions(),
assert_array_result(self.nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.33")},
{"txid":txid, "account" : "from1"} )
check_array_result(self.nodes[1].listtransactions(),
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.44")},
{"txid":txid, "account" : ""} )
check_array_result(self.nodes[1].listtransactions(),
assert_array_result(self.nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.44")},
{"txid":txid, "account" : "toself"} )
@@ -107,7 +90,7 @@ class ListTransactionsTest(BitcoinTestFramework):
self.nodes[1].generate(1)
self.sync_all()
assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
check_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
assert_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
{"category":"receive","amount":Decimal("0.1")},
{"txid":txid, "account" : "watchonly"} )
@@ -136,9 +119,9 @@ class ListTransactionsTest(BitcoinTestFramework):
# 1. Chain a few transactions that don't opt-in.
txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
assert(not is_opt_in(self.nodes[0], txid_1))
check_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
sync_mempools(self.nodes)
check_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})

# Tx2 will build off txid_1, still not opting in to RBF.
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
@@ -152,9 +135,9 @@ class ListTransactionsTest(BitcoinTestFramework):
# ...and check the result
assert(not is_opt_in(self.nodes[1], txid_2))
check_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
sync_mempools(self.nodes)
check_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})

# Tx3 will opt-in to RBF
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2)
@@ -163,14 +146,14 @@ class ListTransactionsTest(BitcoinTestFramework):
tx3 = self.nodes[0].createrawtransaction(inputs, outputs)
tx3_modified = txFromHex(tx3)
tx3_modified.vin[0].nSequence = 0
tx3 = binascii.hexlify(tx3_modified.serialize()).decode('utf-8')
tx3 = bytes_to_hex_str(tx3_modified.serialize())
tx3_signed = self.nodes[0].signrawtransaction(tx3)['hex']
txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)

assert(is_opt_in(self.nodes[0], txid_3))
check_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
sync_mempools(self.nodes)
check_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})

# Tx4 will chain off tx3. Doesn't signal itself, but depends on one
# that does.
@@ -182,21 +165,21 @@ class ListTransactionsTest(BitcoinTestFramework):
txid_4 = self.nodes[1].sendrawtransaction(tx4_signed)

assert(not is_opt_in(self.nodes[1], txid_4))
check_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})
sync_mempools(self.nodes)
check_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})

# Replace tx3, and check that tx4 becomes unknown
tx3_b = tx3_modified
tx3_b.vout[0].nValue -= 0.004*100000000 # bump the fee
tx3_b = binascii.hexlify(tx3_b.serialize()).decode('utf-8')
tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN) # bump the fee
tx3_b = bytes_to_hex_str(tx3_b.serialize())
tx3_b_signed = self.nodes[0].signrawtransaction(tx3_b)['hex']
txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, True)
assert(is_opt_in(self.nodes[0], txid_3b))

check_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})
sync_mempools(self.nodes)
check_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})

# Check gettransaction as well:
for n in self.nodes[0:2]:
@@ -41,40 +41,36 @@ class TestManager(NodeConnCB):
self.disconnectOkay = False

def run(self):
try:
fail = False
self.connection.rpc.generate(1) # Leave IBD
self.connection.rpc.generate(1) # Leave IBD

numBlocksToGenerate = [ 8, 16, 128, 1024 ]
for count in range(len(numBlocksToGenerate)):
current_invs = []
for i in range(numBlocksToGenerate[count]):
current_invs.append(CInv(2, random.randrange(0, 1<<256)))
if len(current_invs) >= 50000:
self.connection.send_message(msg_inv(current_invs))
current_invs = []
if len(current_invs) > 0:
numBlocksToGenerate = [8, 16, 128, 1024]
for count in range(len(numBlocksToGenerate)):
current_invs = []
for i in range(numBlocksToGenerate[count]):
current_invs.append(CInv(2, random.randrange(0, 1 << 256)))
if len(current_invs) >= 50000:
self.connection.send_message(msg_inv(current_invs))

# Wait and see how many blocks were requested
time.sleep(2)
current_invs = []
if len(current_invs) > 0:
self.connection.send_message(msg_inv(current_invs))

total_requests = 0
with mininode_lock:
for key in self.blockReqCounts:
total_requests += self.blockReqCounts[key]
if self.blockReqCounts[key] > 1:
raise AssertionError("Error, test failed: block %064x requested more than once" % key)
if total_requests > MAX_REQUESTS:
raise AssertionError("Error, too many blocks (%d) requested" % total_requests)
print "Round %d: success (total requests: %d)" % (count, total_requests)
except AssertionError as e:
print "TEST FAILED: ", e.args
# Wait and see how many blocks were requested
time.sleep(2)

total_requests = 0
with mininode_lock:
for key in self.blockReqCounts:
total_requests += self.blockReqCounts[key]
if self.blockReqCounts[key] > 1:
raise AssertionError("Error, test failed: block %064x requested more than once" % key)
if total_requests > MAX_REQUESTS:
raise AssertionError("Error, too many blocks (%d) requested" % total_requests)
print "Round %d: success (total requests: %d)" % (count, total_requests)

self.disconnectOkay = True
self.connection.disconnect_node()



class MaxBlocksInFlightTest(BitcoinTestFramework):
def add_options(self, parser):
parser.add_option("--testbinary", dest="testbinary",
@@ -86,7 +82,7 @@ class MaxBlocksInFlightTest(BitcoinTestFramework):
initialize_chain_clean(self.options.tmpdir, 1)

def setup_network(self):
self.nodes = start_nodes(1, self.options.tmpdir,
self.nodes = start_nodes(1, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1']],
binary=[self.options.testbinary])
@@ -7,7 +7,6 @@
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.comptool import wait_until
import time

'''
@@ -176,9 +175,9 @@ class MaxUploadTest(BitcoinTestFramework):
getdata_request.inv.append(CInv(2, big_old_block))

max_bytes_per_day = 200*1024*1024
daily_buffer = 144 * 1000000
daily_buffer = 144 * MAX_BLOCK_SIZE
max_bytes_available = max_bytes_per_day - daily_buffer
success_count = max_bytes_available / old_block_size
success_count = max_bytes_available // old_block_size

# 144MB will be reserved for relaying new blocks, so expect this to
# succeed for ~70 tries.
@@ -38,7 +38,6 @@ class MempoolLimitTest(BitcoinTestFramework):
self.nodes[0].settxfee(0) # return to automatic fee selection
txFS = self.nodes[0].signrawtransaction(txF['hex'])
txid = self.nodes[0].sendrawtransaction(txFS['hex'])
self.nodes[0].lockunspent(True, [us0])

relayfee = self.nodes[0].getnetworkinfo()['relayfee']
base_fee = relayfee*100
@@ -7,6 +7,7 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import COIN

MAX_ANCESTORS = 25
MAX_DESCENDANTS = 25
@@ -59,13 +60,12 @@ class MempoolPackagesTest(BitcoinTestFramework):
descendant_count = 1
descendant_fees = 0
descendant_size = 0
SATOSHIS = 100000000

for x in reversed(chain):
assert_equal(mempool[x]['descendantcount'], descendant_count)
descendant_fees += mempool[x]['fee']
assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee'])
assert_equal(mempool[x]['descendantfees'], SATOSHIS*descendant_fees)
assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN)
descendant_size += mempool[x]['size']
assert_equal(mempool[x]['descendantsize'], descendant_size)
descendant_count += 1
@@ -78,7 +78,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
descendant_fees = 0
for x in reversed(chain):
descendant_fees += mempool[x]['fee']
assert_equal(mempool[x]['descendantfees'], SATOSHIS*descendant_fees+1000)
assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 1000)

# Adding one more transaction on to the chain should fail.
try:
@@ -106,7 +106,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
descendant_fees += mempool[x]['fee']
if (x == chain[-1]):
assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee']+satoshi_round(0.00002))
assert_equal(mempool[x]['descendantfees'], SATOSHIS*descendant_fees+2000)
assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 2000)

# TODO: check that node1's mempool is as expected
@@ -25,14 +25,6 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
self.is_network_split = False
self.sync_all()

def create_tx(self, from_txid, to_address, amount):
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
signresult = self.nodes[0].signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]

def run_test(self):
start_count = self.nodes[0].getblockcount()
@@ -52,9 +44,9 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
# and make sure the mempool code behaves correctly.
b = [ self.nodes[0].getblockhash(n) for n in range(101, 105) ]
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
spend_101_raw = self.create_tx(coinbase_txids[1], node1_address, 500)
spend_102_raw = self.create_tx(coinbase_txids[2], node0_address, 500)
spend_103_raw = self.create_tx(coinbase_txids[3], node0_address, 500)
spend_101_raw = create_tx(self.nodes[0], coinbase_txids[1], node1_address, 500)
spend_102_raw = create_tx(self.nodes[0], coinbase_txids[2], node0_address, 500)
spend_103_raw = create_tx(self.nodes[0], coinbase_txids[3], node0_address, 500)

# Create a block-height-locked transaction which will be invalid after reorg
timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 500})
@@ -71,8 +63,8 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, timelock_tx)

# Create 102_1 and 103_1:
spend_102_1_raw = self.create_tx(spend_102_id, node1_address, 500)
spend_103_1_raw = self.create_tx(spend_103_id, node1_address, 500)
spend_102_1_raw = create_tx(self.nodes[0], spend_102_id, node1_address, 500)
spend_103_1_raw = create_tx(self.nodes[0], spend_103_id, node1_address, 500)

# Broadcast and mine 103_1:
spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw)
@@ -21,14 +21,6 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
self.nodes.append(start_node(0, self.options.tmpdir, args))
self.is_network_split = False

def create_tx(self, from_txid, to_address, amount):
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
signresult = self.nodes[0].signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]

def run_test(self):
node0_address = self.nodes[0].getnewaddress()
# Spend block 1/2/3's coinbase transactions
@@ -43,13 +35,13 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
b = [ self.nodes[0].getblockhash(n) for n in range(1, 4) ]
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
spends1_raw = [ self.create_tx(txid, node0_address, 500) for txid in coinbase_txids ]
spends1_raw = [ create_tx(self.nodes[0], txid, node0_address, 500) for txid in coinbase_txids ]
spends1_id = [ self.nodes[0].sendrawtransaction(tx) for tx in spends1_raw ]

blocks = []
blocks.extend(self.nodes[0].generate(1))

spends2_raw = [ self.create_tx(txid, node0_address, 499.99) for txid in spends1_id ]
spends2_raw = [ create_tx(self.nodes[0], txid, node0_address, 499.99) for txid in spends1_id ]
spends2_id = [ self.nodes[0].sendrawtransaction(tx) for tx in spends2_raw ]

blocks.extend(self.nodes[0].generate(1))
@@ -26,14 +26,6 @@ class MempoolSpendCoinbaseTest(BitcoinTestFramework):
self.nodes.append(start_node(0, self.options.tmpdir, args))
self.is_network_split = False

def create_tx(self, from_txid, to_address, amount):
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
signresult = self.nodes[0].signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]

def run_test(self):
chain_height = self.nodes[0].getblockcount()
assert_equal(chain_height, 200)
@@ -44,7 +36,7 @@ class MempoolSpendCoinbaseTest(BitcoinTestFramework):
# is too immature to spend.
b = [ self.nodes[0].getblockhash(n) for n in range(101, 103) ]
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
spends_raw = [ self.create_tx(txid, node0_address, 500) for txid in coinbase_txids ]
spends_raw = [ create_tx(self.nodes[0], txid, node0_address, 500) for txid in coinbase_txids ]

spend_101_id = self.nodes[0].sendrawtransaction(spends_raw[0])
@ -53,7 +53,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
|
||||
authpairnew = "rt:"+password
|
||||
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpair)}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
@ -63,7 +63,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
conn.close()
|
||||
|
||||
#Use new authpair to confirm both work
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
@ -74,7 +74,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
|
||||
#Wrong login name with rt's password
|
||||
authpairnew = "rtwrong:"+password
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
@ -85,7 +85,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
|
||||
#Wrong password for rt
|
||||
authpairnew = "rt:"+password+"wrong"
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
@ -96,7 +96,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
|
||||
#Correct for rt2
|
||||
authpairnew = "rt2:"+password2
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
@ -107,7 +107,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
|
||||
#Wrong password for rt2
|
||||
authpairnew = "rt2:"+password2+"wrong"
|
||||
headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
|
||||
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
|
||||
|
||||
conn = httplib.HTTPConnection(url.hostname, url.port)
|
||||
conn.connect()
|
||||
@ -117,6 +117,5 @@ class HTTPBasicsTest (BitcoinTestFramework):
|
||||
conn.close()
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
HTTPBasicsTest ().main ()
|
||||
|
@ -150,7 +150,7 @@ class AcceptBlockTest(BitcoinTestFramework):
|
||||
# 2. Send one block that builds on each tip.
|
||||
# This should be accepted.
|
||||
blocks_h2 = [] # the height 2 blocks on each node's chain
|
||||
block_time = time.time() + 1
|
||||
block_time = int(time.time()) + 1
|
||||
for i in xrange(2):
|
||||
blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
|
||||
blocks_h2[i].solve()
|
||||
|
@ -34,7 +34,7 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
self.num_nodes = 1
|
||||
self.block_heights = {}
|
||||
self.coinbase_key = CECKey()
|
||||
self.coinbase_key.set_secretbytes(bytes("horsebattery"))
|
||||
self.coinbase_key.set_secretbytes(b"horsebattery")
|
||||
self.coinbase_pubkey = self.coinbase_key.get_pubkey()
|
||||
self.block_time = int(time.time())+1
|
||||
self.tip = None
|
||||
@ -72,7 +72,7 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
block = create_block(base_block_hash, coinbase, self.block_time)
|
||||
if (spend != None):
|
||||
tx = CTransaction()
|
||||
tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), "", 0xffffffff)) # no signature yet
|
||||
tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), b"", 0xffffffff)) # no signature yet
|
||||
# This copies the java comparison tool testing behavior: the first
|
||||
# txout has a garbage scriptPubKey, "to make sure we're not
|
||||
# pre-verifying too much" (?)
|
||||
@ -82,7 +82,7 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
else:
|
||||
tx.vout.append(CTxOut(1, script))
|
||||
# Now sign it if necessary
|
||||
scriptSig = ""
|
||||
scriptSig = b""
|
||||
scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
|
||||
if (scriptPubKey[0] == OP_TRUE): # looks like an anyone-can-spend
|
||||
scriptSig = CScript([OP_TRUE])
|
||||
@ -227,7 +227,7 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
# \-> b3 (1) -> b4 (2)
|
||||
tip(6)
|
||||
block(9, spend=out4, additional_coinbase_value=1)
|
||||
yield rejected(RejectResult(16, 'bad-cb-amount'))
|
||||
yield rejected(RejectResult(16, b'bad-cb-amount'))
|
||||
|
||||
|
||||
# Create a fork that ends in a block with too much fee (the one that causes the reorg)
|
||||
@ -239,7 +239,7 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
yield rejected()
|
||||
|
||||
block(11, spend=out4, additional_coinbase_value=1)
|
||||
yield rejected(RejectResult(16, 'bad-cb-amount'))
|
||||
yield rejected(RejectResult(16, b'bad-cb-amount'))
|
||||
|
||||
|
||||
# Try again, but with a valid fork first
|
||||
@ -271,7 +271,7 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
# \-> b3 (1) -> b4 (2)
|
||||
|
||||
# Test that a block with a lot of checksigs is okay
|
||||
lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50 - 1))
|
||||
lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50 - 1))
|
||||
tip(13)
|
||||
block(15, spend=out5, script=lots_of_checksigs)
|
||||
yield accepted()
|
||||
@ -279,9 +279,9 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
|
||||
# Test that a block with too many checksigs is rejected
|
||||
out6 = get_spendable_output()
|
||||
too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50))
|
||||
too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50))
|
||||
block(16, spend=out6, script=too_many_checksigs)
|
||||
yield rejected(RejectResult(16, 'bad-blk-sigops'))
|
||||
yield rejected(RejectResult(16, b'bad-blk-sigops'))
|
||||
|
||||
|
||||
# Attempt to spend a transaction created on a different fork
|
||||
@ -290,7 +290,7 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
# \-> b3 (1) -> b4 (2)
|
||||
tip(15)
|
||||
block(17, spend=txout_b3)
|
||||
yield rejected(RejectResult(16, 'bad-txns-inputs-missingorspent'))
|
||||
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
|
||||
|
||||
# Attempt to spend a transaction created on a different fork (on a fork this time)
|
||||
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
|
||||
@ -311,7 +311,7 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
tip(15)
|
||||
out7 = get_spendable_output()
|
||||
block(20, spend=out7)
|
||||
yield rejected(RejectResult(16, 'bad-txns-premature-spend-of-coinbase'))
|
||||
yield rejected(RejectResult(16, b'bad-txns-premature-spend-of-coinbase'))
|
||||
|
||||
# Attempt to spend a coinbase at depth too low (on a fork this time)
|
||||
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
|
||||
@ -335,7 +335,7 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
old_hash = b23.sha256
|
||||
tx = CTransaction()
|
||||
script_length = MAX_BLOCK_SIZE - len(b23.serialize()) - 69
|
||||
script_output = CScript([chr(0)*script_length])
|
||||
script_output = CScript([b'\x00' * script_length])
|
||||
tx.vout.append(CTxOut(0, script_output))
|
||||
tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 1)))
|
||||
b23 = update_block(23, [tx])
|
||||
@ -347,11 +347,11 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
tip(15)
|
||||
b24 = block(24, spend=out6)
|
||||
script_length = MAX_BLOCK_SIZE - len(b24.serialize()) - 69
|
||||
script_output = CScript([chr(0)*(script_length+1)])
|
||||
script_output = CScript([b'\x00' * (script_length+1)])
|
||||
tx.vout = [CTxOut(0, script_output)]
|
||||
b24 = update_block(24, [tx])
|
||||
assert_equal(len(b24.serialize()), MAX_BLOCK_SIZE+1)
|
||||
yield rejected(RejectResult(16, 'bad-blk-length'))
|
||||
yield rejected(RejectResult(16, b'bad-blk-length'))
|
||||
|
||||
b25 = block(25, spend=out7)
|
||||
yield rejected()
|
||||
@ -363,12 +363,12 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
# \-> b3 (1) -> b4 (2)
|
||||
tip(15)
|
||||
b26 = block(26, spend=out6)
|
||||
b26.vtx[0].vin[0].scriptSig = chr(0)
|
||||
b26.vtx[0].vin[0].scriptSig = b'\x00'
|
||||
b26.vtx[0].rehash()
|
||||
# update_block causes the merkle root to get updated, even with no new
|
||||
# transactions, and updates the required state.
|
||||
b26 = update_block(26, [])
|
||||
yield rejected(RejectResult(16, 'bad-cb-length'))
|
||||
yield rejected(RejectResult(16, b'bad-cb-length'))
|
||||
|
||||
# Extend the b26 chain to make sure bitcoind isn't accepting b26
|
||||
b27 = block(27, spend=out7)
|
||||
@ -377,10 +377,10 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
# Now try a too-large-coinbase script
|
||||
tip(15)
|
||||
b28 = block(28, spend=out6)
|
||||
b28.vtx[0].vin[0].scriptSig = chr(0)*101
|
||||
b28.vtx[0].vin[0].scriptSig = b'\x00' * 101
|
||||
b28.vtx[0].rehash()
|
||||
b28 = update_block(28, [])
|
||||
yield rejected(RejectResult(16, 'bad-cb-length'))
|
||||
yield rejected(RejectResult(16, b'bad-cb-length'))
|
||||
|
||||
# Extend the b28 chain to make sure bitcoind isn't accepted b28
|
||||
b29 = block(29, spend=out7)
|
||||
@ -392,7 +392,7 @@ class FullBlockTest(ComparisonTestFramework):
|
||||
# b30 has a max-sized coinbase scriptSig.
|
||||
tip(23)
|
||||
b30 = block(30)
|
||||
b30.vtx[0].vin[0].scriptSig = chr(0)*100
|
||||
b30.vtx[0].vin[0].scriptSig = b'\x00' * 100
|
||||
b30.vtx[0].rehash()
|
||||
b30 = update_block(30, [])
|
||||
yield accepted()
|
||||
|
@ -9,8 +9,7 @@
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
|
||||
COIN = 100000000
|
||||
from test_framework.mininode import COIN, MAX_BLOCK_SIZE
|
||||
|
||||
class PrioritiseTransactionTest(BitcoinTestFramework):
|
||||
|
||||
@ -29,14 +28,29 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
|
||||
self.relayfee = self.nodes[0].getnetworkinfo()['relayfee']
|
||||
|
||||
def run_test(self):
|
||||
utxos = create_confirmed_utxos(self.relayfee, self.nodes[0], 90)
|
||||
utxo_count = 90
|
||||
utxos = create_confirmed_utxos(self.relayfee, self.nodes[0], utxo_count)
|
||||
base_fee = self.relayfee*100 # our transactions are smaller than 100kb
|
||||
txids = []
|
||||
|
||||
# Create 3 batches of transactions at 3 different fee rate levels
|
||||
range_size = utxo_count // 3
|
||||
for i in xrange(3):
|
||||
txids.append([])
|
||||
txids[i] = create_lots_of_big_transactions(self.nodes[0], self.txouts, utxos[30*i:30*i+30], (i+1)*base_fee)
|
||||
start_range = i * range_size
|
||||
end_range = start_range + range_size
|
||||
txids[i] = create_lots_of_big_transactions(self.nodes[0], self.txouts, utxos[start_range:end_range], (i+1)*base_fee)
|
||||
|
||||
# Make sure that the size of each group of transactions exceeds
|
||||
# MAX_BLOCK_SIZE -- otherwise the test needs to be revised to create
|
||||
# more transactions.
|
||||
mempool = self.nodes[0].getrawmempool(True)
|
||||
sizes = [0, 0, 0]
|
||||
for i in xrange(3):
|
||||
for j in txids[i]:
|
||||
assert(j in mempool)
|
||||
sizes[i] += mempool[j]['size']
|
||||
assert(sizes[i] > MAX_BLOCK_SIZE) # Fail => raise utxo_count
|
||||
|
||||
# add a fee delta to something in the cheapest bucket and make sure it gets mined
|
||||
# also check that a different entry in the cheapest bucket is NOT mined (lower
|
||||
@ -47,7 +61,7 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
|
||||
self.nodes[0].generate(1)
|
||||
|
||||
mempool = self.nodes[0].getrawmempool()
|
||||
print "Assert that prioritised transasction was mined"
|
||||
print "Assert that prioritised transaction was mined"
|
||||
assert(txids[0][0] not in mempool)
|
||||
assert(txids[0][1] in mempool)
|
||||
|
||||
@ -60,7 +74,7 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
|
||||
assert(high_fee_tx != None)
|
||||
|
||||
# Add a prioritisation before a tx is in the mempool (de-prioritising a
|
||||
# high-fee transaction).
|
||||
# high-fee transaction so that it's now low fee).
|
||||
self.nodes[0].prioritisetransaction(high_fee_tx, -1e15, -int(2*base_fee*COIN))
|
||||
|
||||
# Add everything back to mempool
|
||||
@ -70,8 +84,11 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
|
||||
mempool = self.nodes[0].getrawmempool()
|
||||
assert(high_fee_tx in mempool)
|
||||
|
||||
# Now verify the high feerate transaction isn't mined.
|
||||
self.nodes[0].generate(5)
|
||||
# Now verify the modified-high feerate transaction isn't mined before
|
||||
# the other high fee transactions. Keep mining until our mempool has
|
||||
# decreased by all the high fee size that we calculated above.
|
||||
while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]):
|
||||
self.nodes[0].generate(1)
|
||||
|
||||
# High fee transaction should not have been mined, but other high fee rate
|
||||
# transactions should have been.
|
||||
|
@ -86,7 +86,7 @@ class ProxyTest(BitcoinTestFramework):
|
||||
assert(isinstance(cmd, Socks5Command))
|
||||
# Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
|
||||
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
|
||||
assert_equal(cmd.addr, "15.61.23.23")
|
||||
assert_equal(cmd.addr, b"15.61.23.23")
|
||||
assert_equal(cmd.port, 1234)
|
||||
if not auth:
|
||||
assert_equal(cmd.username, None)
|
||||
@ -100,7 +100,7 @@ class ProxyTest(BitcoinTestFramework):
|
||||
assert(isinstance(cmd, Socks5Command))
|
||||
# Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
|
||||
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
|
||||
assert_equal(cmd.addr, "1233:3432:2434:2343:3234:2345:6546:4534")
|
||||
assert_equal(cmd.addr, b"1233:3432:2434:2343:3234:2345:6546:4534")
|
||||
assert_equal(cmd.port, 5443)
|
||||
if not auth:
|
||||
assert_equal(cmd.username, None)
|
||||
@ -113,7 +113,7 @@ class ProxyTest(BitcoinTestFramework):
|
||||
cmd = proxies[2].queue.get()
|
||||
assert(isinstance(cmd, Socks5Command))
|
||||
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
|
||||
assert_equal(cmd.addr, "bitcoinostk4e4re.onion")
|
||||
assert_equal(cmd.addr, b"bitcoinostk4e4re.onion")
|
||||
assert_equal(cmd.port, 8333)
|
||||
if not auth:
|
||||
assert_equal(cmd.username, None)
|
||||
@ -125,7 +125,7 @@ class ProxyTest(BitcoinTestFramework):
|
||||
cmd = proxies[3].queue.get()
|
||||
assert(isinstance(cmd, Socks5Command))
|
||||
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
|
||||
assert_equal(cmd.addr, "node.noumenon")
|
||||
assert_equal(cmd.addr, b"node.noumenon")
|
||||
assert_equal(cmd.port, 8333)
|
||||
if not auth:
|
||||
assert_equal(cmd.username, None)
|
||||
|
@ -15,7 +15,7 @@ from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
|
||||
def calc_usage(blockdir):
|
||||
return sum(os.path.getsize(blockdir+f) for f in os.listdir(blockdir) if os.path.isfile(blockdir+f))/(1024*1024)
|
||||
return sum(os.path.getsize(blockdir+f) for f in os.listdir(blockdir) if os.path.isfile(blockdir+f)) / (1024. * 1024.)
|
||||
|
||||
class PruneTest(BitcoinTestFramework):
|
||||
|
||||
@ -56,7 +56,7 @@ class PruneTest(BitcoinTestFramework):
|
||||
self.nodes[1].generate(200)
|
||||
sync_blocks(self.nodes[0:2])
|
||||
self.nodes[0].generate(150)
|
||||
# Then mine enough full blocks to create more than 550MB of data
|
||||
# Then mine enough full blocks to create more than 550MiB of data
|
||||
for i in xrange(645):
|
||||
self.mine_full_block(self.nodes[0], self.address[0])
|
||||
|
||||
@ -66,7 +66,7 @@ class PruneTest(BitcoinTestFramework):
|
||||
if not os.path.isfile(self.prunedir+"blk00000.dat"):
|
||||
raise AssertionError("blk00000.dat is missing, pruning too early")
|
||||
print "Success"
|
||||
print "Though we're already using more than 550MB, current usage:", calc_usage(self.prunedir)
|
||||
print "Though we're already using more than 550MiB, current usage:", calc_usage(self.prunedir)
|
||||
print "Mining 25 more blocks should cause the first block file to be pruned"
|
||||
# Pruning doesn't run until we're allocating another chunk, 20 full blocks past the height cutoff will ensure this
|
||||
for i in xrange(25):
|
||||
@ -311,7 +311,7 @@ class PruneTest(BitcoinTestFramework):
|
||||
# \ \
|
||||
# ++...++(1044) ..
|
||||
#
|
||||
# N0 ********************(1032) @@...@@@(1552)
|
||||
# N0 ********************(1032) @@...@@@(1552)
|
||||
# \
|
||||
# *...**(1320)
|
||||
|
||||
|
@ -56,13 +56,13 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
|
||||
rawtx = self.nodes[2].signrawtransaction(rawtx)
|
||||
|
||||
errorString = ""
|
||||
try:
|
||||
rawtx = self.nodes[2].sendrawtransaction(rawtx['hex'])
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
except JSONRPCException as e:
|
||||
assert("Missing inputs" in e.error['message'])
|
||||
else:
|
||||
assert(False)
|
||||
|
||||
assert("Missing inputs" in errorString)
|
||||
|
||||
#########################
|
||||
# RAW TX MULTISIG TESTS #
|
||||
@ -88,8 +88,6 @@ class RawTransactionsTest(BitcoinTestFramework):
|
||||
assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance
|
||||
|
||||
|
||||
|
||||
|
||||
# 2of3 test from different nodes
|
||||
bal = self.nodes[2].getbalance()
|
||||
addr1 = self.nodes[1].getnewaddress()
|
||||
|
@ -25,34 +25,13 @@ def get_sub_array_from_array(object_array, to_match):
|
||||
return item
|
||||
return []
|
||||
|
||||
def check_array_result(object_array, to_match, expected, should_not_find = False):
|
||||
"""
|
||||
Pass in array of JSON objects, a dictionary with key/value pairs
|
||||
to match against, and another dictionary with expected key/value
|
||||
pairs.
|
||||
If the should_not_find flag is true, to_match should not be found in object_array
|
||||
"""
|
||||
if should_not_find == True:
|
||||
expected = { }
|
||||
num_matched = 0
|
||||
for item in object_array:
|
||||
all_match = True
|
||||
for key,value in to_match.items():
|
||||
if item[key] != value:
|
||||
all_match = False
|
||||
if not all_match:
|
||||
continue
|
||||
for key,value in expected.items():
|
||||
if item[key] != value:
|
||||
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
|
||||
num_matched = num_matched+1
|
||||
if num_matched == 0 and should_not_find != True:
|
||||
raise AssertionError("No objects matched %s"%(str(to_match)))
|
||||
if num_matched > 0 and should_not_find == True:
|
||||
raise AssertionError("Objects was matched %s"%(str(to_match)))
|
||||
|
||||
class ReceivedByTest(BitcoinTestFramework):
|
||||
|
||||
def setup_nodes(self):
|
||||
#This test requires mocktime
|
||||
enable_mocktime()
|
||||
return start_nodes(4, self.options.tmpdir)
|
||||
|
||||
def run_test(self):
|
||||
'''
|
||||
listreceivedbyaddress Test
|
||||
@ -63,26 +42,26 @@ class ReceivedByTest(BitcoinTestFramework):
|
||||
self.sync_all()
|
||||
|
||||
#Check not listed in listreceivedbyaddress because has 0 confirmations
|
||||
check_array_result(self.nodes[1].listreceivedbyaddress(),
|
||||
assert_array_result(self.nodes[1].listreceivedbyaddress(),
|
||||
{"address":addr},
|
||||
{ },
|
||||
True)
|
||||
#Bury Tx under 10 block so it will be returned by listreceivedbyaddress
|
||||
self.nodes[1].generate(10)
|
||||
self.sync_all()
|
||||
check_array_result(self.nodes[1].listreceivedbyaddress(),
|
||||
assert_array_result(self.nodes[1].listreceivedbyaddress(),
|
||||
{"address":addr},
|
||||
{"address":addr, "account":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]})
|
||||
#With min confidence < 10
|
||||
check_array_result(self.nodes[1].listreceivedbyaddress(5),
|
||||
assert_array_result(self.nodes[1].listreceivedbyaddress(5),
|
||||
{"address":addr},
|
||||
{"address":addr, "account":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]})
|
||||
#With min confidence > 10, should not find Tx
|
||||
check_array_result(self.nodes[1].listreceivedbyaddress(11),{"address":addr},{ },True)
|
||||
assert_array_result(self.nodes[1].listreceivedbyaddress(11),{"address":addr},{ },True)
|
||||
|
||||
#Empty Tx
|
||||
addr = self.nodes[1].getnewaddress()
|
||||
check_array_result(self.nodes[1].listreceivedbyaddress(0,True),
|
||||
assert_array_result(self.nodes[1].listreceivedbyaddress(0,True),
|
||||
{"address":addr},
|
||||
{"address":addr, "account":"", "amount":0, "confirmations":0, "txids":[]})
|
||||
|
||||
@ -126,7 +105,7 @@ class ReceivedByTest(BitcoinTestFramework):
|
||||
self.sync_all()
|
||||
|
||||
# listreceivedbyaccount should return received_by_account_json because of 0 confirmations
|
||||
check_array_result(self.nodes[1].listreceivedbyaccount(),
|
||||
assert_array_result(self.nodes[1].listreceivedbyaccount(),
|
||||
{"account":account},
|
||||
received_by_account_json)
|
||||
|
||||
@ -138,7 +117,7 @@ class ReceivedByTest(BitcoinTestFramework):
|
||||
self.nodes[1].generate(10)
|
||||
self.sync_all()
|
||||
# listreceivedbyaccount should return updated account balance
|
||||
check_array_result(self.nodes[1].listreceivedbyaccount(),
|
||||
assert_array_result(self.nodes[1].listreceivedbyaccount(),
|
||||
{"account":account},
|
||||
{"account":received_by_account_json["account"], "amount":(received_by_account_json["amount"] + Decimal("0.1"))})
|
||||
|
||||
|
@ -11,16 +11,11 @@ from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from test_framework.script import *
|
||||
from test_framework.mininode import *
|
||||
import binascii
|
||||
|
||||
COIN = 100000000
|
||||
MAX_REPLACEMENT_LIMIT = 100
|
||||
|
||||
def satoshi_round(amount):
|
||||
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
|
||||
|
||||
def txToHex(tx):
|
||||
return binascii.hexlify(tx.serialize()).decode('utf-8')
|
||||
return bytes_to_hex_str(tx.serialize())
|
||||
|
||||
def make_utxo(node, amount, confirmed=True, scriptPubKey=CScript([1])):
|
||||
"""Create a txout with a given amount and scriptPubKey
|
||||
@ -54,9 +49,7 @@ def make_utxo(node, amount, confirmed=True, scriptPubKey=CScript([1])):
|
||||
tx2.vout = [CTxOut(amount, scriptPubKey)]
|
||||
tx2.rehash()
|
||||
|
||||
binascii.hexlify(tx2.serialize()).decode('utf-8')
|
||||
|
||||
signed_tx = node.signrawtransaction(binascii.hexlify(tx2.serialize()).decode('utf-8'))
|
||||
signed_tx = node.signrawtransaction(txToHex(tx2))
|
||||
|
||||
txid = node.sendrawtransaction(signed_tx['hex'], True)
|
||||
|
||||
@ -120,7 +113,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
|
||||
def test_simple_doublespend(self):
|
||||
"""Simple doublespend"""
|
||||
tx0_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
|
||||
tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
|
||||
|
||||
tx1a = CTransaction()
|
||||
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
|
||||
@ -144,7 +137,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
# Extra 0.1 BTC fee
|
||||
tx1b = CTransaction()
|
||||
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
|
||||
tx1b.vout = [CTxOut(0.9*COIN, CScript([b'b']))]
|
||||
tx1b.vout = [CTxOut(int(0.9*COIN), CScript([b'b']))]
|
||||
tx1b_hex = txToHex(tx1b)
|
||||
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
|
||||
|
||||
@ -236,7 +229,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
_total_txs=_total_txs):
|
||||
yield x
|
||||
|
||||
fee = 0.0001*COIN
|
||||
fee = int(0.0001*COIN)
|
||||
n = MAX_REPLACEMENT_LIMIT
|
||||
tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
|
||||
assert_equal(len(tree_txs), n)
|
||||
@ -269,7 +262,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
# Try again, but with more total transactions than the "max txs
|
||||
# double-spent at once" anti-DoS limit.
|
||||
for n in (MAX_REPLACEMENT_LIMIT+1, MAX_REPLACEMENT_LIMIT*2):
|
||||
fee = 0.0001*COIN
|
||||
fee = int(0.0001*COIN)
|
||||
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
|
||||
tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
|
||||
assert_equal(len(tree_txs), n)
|
||||
@ -292,7 +285,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
|
||||
def test_replacement_feeperkb(self):
|
||||
"""Replacement requires fee-per-KB to be higher"""
|
||||
tx0_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
|
||||
tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
|
||||
|
||||
tx1a = CTransaction()
|
||||
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
|
||||
@ -304,7 +297,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
# rejected.
|
||||
tx1b = CTransaction()
|
||||
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
|
||||
tx1b.vout = [CTxOut(0.001*COIN, CScript([b'a'*999000]))]
|
||||
tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*999000]))]
|
||||
tx1b_hex = txToHex(tx1b)
|
||||
|
||||
try:
|
||||
@ -316,12 +309,12 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
|
||||
def test_spends_of_conflicting_outputs(self):
|
||||
"""Replacements that spend conflicting tx outputs are rejected"""
|
||||
utxo1 = make_utxo(self.nodes[0], 1.2*COIN)
|
||||
utxo2 = make_utxo(self.nodes[0], 3.0*COIN)
|
||||
utxo1 = make_utxo(self.nodes[0], int(1.2*COIN))
|
||||
utxo2 = make_utxo(self.nodes[0], 3*COIN)
|
||||
|
||||
tx1a = CTransaction()
|
||||
tx1a.vin = [CTxIn(utxo1, nSequence=0)]
|
||||
tx1a.vout = [CTxOut(1.1*COIN, CScript([b'a']))]
|
||||
tx1a.vout = [CTxOut(int(1.1*COIN), CScript([b'a']))]
|
||||
tx1a_hex = txToHex(tx1a)
|
||||
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
|
||||
|
||||
@ -344,7 +337,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
# Spend tx1a's output to test the indirect case.
|
||||
tx1b = CTransaction()
|
||||
tx1b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
|
||||
tx1b.vout = [CTxOut(1.0*COIN, CScript([b'a']))]
|
||||
tx1b.vout = [CTxOut(1*COIN, CScript([b'a']))]
|
||||
tx1b_hex = txToHex(tx1b)
|
||||
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
|
||||
tx1b_txid = int(tx1b_txid, 16)
|
||||
@ -364,12 +357,12 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
|
||||
def test_new_unconfirmed_inputs(self):
|
||||
"""Replacements that add new unconfirmed inputs are rejected"""
|
||||
confirmed_utxo = make_utxo(self.nodes[0], 1.1*COIN)
|
||||
unconfirmed_utxo = make_utxo(self.nodes[0], 0.1*COIN, False)
|
||||
confirmed_utxo = make_utxo(self.nodes[0], int(1.1*COIN))
|
||||
unconfirmed_utxo = make_utxo(self.nodes[0], int(0.1*COIN), False)
|
||||
|
||||
tx1 = CTransaction()
|
||||
tx1.vin = [CTxIn(confirmed_utxo)]
|
||||
tx1.vout = [CTxOut(1.0*COIN, CScript([b'a']))]
|
||||
tx1.vout = [CTxOut(1*COIN, CScript([b'a']))]
|
||||
tx1_hex = txToHex(tx1)
|
||||
tx1_txid = self.nodes[0].sendrawtransaction(tx1_hex, True)
|
||||
|
||||
@ -393,7 +386,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
# Start by creating a single transaction with many outputs
|
||||
initial_nValue = 10*COIN
|
||||
utxo = make_utxo(self.nodes[0], initial_nValue)
|
||||
fee = 0.0001*COIN
|
||||
fee = int(0.0001*COIN)
|
||||
split_value = int((initial_nValue-fee)/(MAX_REPLACEMENT_LIMIT+1))
|
||||
actual_fee = initial_nValue - split_value*(MAX_REPLACEMENT_LIMIT+1)
|
||||
|
||||
@ -446,7 +439,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
|
||||
def test_opt_in(self):
|
||||
""" Replacing should only work if orig tx opted in """
|
||||
tx0_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
|
||||
tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
|
||||
|
||||
# Create a non-opting in transaction
|
||||
tx1a = CTransaction()
|
||||
@ -458,7 +451,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
# Shouldn't be able to double-spend
|
||||
tx1b = CTransaction()
|
||||
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
|
||||
tx1b.vout = [CTxOut(0.9*COIN, CScript([b'b']))]
|
||||
tx1b.vout = [CTxOut(int(0.9*COIN), CScript([b'b']))]
|
||||
tx1b_hex = txToHex(tx1b)
|
||||
|
||||
try:
|
||||
@ -469,7 +462,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
print tx1b_txid
|
||||
assert(False)
|
||||
|
||||
tx1_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
|
||||
tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
|
||||
|
||||
# Create a different non-opting in transaction
|
||||
tx2a = CTransaction()
|
||||
@ -481,7 +474,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
# Still shouldn't be able to double-spend
|
||||
tx2b = CTransaction()
|
||||
tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
|
||||
tx2b.vout = [CTxOut(0.9*COIN, CScript([b'b']))]
|
||||
tx2b.vout = [CTxOut(int(0.9*COIN), CScript([b'b']))]
|
||||
tx2b_hex = txToHex(tx2b)
|
||||
|
||||
try:
|
||||
@ -501,19 +494,19 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
tx3a = CTransaction()
|
||||
tx3a.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0xffffffff),
|
||||
CTxIn(COutPoint(tx2a_txid, 0), nSequence=0xfffffffd)]
|
||||
tx3a.vout = [CTxOut(0.9*COIN, CScript([b'c'])), CTxOut(0.9*COIN, CScript([b'd']))]
|
||||
tx3a.vout = [CTxOut(int(0.9*COIN), CScript([b'c'])), CTxOut(int(0.9*COIN), CScript([b'd']))]
|
||||
tx3a_hex = txToHex(tx3a)
|
||||
|
||||
self.nodes[0].sendrawtransaction(tx3a_hex, True)
|
||||
|
||||
tx3b = CTransaction()
|
||||
tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
|
||||
tx3b.vout = [CTxOut(0.5*COIN, CScript([b'e']))]
|
||||
tx3b.vout = [CTxOut(int(0.5*COIN), CScript([b'e']))]
|
||||
tx3b_hex = txToHex(tx3b)
|
||||
|
||||
tx3c = CTransaction()
|
||||
tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), nSequence=0)]
|
||||
tx3c.vout = [CTxOut(0.5*COIN, CScript([b'f']))]
|
||||
tx3c.vout = [CTxOut(int(0.5*COIN), CScript([b'f']))]
|
||||
tx3c_hex = txToHex(tx3c)
|
||||
|
||||
self.nodes[0].sendrawtransaction(tx3b_hex, True)
|
||||
@ -526,7 +519,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
# correctly used by replacement logic
|
||||
|
||||
# 1. Check that feeperkb uses modified fees
|
||||
tx0_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
|
||||
tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
|
||||
|
||||
tx1a = CTransaction()
|
||||
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
|
||||
@ -537,7 +530,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
# Higher fee, but the actual fee per KB is much lower.
|
||||
tx1b = CTransaction()
|
||||
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
|
||||
tx1b.vout = [CTxOut(0.001*COIN, CScript([b'a'*740000]))]
|
||||
tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*740000]))]
|
||||
tx1b_hex = txToHex(tx1b)
|
||||
|
||||
# Verify tx1b cannot replace tx1a.
|
||||
@ -557,7 +550,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
assert(tx1b_txid in self.nodes[0].getrawmempool())
|
||||
|
||||
# 2. Check that absolute fee checks use modified fee.
|
||||
tx1_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
|
||||
tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
|
||||
|
||||
tx2a = CTransaction()
|
||||
tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
|
||||
@ -568,7 +561,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
# Lower fee, but we'll prioritise it
|
||||
tx2b = CTransaction()
|
||||
tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
|
||||
tx2b.vout = [CTxOut(1.01*COIN, CScript([b'a']))]
|
||||
tx2b.vout = [CTxOut(int(1.01*COIN), CScript([b'a']))]
|
||||
tx2b.rehash()
|
||||
tx2b_hex = txToHex(tx2b)
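
The two numbered checks above work against the mempool's modified fee rather than the fee the transaction actually carries, so the test can rescue an otherwise unacceptable replacement by prioritising it first. A minimal sketch of that pattern, assuming the tx2b/tx2b_hex objects built above and an illustrative fee delta (this is not the diff's own code):

# Sketch only: bump tx2b's modified fee so the absolute-fee replacement
# check passes, then resubmit. prioritisetransaction takes a txid, a
# priority delta and a fee delta in duffs/satoshis.
self.nodes[0].prioritisetransaction(tx2b.hash, 0, int(1.0*COIN))
tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True)
assert(tx2b_txid in self.nodes[0].getrawmempool())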
|
||||
|
||||
|
@ -11,8 +11,9 @@
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
from struct import *
|
||||
from io import BytesIO
|
||||
from codecs import encode
|
||||
import binascii
|
||||
import StringIO
|
||||
|
||||
try:
|
||||
import http.client as httplib
|
||||
@ -38,7 +39,7 @@ def http_get_call(host, port, path, response_object = 0):
|
||||
if response_object:
|
||||
return conn.getresponse()
|
||||
|
||||
return conn.getresponse().read()
|
||||
return conn.getresponse().read().decode('utf-8')
|
||||
|
||||
#allows simple http post calls with a request body
|
||||
def http_post_call(host, port, path, requestdata = '', response_object = 0):
|
||||
@ -140,13 +141,13 @@ class RESTTest (BitcoinTestFramework):
|
||||
bb_hash = self.nodes[0].getbestblockhash()
|
||||
|
||||
binaryRequest = b'\x01\x02'
|
||||
binaryRequest += binascii.unhexlify(txid)
|
||||
binaryRequest += hex_str_to_bytes(txid)
|
||||
binaryRequest += pack("i", n)
|
||||
binaryRequest += binascii.unhexlify(vintx)
|
||||
binaryRequest += hex_str_to_bytes(vintx)
|
||||
binaryRequest += pack("i", 0)
|
||||
|
||||
bin_response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', binaryRequest)
|
||||
output = StringIO.StringIO()
|
||||
output = BytesIO()
|
||||
output.write(bin_response)
|
||||
output.seek(0)
|
||||
chainHeight = unpack("i", output.read(4))[0]
|
||||
@ -233,7 +234,7 @@ class RESTTest (BitcoinTestFramework):
|
||||
assert_equal(response_hex.status, 200)
|
||||
assert_greater_than(int(response_hex.getheader('content-length')), 160)
|
||||
response_hex_str = response_hex.read()
|
||||
assert_equal(response_str.encode("hex")[0:160], response_hex_str[0:160])
|
||||
assert_equal(encode(response_str, "hex_codec")[0:160], response_hex_str[0:160])
|
||||
|
||||
# compare with hex block header
|
||||
response_header_hex = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
|
||||
@ -241,7 +242,7 @@ class RESTTest (BitcoinTestFramework):
|
||||
assert_greater_than(int(response_header_hex.getheader('content-length')), 160)
|
||||
response_header_hex_str = response_header_hex.read()
|
||||
assert_equal(response_hex_str[0:160], response_header_hex_str[0:160])
|
||||
assert_equal(response_header_str.encode("hex")[0:160], response_header_hex_str[0:160])
|
||||
assert_equal(encode(response_header_str, "hex_codec")[0:160], response_header_hex_str[0:160])
|
||||
|
||||
# check json format
|
||||
block_json_string = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+'json')
|
||||
@ -251,7 +252,7 @@ class RESTTest (BitcoinTestFramework):
|
||||
# compare with json block header
|
||||
response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
|
||||
assert_equal(response_header_json.status, 200)
|
||||
response_header_json_str = response_header_json.read()
|
||||
response_header_json_str = response_header_json.read().decode('utf-8')
|
||||
json_obj = json.loads(response_header_json_str, parse_float=Decimal)
|
||||
assert_equal(len(json_obj), 1) #ensure that there is one header in the json response
|
||||
assert_equal(json_obj[0]['hash'], bb_hash) #request/response hash should be the same
|
||||
@ -275,7 +276,7 @@ class RESTTest (BitcoinTestFramework):
|
||||
self.sync_all()
|
||||
response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/5/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
|
||||
assert_equal(response_header_json.status, 200)
|
||||
response_header_json_str = response_header_json.read()
|
||||
response_header_json_str = response_header_json.read().decode('utf-8')
|
||||
json_obj = json.loads(response_header_json_str)
|
||||
assert_equal(len(json_obj), 5) #now we should have 5 header objects
|
||||
|
||||
@ -291,7 +292,6 @@ class RESTTest (BitcoinTestFramework):
|
||||
assert_greater_than(int(response.getheader('content-length')), 10)
|
||||
|
||||
|
||||
|
||||
# check block tx details
|
||||
# let's make 3 tx and mine them on node 1
|
||||
txs = []
|
||||
|
@ -7,6 +7,7 @@
|
||||
# Test fee estimation code
|
||||
#
|
||||
|
||||
from collections import OrderedDict
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
|
||||
@ -42,7 +43,7 @@ def swap_outputs_in_rawtx(rawtx, outputs, inputnum):
|
||||
def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee_increment):
|
||||
'''
|
||||
Create and send a transaction with a random fee.
|
||||
The transaction pays to a trival P2SH script, and assumes that its inputs
|
||||
The transaction pays to a trivial P2SH script, and assumes that its inputs
|
||||
are of the same form.
|
||||
The function takes a list of confirmed outputs and unconfirmed outputs
|
||||
and attempts to use the confirmed list first for its inputs.
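
For orientation, the helper documented above is driven later in this test roughly as follows; the node proxy, UTXO lists and amounts here are placeholders rather than values taken from the diff:

from decimal import Decimal

# Hypothetical inputs: an RPC proxy to a running node plus the two UTXO
# lists the docstring describes (confirmed outputs are tried first).
(txhex, fee) = small_txpuzzle_randfee(node, confirmed_utxos, unconfirmed_utxos,
                                      Decimal("0.005"),     # amount to move
                                      Decimal("0.0001"),    # min_fee
                                      Decimal("0.0001"))    # fee_increment
# The helper sends the transaction itself and returns its hex plus the fee paid.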
|
||||
@ -69,11 +70,11 @@ def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee
|
||||
if total_in <= amount + fee:
|
||||
raise RuntimeError("Insufficient funds: need %d, have %d"%(amount+fee, total_in))
|
||||
outputs = {}
|
||||
outputs[P2SH_1] = total_in - amount - fee
|
||||
outputs[P2SH_2] = amount
|
||||
outputs = OrderedDict([(P2SH_1, total_in - amount - fee),
|
||||
(P2SH_2, amount)])
|
||||
rawtx = from_node.createrawtransaction(inputs, outputs)
|
||||
rawtx = swap_outputs_in_rawtx(rawtx, outputs, len(inputs))
|
||||
# Createrawtransaction constructions a transaction that is ready to be signed
|
||||
# createrawtransaction constructs a transaction that is ready to be signed.
|
||||
# These transactions don't need to be signed, but we still have to insert the ScriptSig
|
||||
# that will satisfy the ScriptPubKey.
|
||||
completetx = rawtx[0:10]
|
||||
@ -99,12 +100,10 @@ def split_inputs(from_node, txins, txouts, initial_split = False):
|
||||
'''
|
||||
prevtxout = txins.pop()
|
||||
inputs = []
|
||||
outputs = {}
|
||||
inputs.append({ "txid" : prevtxout["txid"], "vout" : prevtxout["vout"] })
|
||||
half_change = satoshi_round(prevtxout["amount"]/2)
|
||||
rem_change = prevtxout["amount"] - half_change - Decimal("0.00010000")
|
||||
outputs[P2SH_1] = half_change
|
||||
outputs[P2SH_2] = rem_change
|
||||
outputs = OrderedDict([(P2SH_1, half_change), (P2SH_2, rem_change)])
|
||||
rawtx = from_node.createrawtransaction(inputs, outputs)
|
||||
rawtx = swap_outputs_in_rawtx(rawtx, outputs, len(inputs))
|
||||
# If this is the initial split we actually need to sign the transaction
|
||||
@ -127,7 +126,7 @@ def check_estimates(node, fees_seen, max_invalid, print_estimates = True):
|
||||
print([str(all_estimates[e-1]) for e in [1,2,3,6,15,25]])
|
||||
delta = 1.0e-6 # account for rounding error
|
||||
last_e = max(fees_seen)
|
||||
for e in filter(lambda x: x >= 0, all_estimates):
|
||||
for e in [x for x in all_estimates if x >= 0]:
|
||||
# Estimates should be within the bounds of what transactions fees actually were:
|
||||
if float(e)+delta < min(fees_seen) or float(e)-delta > max(fees_seen):
|
||||
raise AssertionError("Estimated fee (%f) out of range (%f,%f)"
|
||||
@ -241,12 +240,12 @@ class EstimateFeeTest(BitcoinTestFramework):
|
||||
from_index = random.randint(1,2)
|
||||
(txhex, fee) = small_txpuzzle_randfee(self.nodes[from_index], self.confutxo,
|
||||
self.memutxo, Decimal("0.005"), min_fee, min_fee)
|
||||
tx_kbytes = (len(txhex)/2)/1000.0
|
||||
tx_kbytes = (len(txhex) // 2) / 1000.0
|
||||
self.fees_per_kb.append(float(fee)/tx_kbytes)
|
||||
sync_mempools(self.nodes[0:3],.1)
|
||||
mined = mining_node.getblock(mining_node.generate(1)[0],True)["tx"]
|
||||
sync_blocks(self.nodes[0:3],.1)
|
||||
#update which txouts are confirmed
|
||||
# update which txouts are confirmed
|
||||
newmem = []
|
||||
for utx in self.memutxo:
|
||||
if utx["txid"] in mined:
|
||||
|
@ -61,7 +61,7 @@ class JSONRPCException(Exception):
|
||||
|
||||
def EncodeDecimal(o):
|
||||
if isinstance(o, decimal.Decimal):
|
||||
return round(o, 8)
|
||||
return str(o)
|
||||
raise TypeError(repr(o) + " is not JSON serializable")
|
||||
|
||||
class AuthServiceProxy(object):
|
||||
@ -94,11 +94,10 @@ class AuthServiceProxy(object):
|
||||
self.__conn = connection
|
||||
elif self.__url.scheme == 'https':
|
||||
self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
|
||||
None, None, False,
|
||||
timeout)
|
||||
timeout=timeout)
|
||||
else:
|
||||
self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
|
||||
False, timeout)
|
||||
timeout=timeout)
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name.startswith('__') and name.endswith('__'):
|
||||
@ -157,6 +156,11 @@ class AuthServiceProxy(object):
|
||||
raise JSONRPCException({
|
||||
'code': -342, 'message': 'missing HTTP response from server'})
|
||||
|
||||
content_type = http_response.getheader('Content-Type')
|
||||
if content_type != 'application/json':
|
||||
raise JSONRPCException({
|
||||
'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)})
|
||||
|
||||
responsedata = http_response.read().decode('utf8')
|
||||
response = json.loads(responsedata, parse_float=decimal.Decimal)
|
||||
if "error" in response and response["error"] is None:
|
||||
|
@ -3,8 +3,9 @@
|
||||
# and for constructing a getheaders message
|
||||
#
|
||||
|
||||
from mininode import *
|
||||
from .mininode import *
|
||||
import dbm
|
||||
from io import BytesIO
|
||||
|
||||
class BlockStore(object):
|
||||
def __init__(self, datadir):
|
||||
@ -21,7 +22,7 @@ class BlockStore(object):
|
||||
serialized_block = self.blockDB[repr(blockhash)]
|
||||
except KeyError:
|
||||
return None
|
||||
f = cStringIO.StringIO(serialized_block)
|
||||
f = BytesIO(serialized_block)
|
||||
ret = CBlock()
|
||||
ret.deserialize(f)
|
||||
ret.calc_sha256()
|
||||
@ -115,7 +116,7 @@ class TxStore(object):
|
||||
serialized_tx = self.txDB[repr(txhash)]
|
||||
except KeyError:
|
||||
return None
|
||||
f = cStringIO.StringIO(serialized_tx)
|
||||
f = BytesIO(serialized_tx)
|
||||
ret = CTransaction()
|
||||
ret.deserialize(f)
|
||||
ret.calc_sha256()
|
||||
|
@ -4,8 +4,8 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
#
|
||||
|
||||
from mininode import *
|
||||
from script import CScript, OP_TRUE, OP_CHECKSIG
|
||||
from .mininode import *
|
||||
from .script import CScript, OP_TRUE, OP_CHECKSIG
|
||||
|
||||
# Create a block (with regtest difficulty)
|
||||
def create_block(hashprev, coinbase, nTime=None):
|
||||
@ -29,7 +29,7 @@ def serialize_script_num(value):
|
||||
neg = value < 0
|
||||
absvalue = -value if neg else value
|
||||
while (absvalue):
|
||||
r.append(chr(absvalue & 0xff))
|
||||
r.append(int(absvalue & 0xff))
|
||||
absvalue >>= 8
|
||||
if r[-1] & 0x80:
|
||||
r.append(0x80 if neg else 0)
|
||||
@ -45,7 +45,7 @@ def create_coinbase(height, pubkey = None):
|
||||
coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
|
||||
ser_string(serialize_script_num(height)), 0xffffffff))
|
||||
coinbaseoutput = CTxOut()
|
||||
coinbaseoutput.nValue = 500*100000000
|
||||
coinbaseoutput.nValue = 500 * COIN
|
||||
halvings = int(height/150) # regtest
|
||||
coinbaseoutput.nValue >>= halvings
|
||||
if (pubkey != None):
|
||||
@ -62,6 +62,6 @@ def create_transaction(prevtx, n, sig, value):
|
||||
tx = CTransaction()
|
||||
assert(n < len(prevtx.vout))
|
||||
tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff))
|
||||
tx.vout.append(CTxOut(value, ""))
|
||||
tx.vout.append(CTxOut(value, b""))
|
||||
tx.calc_sha256()
|
||||
return tx
|
||||
|
@ -4,9 +4,9 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
#
|
||||
|
||||
from mininode import *
|
||||
from blockstore import BlockStore, TxStore
|
||||
from util import p2p_port
|
||||
from .mininode import *
|
||||
from .blockstore import BlockStore, TxStore
|
||||
from .util import p2p_port
|
||||
|
||||
'''
|
||||
This is a tool for comparing two or more dashds to each other
|
||||
@ -27,25 +27,11 @@ generator that returns TestInstance objects. See below for definition.
|
||||
|
||||
global mininode_lock
|
||||
|
||||
def wait_until(predicate, attempts=float('inf'), timeout=float('inf')):
|
||||
attempt = 0
|
||||
elapsed = 0
|
||||
|
||||
while attempt < attempts and elapsed < timeout:
|
||||
with mininode_lock:
|
||||
if predicate():
|
||||
return True
|
||||
attempt += 1
|
||||
elapsed += 0.05
|
||||
time.sleep(0.05)
|
||||
|
||||
return False
|
||||
|
||||
class RejectResult(object):
|
||||
'''
|
||||
Outcome that expects rejection of a transaction or block.
|
||||
'''
|
||||
def __init__(self, code, reason=''):
|
||||
def __init__(self, code, reason=b''):
|
||||
self.code = code
|
||||
self.reason = reason
|
||||
def match(self, other):
|
||||
@ -111,9 +97,9 @@ class TestNode(NodeConnCB):
|
||||
raise AssertionError("Got pong for unknown ping [%s]" % repr(message))
|
||||
|
||||
def on_reject(self, conn, message):
|
||||
if message.message == 'tx':
|
||||
if message.message == b'tx':
|
||||
self.tx_reject_map[message.data] = RejectResult(message.code, message.reason)
|
||||
if message.message == 'block':
|
||||
if message.message == b'block':
|
||||
self.block_reject_map[message.data] = RejectResult(message.code, message.reason)
|
||||
|
||||
def send_inv(self, obj):
|
||||
|
@ -20,11 +20,12 @@
|
||||
import struct
|
||||
import socket
|
||||
import asyncore
|
||||
import binascii
|
||||
import time
|
||||
import sys
|
||||
import random
|
||||
import cStringIO
|
||||
from binascii import hexlify, unhexlify
|
||||
from io import BytesIO
|
||||
from codecs import encode
|
||||
import hashlib
|
||||
from threading import RLock
|
||||
from threading import Thread
|
||||
@ -35,11 +36,13 @@ import dash_hash
|
||||
|
||||
BIP0031_VERSION = 60000
|
||||
MY_VERSION = 70103 # past bip-31 for ping/pong
|
||||
MY_SUBVERSION = "/python-mininode-tester:0.0.1/"
|
||||
MY_SUBVERSION = b"/python-mininode-tester:0.0.2/"
|
||||
|
||||
MAX_INV_SZ = 50000
|
||||
MAX_BLOCK_SIZE = 1000000
|
||||
|
||||
COIN = 100000000L # 1 btc in satoshis
|
||||
|
||||
# Keep our own socket map for asyncore, so that we can track disconnects
|
||||
# ourselves (to workaround an issue with closing an asyncore socket when
|
||||
# using select)
|
||||
@ -77,12 +80,12 @@ def deser_string(f):
|
||||
|
||||
def ser_string(s):
|
||||
if len(s) < 253:
|
||||
return chr(len(s)) + s
|
||||
return struct.pack("B", len(s)) + s
|
||||
elif len(s) < 0x10000:
|
||||
return chr(253) + struct.pack("<H", len(s)) + s
|
||||
return struct.pack("<BH", 253, len(s)) + s
|
||||
elif len(s) < 0x100000000L:
|
||||
return chr(254) + struct.pack("<I", len(s)) + s
|
||||
return chr(255) + struct.pack("<Q", len(s)) + s
|
||||
return struct.pack("<BI", 254, len(s)) + s
|
||||
return struct.pack("<BQ", 255, len(s)) + s
|
||||
|
||||
|
||||
def deser_uint256(f):
|
||||
@ -94,7 +97,7 @@ def deser_uint256(f):
|
||||
|
||||
|
||||
def ser_uint256(u):
|
||||
rs = ""
|
||||
rs = b""
|
||||
for i in xrange(8):
|
||||
rs += struct.pack("<I", u & 0xFFFFFFFFL)
|
||||
u >>= 32
|
||||
@ -132,15 +135,15 @@ def deser_vector(f, c):
|
||||
|
||||
|
||||
def ser_vector(l):
|
||||
r = ""
|
||||
r = b""
|
||||
if len(l) < 253:
|
||||
r = chr(len(l))
|
||||
r = struct.pack("B", len(l))
|
||||
elif len(l) < 0x10000:
|
||||
r = chr(253) + struct.pack("<H", len(l))
|
||||
r = struct.pack("<BH", 253, len(l))
|
||||
elif len(l) < 0x100000000L:
|
||||
r = chr(254) + struct.pack("<I", len(l))
|
||||
r = struct.pack("<BI", 254, len(l))
|
||||
else:
|
||||
r = chr(255) + struct.pack("<Q", len(l))
|
||||
r = struct.pack("<BQ", 255, len(l))
|
||||
for i in l:
|
||||
r += i.serialize()
|
||||
return r
|
||||
@ -162,15 +165,15 @@ def deser_uint256_vector(f):
|
||||
|
||||
|
||||
def ser_uint256_vector(l):
|
||||
r = ""
|
||||
r = b""
|
||||
if len(l) < 253:
|
||||
r = chr(len(l))
|
||||
r = struct.pack("B", len(l))
|
||||
elif len(l) < 0x10000:
|
||||
r = chr(253) + struct.pack("<H", len(l))
|
||||
r = struct.pack("<BH", 253, len(l))
|
||||
elif len(l) < 0x100000000L:
|
||||
r = chr(254) + struct.pack("<I", len(l))
|
||||
r = struct.pack("<BI", 254, len(l))
|
||||
else:
|
||||
r = chr(255) + struct.pack("<Q", len(l))
|
||||
r = struct.pack("<BQ", 255, len(l))
|
||||
for i in l:
|
||||
r += ser_uint256(i)
|
||||
return r
|
||||
@ -192,15 +195,15 @@ def deser_string_vector(f):
|
||||
|
||||
|
||||
def ser_string_vector(l):
|
||||
r = ""
|
||||
r = b""
|
||||
if len(l) < 253:
|
||||
r = chr(len(l))
|
||||
r = struct.pack("B", len(l))
|
||||
elif len(l) < 0x10000:
|
||||
r = chr(253) + struct.pack("<H", len(l))
|
||||
r = struct.pack("<BH", 253, len(l))
|
||||
elif len(l) < 0x100000000L:
|
||||
r = chr(254) + struct.pack("<I", len(l))
|
||||
r = struct.pack("<BI", 254, len(l))
|
||||
else:
|
||||
r = chr(255) + struct.pack("<Q", len(l))
|
||||
r = struct.pack("<BQ", 255, len(l))
|
||||
for sv in l:
|
||||
r += ser_string(sv)
|
||||
return r
|
||||
@ -222,34 +225,34 @@ def deser_int_vector(f):
|
||||
|
||||
|
||||
def ser_int_vector(l):
|
||||
r = ""
|
||||
r = b""
|
||||
if len(l) < 253:
|
||||
r = chr(len(l))
|
||||
r = struct.pack("B", len(l))
|
||||
elif len(l) < 0x10000:
|
||||
r = chr(253) + struct.pack("<H", len(l))
|
||||
r = struct.pack("<BH", 253, len(l))
|
||||
elif len(l) < 0x100000000L:
|
||||
r = chr(254) + struct.pack("<I", len(l))
|
||||
r = struct.pack("<BI", 254, len(l))
|
||||
else:
|
||||
r = chr(255) + struct.pack("<Q", len(l))
|
||||
r = struct.pack("<BQ", 255, len(l))
|
||||
for i in l:
|
||||
r += struct.pack("<i", i)
|
||||
return r
|
||||
|
||||
# Deserialize from a hex string representation (eg from RPC)
|
||||
def FromHex(obj, hex_string):
|
||||
obj.deserialize(cStringIO.StringIO(binascii.unhexlify(hex_string)))
|
||||
obj.deserialize(BytesIO(unhexlify(hex_string.encode('ascii'))))
|
||||
return obj
|
||||
|
||||
# Convert a binary-serializable object to hex (eg for submission via RPC)
|
||||
def ToHex(obj):
|
||||
return binascii.hexlify(obj.serialize()).decode('utf-8')
|
||||
return hexlify(obj.serialize()).decode('ascii')
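
A small round-trip sketch for the two helpers above, assuming this module is importable as test_framework.mininode and that node/txid stand in for an RPC proxy and a transaction id (both hypothetical here):

from test_framework.mininode import CTransaction, FromHex, ToHex

raw_hex = node.getrawtransaction(txid)   # hex string as returned by RPC
tx = FromHex(CTransaction(), raw_hex)    # parse into a CTransaction object
assert ToHex(tx) == raw_hex              # re-serializing reproduces the input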
|
||||
|
||||
# Objects that map to dashd objects, which can be serialized/deserialized
|
||||
|
||||
class CAddress(object):
|
||||
def __init__(self):
|
||||
self.nServices = 1
|
||||
self.pchReserved = "\x00" * 10 + "\xff" * 2
|
||||
self.pchReserved = b"\x00" * 10 + b"\xff" * 2
|
||||
self.ip = "0.0.0.0"
|
||||
self.port = 0
|
||||
|
||||
@ -260,7 +263,7 @@ class CAddress(object):
|
||||
self.port = struct.unpack(">H", f.read(2))[0]
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<Q", self.nServices)
|
||||
r += self.pchReserved
|
||||
r += socket.inet_aton(self.ip)
|
||||
@ -287,7 +290,7 @@ class CInv(object):
|
||||
self.hash = deser_uint256(f)
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<i", self.type)
|
||||
r += ser_uint256(self.hash)
|
||||
return r
|
||||
@ -307,7 +310,7 @@ class CBlockLocator(object):
|
||||
self.vHave = deser_uint256_vector(f)
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<i", self.nVersion)
|
||||
r += ser_uint256_vector(self.vHave)
|
||||
return r
|
||||
@ -327,7 +330,7 @@ class COutPoint(object):
|
||||
self.n = struct.unpack("<I", f.read(4))[0]
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += ser_uint256(self.hash)
|
||||
r += struct.pack("<I", self.n)
|
||||
return r
|
||||
@ -337,7 +340,7 @@ class COutPoint(object):
|
||||
|
||||
|
||||
class CTxIn(object):
|
||||
def __init__(self, outpoint=None, scriptSig="", nSequence=0):
|
||||
def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
|
||||
if outpoint is None:
|
||||
self.prevout = COutPoint()
|
||||
else:
|
||||
@ -352,7 +355,7 @@ class CTxIn(object):
|
||||
self.nSequence = struct.unpack("<I", f.read(4))[0]
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += self.prevout.serialize()
|
||||
r += ser_string(self.scriptSig)
|
||||
r += struct.pack("<I", self.nSequence)
|
||||
@ -360,12 +363,12 @@ class CTxIn(object):
|
||||
|
||||
def __repr__(self):
|
||||
return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
|
||||
% (repr(self.prevout), binascii.hexlify(self.scriptSig),
|
||||
% (repr(self.prevout), hexlify(self.scriptSig),
|
||||
self.nSequence)
|
||||
|
||||
|
||||
class CTxOut(object):
|
||||
def __init__(self, nValue=0, scriptPubKey=""):
|
||||
def __init__(self, nValue=0, scriptPubKey=b""):
|
||||
self.nValue = nValue
|
||||
self.scriptPubKey = scriptPubKey
|
||||
|
||||
@ -374,15 +377,15 @@ class CTxOut(object):
|
||||
self.scriptPubKey = deser_string(f)
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<q", self.nValue)
|
||||
r += ser_string(self.scriptPubKey)
|
||||
return r
|
||||
|
||||
def __repr__(self):
|
||||
return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
|
||||
% (self.nValue // 100000000, self.nValue % 100000000,
|
||||
binascii.hexlify(self.scriptPubKey))
|
||||
% (self.nValue // COIN, self.nValue % COIN,
|
||||
hexlify(self.scriptPubKey))
|
||||
|
||||
|
||||
class CTransaction(object):
|
||||
@ -411,7 +414,7 @@ class CTransaction(object):
|
||||
self.hash = None
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<i", self.nVersion)
|
||||
r += ser_vector(self.vin)
|
||||
r += ser_vector(self.vout)
|
||||
@ -425,12 +428,12 @@ class CTransaction(object):
|
||||
def calc_sha256(self):
|
||||
if self.sha256 is None:
|
||||
self.sha256 = uint256_from_str(hash256(self.serialize()))
|
||||
self.hash = hash256(self.serialize())[::-1].encode('hex_codec')
|
||||
self.hash = encode(hash256(self.serialize())[::-1], 'hex_codec').decode('ascii')
|
||||
|
||||
def is_valid(self):
|
||||
self.calc_sha256()
|
||||
for tout in self.vout:
|
||||
if tout.nValue < 0 or tout.nValue > 21000000L * 100000000L:
|
||||
if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
|
||||
return False
|
||||
return True
|
||||
|
||||
@ -475,7 +478,7 @@ class CBlockHeader(object):
|
||||
self.hash = None
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<i", self.nVersion)
|
||||
r += ser_uint256(self.hashPrevBlock)
|
||||
r += ser_uint256(self.hashMerkleRoot)
|
||||
@ -486,7 +489,7 @@ class CBlockHeader(object):
|
||||
|
||||
def calc_sha256(self):
|
||||
if self.sha256 is None:
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<i", self.nVersion)
|
||||
r += ser_uint256(self.hashPrevBlock)
|
||||
r += ser_uint256(self.hashMerkleRoot)
|
||||
@ -494,7 +497,7 @@ class CBlockHeader(object):
|
||||
r += struct.pack("<I", self.nBits)
|
||||
r += struct.pack("<I", self.nNonce)
|
||||
self.sha256 = uint256_from_str(dashhash(r))
|
||||
self.hash = dashhash(r)[::-1].encode('hex_codec')
|
||||
self.hash = encode(dashhash(r)[::-1], 'hex_codec').decode('ascii')
|
||||
|
||||
def rehash(self):
|
||||
self.sha256 = None
|
||||
@ -517,7 +520,7 @@ class CBlock(CBlockHeader):
|
||||
self.vtx = deser_vector(f, CTransaction)
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += super(CBlock, self).serialize()
|
||||
r += ser_vector(self.vtx)
|
||||
return r
|
||||
@ -572,9 +575,9 @@ class CUnsignedAlert(object):
|
||||
self.nMaxVer = 0
|
||||
self.setSubVer = []
|
||||
self.nPriority = 0
|
||||
self.strComment = ""
|
||||
self.strStatusBar = ""
|
||||
self.strReserved = ""
|
||||
self.strComment = b""
|
||||
self.strStatusBar = b""
|
||||
self.strReserved = b""
|
||||
|
||||
def deserialize(self, f):
|
||||
self.nVersion = struct.unpack("<i", f.read(4))[0]
|
||||
@ -592,7 +595,7 @@ class CUnsignedAlert(object):
|
||||
self.strReserved = deser_string(f)
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<i", self.nVersion)
|
||||
r += struct.pack("<q", self.nRelayUntil)
|
||||
r += struct.pack("<q", self.nExpiration)
|
||||
@ -617,15 +620,15 @@ class CUnsignedAlert(object):
|
||||
|
||||
class CAlert(object):
|
||||
def __init__(self):
|
||||
self.vchMsg = ""
|
||||
self.vchSig = ""
|
||||
self.vchMsg = b""
|
||||
self.vchSig = b""
|
||||
|
||||
def deserialize(self, f):
|
||||
self.vchMsg = deser_string(f)
|
||||
self.vchSig = deser_string(f)
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += ser_string(self.vchMsg)
|
||||
r += ser_string(self.vchSig)
|
||||
return r
|
||||
@ -637,12 +640,12 @@ class CAlert(object):
|
||||
|
||||
# Objects that correspond to messages on the wire
|
||||
class msg_version(object):
|
||||
command = "version"
|
||||
command = b"version"
|
||||
|
||||
def __init__(self):
|
||||
self.nVersion = MY_VERSION
|
||||
self.nServices = 1
|
||||
self.nTime = time.time()
|
||||
self.nTime = int(time.time())
|
||||
self.addrTo = CAddress()
|
||||
self.addrFrom = CAddress()
|
||||
self.nNonce = random.getrandbits(64)
|
||||
@ -673,7 +676,7 @@ class msg_version(object):
|
||||
self.nStartingHeight = None
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<i", self.nVersion)
|
||||
r += struct.pack("<Q", self.nServices)
|
||||
r += struct.pack("<q", self.nTime)
|
||||
@ -692,7 +695,7 @@ class msg_version(object):
|
||||
|
||||
|
||||
class msg_verack(object):
|
||||
command = "verack"
|
||||
command = b"verack"
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
@ -701,14 +704,14 @@ class msg_verack(object):
|
||||
pass
|
||||
|
||||
def serialize(self):
|
||||
return ""
|
||||
return b""
|
||||
|
||||
def __repr__(self):
|
||||
return "msg_verack()"
|
||||
|
||||
|
||||
class msg_addr(object):
|
||||
command = "addr"
|
||||
command = b"addr"
|
||||
|
||||
def __init__(self):
|
||||
self.addrs = []
|
||||
@ -724,7 +727,7 @@ class msg_addr(object):
|
||||
|
||||
|
||||
class msg_alert(object):
|
||||
command = "alert"
|
||||
command = b"alert"
|
||||
|
||||
def __init__(self):
|
||||
self.alert = CAlert()
|
||||
@ -734,7 +737,7 @@ class msg_alert(object):
|
||||
self.alert.deserialize(f)
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += self.alert.serialize()
|
||||
return r
|
||||
|
||||
@ -743,7 +746,7 @@ class msg_alert(object):
|
||||
|
||||
|
||||
class msg_inv(object):
|
||||
command = "inv"
|
||||
command = b"inv"
|
||||
|
||||
def __init__(self, inv=None):
|
||||
if inv is None:
|
||||
@ -762,7 +765,7 @@ class msg_inv(object):
|
||||
|
||||
|
||||
class msg_getdata(object):
|
||||
command = "getdata"
|
||||
command = b"getdata"
|
||||
|
||||
def __init__(self, inv=None):
|
||||
self.inv = inv if inv != None else []
|
||||
@ -778,7 +781,7 @@ class msg_getdata(object):
|
||||
|
||||
|
||||
class msg_getblocks(object):
|
||||
command = "getblocks"
|
||||
command = b"getblocks"
|
||||
|
||||
def __init__(self):
|
||||
self.locator = CBlockLocator()
|
||||
@ -790,7 +793,7 @@ class msg_getblocks(object):
|
||||
self.hashstop = deser_uint256(f)
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += self.locator.serialize()
|
||||
r += ser_uint256(self.hashstop)
|
||||
return r
|
||||
@ -801,7 +804,7 @@ class msg_getblocks(object):
|
||||
|
||||
|
||||
class msg_tx(object):
|
||||
command = "tx"
|
||||
command = b"tx"
|
||||
|
||||
def __init__(self, tx=CTransaction()):
|
||||
self.tx = tx
|
||||
@ -817,7 +820,7 @@ class msg_tx(object):
|
||||
|
||||
|
||||
class msg_block(object):
|
||||
command = "block"
|
||||
command = b"block"
|
||||
|
||||
def __init__(self, block=None):
|
||||
if block is None:
|
||||
@ -836,7 +839,7 @@ class msg_block(object):
|
||||
|
||||
|
||||
class msg_getaddr(object):
|
||||
command = "getaddr"
|
||||
command = b"getaddr"
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
@ -845,14 +848,14 @@ class msg_getaddr(object):
|
||||
pass
|
||||
|
||||
def serialize(self):
|
||||
return ""
|
||||
return b""
|
||||
|
||||
def __repr__(self):
|
||||
return "msg_getaddr()"
|
||||
|
||||
|
||||
class msg_ping_prebip31(object):
|
||||
command = "ping"
|
||||
command = b"ping"
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
@ -861,14 +864,14 @@ class msg_ping_prebip31(object):
|
||||
pass
|
||||
|
||||
def serialize(self):
|
||||
return ""
|
||||
return b""
|
||||
|
||||
def __repr__(self):
|
||||
return "msg_ping() (pre-bip31)"
|
||||
|
||||
|
||||
class msg_ping(object):
|
||||
command = "ping"
|
||||
command = b"ping"
|
||||
|
||||
def __init__(self, nonce=0L):
|
||||
self.nonce = nonce
|
||||
@ -877,7 +880,7 @@ class msg_ping(object):
|
||||
self.nonce = struct.unpack("<Q", f.read(8))[0]
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<Q", self.nonce)
|
||||
return r
|
||||
|
||||
@ -886,16 +889,16 @@ class msg_ping(object):
|
||||
|
||||
|
||||
class msg_pong(object):
|
||||
command = "pong"
|
||||
command = b"pong"
|
||||
|
||||
def __init__(self, nonce=0L):
|
||||
def __init__(self, nonce=0):
|
||||
self.nonce = nonce
|
||||
|
||||
def deserialize(self, f):
|
||||
self.nonce = struct.unpack("<Q", f.read(8))[0]
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += struct.pack("<Q", self.nonce)
|
||||
return r
|
||||
|
||||
@ -904,7 +907,7 @@ class msg_pong(object):
|
||||
|
||||
|
||||
class msg_mempool(object):
|
||||
command = "mempool"
|
||||
command = b"mempool"
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
@ -913,13 +916,13 @@ class msg_mempool(object):
|
||||
pass
|
||||
|
||||
def serialize(self):
|
||||
return ""
|
||||
return b""
|
||||
|
||||
def __repr__(self):
|
||||
return "msg_mempool()"
|
||||
|
||||
class msg_sendheaders(object):
|
||||
command = "sendheaders"
|
||||
command = b"sendheaders"
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
@ -928,7 +931,7 @@ class msg_sendheaders(object):
|
||||
pass
|
||||
|
||||
def serialize(self):
|
||||
return ""
|
||||
return b""
|
||||
|
||||
def __repr__(self):
|
||||
return "msg_sendheaders()"
|
||||
@ -938,7 +941,7 @@ class msg_sendheaders(object):
|
||||
# vector of hashes
|
||||
# hash_stop (hash of last desired block header, 0 to get as many as possible)
|
||||
class msg_getheaders(object):
|
||||
command = "getheaders"
|
||||
command = b"getheaders"
|
||||
|
||||
def __init__(self):
|
||||
self.locator = CBlockLocator()
|
||||
@ -950,7 +953,7 @@ class msg_getheaders(object):
|
||||
self.hashstop = deser_uint256(f)
|
||||
|
||||
def serialize(self):
|
||||
r = ""
|
||||
r = b""
|
||||
r += self.locator.serialize()
|
||||
r += ser_uint256(self.hashstop)
|
||||
return r
|
||||
@ -963,7 +966,7 @@ class msg_getheaders(object):
|
||||
# headers message has
|
||||
# <count> <vector of block headers>
|
||||
class msg_headers(object):
|
||||
command = "headers"
|
||||
command = b"headers"
|
||||
|
||||
def __init__(self):
|
||||
self.headers = []
|
||||
@ -983,26 +986,29 @@ class msg_headers(object):
|
||||
|
||||
|
||||
class msg_reject(object):
|
||||
command = "reject"
|
||||
command = b"reject"
|
||||
REJECT_MALFORMED = 1
|
||||
|
||||
def __init__(self):
|
||||
self.message = ""
|
||||
self.code = ""
|
||||
self.reason = ""
|
||||
self.message = b""
|
||||
self.code = 0
|
||||
self.reason = b""
|
||||
self.data = 0L
|
||||
|
||||
def deserialize(self, f):
|
||||
self.message = deser_string(f)
|
||||
self.code = struct.unpack("<B", f.read(1))[0]
|
||||
self.reason = deser_string(f)
|
||||
if (self.message == "block" or self.message == "tx"):
|
||||
if (self.code != self.REJECT_MALFORMED and
|
||||
(self.message == b"block" or self.message == b"tx")):
|
||||
self.data = deser_uint256(f)
|
||||
|
||||
def serialize(self):
|
||||
r = ser_string(self.message)
|
||||
r += struct.pack("<B", self.code)
|
||||
r += ser_string(self.reason)
|
||||
if (self.message == "block" or self.message == "tx"):
|
||||
if (self.code != self.REJECT_MALFORMED and
|
||||
(self.message == b"block" or self.message == b"tx")):
|
||||
r += ser_uint256(self.data)
|
||||
return r
|
||||
|
||||
@ -1010,6 +1016,20 @@ class msg_reject(object):
|
||||
return "msg_reject: %s %d %s [%064x]" \
|
||||
% (self.message, self.code, self.reason, self.data)
|
||||
|
||||
# Helper function
|
||||
def wait_until(predicate, attempts=float('inf'), timeout=float('inf')):
|
||||
attempt = 0
|
||||
elapsed = 0
|
||||
|
||||
while attempt < attempts and elapsed < timeout:
|
||||
with mininode_lock:
|
||||
if predicate():
|
||||
return True
|
||||
attempt += 1
|
||||
elapsed += 0.05
|
||||
time.sleep(0.05)
|
||||
|
||||
return False
|
||||
|
||||
# This is what a callback should look like for NodeConn
|
||||
# Reimplement the on_* functions to provide handling for events
|
||||
@ -1087,32 +1107,58 @@ class NodeConnCB(object):
|
||||
def on_mempool(self, conn): pass
|
||||
def on_pong(self, conn, message): pass
|
||||
|
||||
# More useful callbacks and functions for NodeConnCB's which have a single NodeConn
|
||||
class SingleNodeConnCB(NodeConnCB):
|
||||
def __init__(self):
|
||||
NodeConnCB.__init__(self)
|
||||
self.connection = None
|
||||
self.ping_counter = 1
|
||||
self.last_pong = msg_pong()
|
||||
|
||||
def add_connection(self, conn):
|
||||
self.connection = conn
|
||||
|
||||
# Wrapper for the NodeConn's send_message function
|
||||
def send_message(self, message):
|
||||
self.connection.send_message(message)
|
||||
|
||||
def on_pong(self, conn, message):
|
||||
self.last_pong = message
|
||||
|
||||
# Sync up with the node
|
||||
def sync_with_ping(self, timeout=30):
|
||||
def received_pong():
|
||||
return (self.last_pong.nonce == self.ping_counter)
|
||||
self.send_message(msg_ping(nonce=self.ping_counter))
|
||||
success = wait_until(received_pong, timeout)
|
||||
self.ping_counter += 1
|
||||
return success
|
||||
|
||||
# The actual NodeConn class
|
||||
# This class provides an interface for a p2p connection to a specified node
|
||||
class NodeConn(asyncore.dispatcher):
|
||||
messagemap = {
|
||||
"version": msg_version,
|
||||
"verack": msg_verack,
|
||||
"addr": msg_addr,
|
||||
"alert": msg_alert,
|
||||
"inv": msg_inv,
|
||||
"getdata": msg_getdata,
|
||||
"getblocks": msg_getblocks,
|
||||
"tx": msg_tx,
|
||||
"block": msg_block,
|
||||
"getaddr": msg_getaddr,
|
||||
"ping": msg_ping,
|
||||
"pong": msg_pong,
|
||||
"headers": msg_headers,
|
||||
"getheaders": msg_getheaders,
|
||||
"reject": msg_reject,
|
||||
"mempool": msg_mempool
|
||||
b"version": msg_version,
|
||||
b"verack": msg_verack,
|
||||
b"addr": msg_addr,
|
||||
b"alert": msg_alert,
|
||||
b"inv": msg_inv,
|
||||
b"getdata": msg_getdata,
|
||||
b"getblocks": msg_getblocks,
|
||||
b"tx": msg_tx,
|
||||
b"block": msg_block,
|
||||
b"getaddr": msg_getaddr,
|
||||
b"ping": msg_ping,
|
||||
b"pong": msg_pong,
|
||||
b"headers": msg_headers,
|
||||
b"getheaders": msg_getheaders,
|
||||
b"reject": msg_reject,
|
||||
b"mempool": msg_mempool,
|
||||
}
|
||||
MAGIC_BYTES = {
|
||||
"mainnet": "\xbf\x0c\x6b\xbd", # mainnet
|
||||
"testnet3": "\xce\xe2\xca\xff", # testnet3
|
||||
"regtest": "\xfc\xc1\xb7\xdc" # regtest
|
||||
"mainnet": b"\xbf\x0c\x6b\xbd", # mainnet
|
||||
"testnet3": b"\xce\xe2\xca\xff", # testnet3
|
||||
"regtest": b"\xfc\xc1\xb7\xdc" # regtest
|
||||
}
|
||||
|
||||
def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=1):
|
||||
@ -1121,8 +1167,8 @@ class NodeConn(asyncore.dispatcher):
|
||||
self.dstaddr = dstaddr
|
||||
self.dstport = dstport
|
||||
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self.sendbuf = ""
|
||||
self.recvbuf = ""
|
||||
self.sendbuf = b""
|
||||
self.recvbuf = b""
|
||||
self.ver_send = 209
|
||||
self.ver_recv = 209
|
||||
self.last_sent = 0
|
||||
@ -1159,8 +1205,8 @@ class NodeConn(asyncore.dispatcher):
|
||||
self.show_debug_msg("MiniNode: Closing Connection to %s:%d... "
|
||||
% (self.dstaddr, self.dstport))
|
||||
self.state = "closed"
|
||||
self.recvbuf = ""
|
||||
self.sendbuf = ""
|
||||
self.recvbuf = b""
|
||||
self.sendbuf = b""
|
||||
try:
|
||||
self.close()
|
||||
except:
|
||||
@ -1194,43 +1240,46 @@ class NodeConn(asyncore.dispatcher):
|
||||
self.sendbuf = self.sendbuf[sent:]
|
||||
|
||||
def got_data(self):
|
||||
while True:
|
||||
if len(self.recvbuf) < 4:
|
||||
return
|
||||
if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]:
|
||||
raise ValueError("got garbage %s" % repr(self.recvbuf))
|
||||
if self.ver_recv < 209:
|
||||
if len(self.recvbuf) < 4 + 12 + 4:
|
||||
try:
|
||||
while True:
|
||||
if len(self.recvbuf) < 4:
|
||||
return
|
||||
command = self.recvbuf[4:4+12].split("\x00", 1)[0]
|
||||
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
|
||||
checksum = None
|
||||
if len(self.recvbuf) < 4 + 12 + 4 + msglen:
|
||||
return
|
||||
msg = self.recvbuf[4+12+4:4+12+4+msglen]
|
||||
self.recvbuf = self.recvbuf[4+12+4+msglen:]
|
||||
else:
|
||||
if len(self.recvbuf) < 4 + 12 + 4 + 4:
|
||||
return
|
||||
command = self.recvbuf[4:4+12].split("\x00", 1)[0]
|
||||
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
|
||||
checksum = self.recvbuf[4+12+4:4+12+4+4]
|
||||
if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
|
||||
return
|
||||
msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
|
||||
th = sha256(msg)
|
||||
h = sha256(th)
|
||||
if checksum != h[:4]:
|
||||
raise ValueError("got bad checksum " + repr(self.recvbuf))
|
||||
self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
|
||||
if command in self.messagemap:
|
||||
f = cStringIO.StringIO(msg)
|
||||
t = self.messagemap[command]()
|
||||
t.deserialize(f)
|
||||
self.got_message(t)
|
||||
else:
|
||||
self.show_debug_msg("Unknown command: '" + command + "' " +
|
||||
repr(msg))
|
||||
if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]:
|
||||
raise ValueError("got garbage %s" % repr(self.recvbuf))
|
||||
if self.ver_recv < 209:
|
||||
if len(self.recvbuf) < 4 + 12 + 4:
|
||||
return
|
||||
command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
|
||||
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
|
||||
checksum = None
|
||||
if len(self.recvbuf) < 4 + 12 + 4 + msglen:
|
||||
return
|
||||
msg = self.recvbuf[4+12+4:4+12+4+msglen]
|
||||
self.recvbuf = self.recvbuf[4+12+4+msglen:]
|
||||
else:
|
||||
if len(self.recvbuf) < 4 + 12 + 4 + 4:
|
||||
return
|
||||
command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
|
||||
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
|
||||
checksum = self.recvbuf[4+12+4:4+12+4+4]
|
||||
if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
|
||||
return
|
||||
msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
|
||||
th = sha256(msg)
|
||||
h = sha256(th)
|
||||
if checksum != h[:4]:
|
||||
raise ValueError("got bad checksum " + repr(self.recvbuf))
|
||||
self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
|
||||
if command in self.messagemap:
|
||||
f = BytesIO(msg)
|
||||
t = self.messagemap[command]()
|
||||
t.deserialize(f)
|
||||
self.got_message(t)
|
||||
else:
|
||||
self.show_debug_msg("Unknown command: '" + command + "' " +
|
||||
repr(msg))
|
||||
except Exception as e:
|
||||
print 'got_data:', repr(e)
|
||||
|
||||
def send_message(self, message, pushbuf=False):
|
||||
if self.state != "connected" and not pushbuf:
|
||||
@ -1240,7 +1289,7 @@ class NodeConn(asyncore.dispatcher):
|
||||
data = message.serialize()
|
||||
tmsg = self.MAGIC_BYTES[self.network]
|
||||
tmsg += command
|
||||
tmsg += "\x00" * (12 - len(command))
|
||||
tmsg += b"\x00" * (12 - len(command))
|
||||
tmsg += struct.pack("<I", len(data))
|
||||
if self.ver_send >= 209:
|
||||
th = sha256(data)
|
||||
@ -1252,11 +1301,11 @@ class NodeConn(asyncore.dispatcher):
|
||||
self.last_sent = time.time()
|
||||
|
||||
def got_message(self, message):
|
||||
if message.command == "version":
|
||||
if message.command == b"version":
|
||||
if message.nVersion <= BIP0031_VERSION:
|
||||
self.messagemap['ping'] = msg_ping_prebip31
|
||||
self.messagemap[b'ping'] = msg_ping_prebip31
|
||||
if self.last_sent + 30 * 60 < time.time():
|
||||
self.send_message(self.messagemap['ping']())
|
||||
self.send_message(self.messagemap[b'ping']())
|
||||
self.show_debug_msg("Recv %s" % repr(message))
|
||||
self.cb.deliver(self, message)
|
||||
|
||||
|
@ -4,13 +4,14 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
# Linux network utilities
|
||||
|
||||
import sys
|
||||
import socket
|
||||
import fcntl
|
||||
import struct
|
||||
import array
|
||||
import os
|
||||
import binascii
|
||||
from binascii import unhexlify, hexlify
|
||||
|
||||
# Roughly based on http://voorloopnul.com/blog/a-python-netstat-in-less-than-100-lines-of-code/ by Ricardo Pascal
|
||||
STATE_ESTABLISHED = '01'
|
||||
@ -43,9 +44,9 @@ def _remove_empty(array):
|
||||
def _convert_ip_port(array):
|
||||
host,port = array.split(':')
|
||||
# convert host from mangled-per-four-bytes form as used by kernel
|
||||
host = binascii.unhexlify(host)
|
||||
host = unhexlify(host)
|
||||
host_out = ''
|
||||
for x in range(0, len(host)/4):
|
||||
for x in range(0, len(host) // 4):
|
||||
(val,) = struct.unpack('=I', host[x*4:(x+1)*4])
|
||||
host_out += '%08x' % val
|
||||
|
||||
@ -94,7 +95,7 @@ def all_interfaces():
|
||||
max_possible = 8 # initial value
|
||||
while True:
|
||||
bytes = max_possible * struct_size
|
||||
names = array.array('B', '\0' * bytes)
|
||||
names = array.array('B', b'\0' * bytes)
|
||||
outbytes = struct.unpack('iL', fcntl.ioctl(
|
||||
s.fileno(),
|
||||
0x8912, # SIOCGIFCONF
|
||||
@ -105,7 +106,7 @@ def all_interfaces():
|
||||
else:
|
||||
break
|
||||
namestr = names.tostring()
|
||||
return [(namestr[i:i+16].split('\0', 1)[0],
|
||||
return [(namestr[i:i+16].split(b'\0', 1)[0],
|
||||
socket.inet_ntoa(namestr[i+20:i+24]))
|
||||
for i in range(0, outbytes, struct_size)]
|
||||
|
||||
@ -136,7 +137,7 @@ def addr_to_hex(addr):
|
||||
addr = sub[0] + ([0] * nullbytes) + sub[1]
|
||||
else:
|
||||
raise ValueError('Could not parse address %s' % addr)
|
||||
return binascii.hexlify(bytearray(addr))
|
||||
return hexlify(bytearray(addr)).decode('ascii')
|
||||
|
||||
def test_ipv6_local():
|
||||
'''
|
||||
|
@ -227,7 +227,7 @@ OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
|
||||
# expansion
|
||||
OP_NOP1 = CScriptOp(0xb0)
|
||||
OP_CHECKLOCKTIMEVERIFY = CScriptOp(0xb1)
|
||||
OP_NOP3 = CScriptOp(0xb2)
|
||||
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
|
||||
OP_NOP4 = CScriptOp(0xb3)
|
||||
OP_NOP5 = CScriptOp(0xb4)
|
||||
OP_NOP6 = CScriptOp(0xb5)
|
||||
@ -354,7 +354,7 @@ VALID_OPCODES = {
|
||||
|
||||
OP_NOP1,
|
||||
OP_CHECKLOCKTIMEVERIFY,
|
||||
OP_NOP3,
|
||||
OP_CHECKSEQUENCEVERIFY,
|
||||
OP_NOP4,
|
||||
OP_NOP5,
|
||||
OP_NOP6,
|
||||
@ -473,7 +473,7 @@ OPCODE_NAMES.update({
|
||||
OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
|
||||
OP_NOP1 : 'OP_NOP1',
|
||||
OP_CHECKLOCKTIMEVERIFY : 'OP_CHECKLOCKTIMEVERIFY',
|
||||
OP_NOP3 : 'OP_NOP3',
|
||||
OP_CHECKSEQUENCEVERIFY : 'OP_CHECKSEQUENCEVERIFY',
|
||||
OP_NOP4 : 'OP_NOP4',
|
||||
OP_NOP5 : 'OP_NOP5',
|
||||
OP_NOP6 : 'OP_NOP6',
|
||||
@ -592,7 +592,7 @@ OPCODES_BY_NAME = {
|
||||
'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY,
|
||||
'OP_NOP1' : OP_NOP1,
|
||||
'OP_CHECKLOCKTIMEVERIFY' : OP_CHECKLOCKTIMEVERIFY,
|
||||
'OP_NOP3' : OP_NOP3,
|
||||
'OP_CHECKSEQUENCEVERIFY' : OP_CHECKSEQUENCEVERIFY,
|
||||
'OP_NOP4' : OP_NOP4,
|
||||
'OP_NOP5' : OP_NOP5,
|
||||
'OP_NOP6' : OP_NOP6,
|
||||
@ -629,7 +629,7 @@ class CScriptNum(object):
|
||||
neg = obj.value < 0
|
||||
absvalue = -obj.value if neg else obj.value
|
||||
while (absvalue):
|
||||
r.append(chr(absvalue & 0xff))
|
||||
r.append(absvalue & 0xff)
|
||||
absvalue >>= 8
|
||||
if r[-1] & 0x80:
|
||||
r.append(0x80 if neg else 0)
|
||||
@ -777,7 +777,7 @@ class CScript(bytes):
|
||||
# need to change
|
||||
def _repr(o):
|
||||
if isinstance(o, bytes):
|
||||
return "x('%s')" % hexlify(o).decode('utf8')
|
||||
return b"x('%s')" % hexlify(o).decode('ascii')
|
||||
else:
|
||||
return repr(o)
|
||||
|
||||
|
@ -102,7 +102,7 @@ class Socks5Connection(object):
|
||||
addr = recvall(self.conn, 4)
|
||||
elif atyp == AddressType.DOMAINNAME:
|
||||
n = recvall(self.conn, 1)[0]
|
||||
addr = str(recvall(self.conn, n))
|
||||
addr = recvall(self.conn, n)
|
||||
elif atyp == AddressType.IPV6:
|
||||
addr = recvall(self.conn, 16)
|
||||
else:
|
||||
@ -117,7 +117,7 @@ class Socks5Connection(object):
|
||||
self.serv.queue.put(cmdin)
|
||||
print('Proxy: ', cmdin)
|
||||
# Fall through to disconnect
|
||||
except Exception,e:
|
||||
except Exception as e:
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
self.serv.queue.put(e)
|
||||
finally:
|
||||
|
@ -26,7 +26,7 @@ from .util import (
|
||||
check_json_precision,
|
||||
initialize_chain_clean,
|
||||
)
|
||||
from authproxy import AuthServiceProxy, JSONRPCException
|
||||
from .authproxy import AuthServiceProxy, JSONRPCException
|
||||
|
||||
|
||||
class BitcoinTestFramework(object):
|
||||
@ -140,10 +140,10 @@ class BitcoinTestFramework(object):
|
||||
print("JSONRPC error: "+e.error['message'])
|
||||
traceback.print_tb(sys.exc_info()[2])
|
||||
except AssertionError as e:
|
||||
print("Assertion failed: "+e.message)
|
||||
print("Assertion failed: "+ str(e))
|
||||
traceback.print_tb(sys.exc_info()[2])
|
||||
except Exception as e:
|
||||
print("Unexpected exception caught during testing: "+str(e))
|
||||
print("Unexpected exception caught during testing: " + repr(e))
|
||||
traceback.print_tb(sys.exc_info()[2])
|
||||
|
||||
if not self.options.noshutdown:
|
||||
|
@ -2,6 +2,8 @@
|
||||
# Copyright (c) 2014-2015 The Dash Core developers
|
||||
# Distributed under the MIT/X11 software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
|
||||
#
|
||||
# Helpful routines for regression testing
|
||||
#
|
||||
@ -10,6 +12,8 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from binascii import hexlify, unhexlify
|
||||
from base64 import b64encode
|
||||
from decimal import Decimal, ROUND_DOWN
|
||||
import json
|
||||
import random
|
||||
@ -17,12 +21,33 @@ import shutil
|
||||
import subprocess
|
||||
import time
|
||||
import re
|
||||
import errno
|
||||
|
||||
from . import coverage
|
||||
from .authproxy import AuthServiceProxy, JSONRPCException
|
||||
|
||||
COVERAGE_DIR = None
|
||||
|
||||
#Set Mocktime default to OFF.
|
||||
#MOCKTIME is only needed for scripts that use the
|
||||
#cached version of the blockchain. If the cached
|
||||
#version of the blockchain is used without MOCKTIME
|
||||
#then the mempools will not sync due to IBD.
|
||||
MOCKTIME = 0
|
||||
|
||||
def enable_mocktime():
|
||||
#For backward compatibility of the python scripts
|
||||
#with previous versions of the cache, set MOCKTIME
|
||||
#to Jan 1, 2014 + (201 * 10 * 60)
|
||||
global MOCKTIME
|
||||
MOCKTIME = 1388534400 + (201 * 10 * 60)
|
||||
|
||||
def disable_mocktime():
|
||||
global MOCKTIME
|
||||
MOCKTIME = 0
|
||||
|
||||
def get_mocktime():
|
||||
return MOCKTIME
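
Taken together, these helpers are meant to be used in the pattern sketched below, mirroring initialize_chain and start_node further down: enable mocktime while regenerating the cached chain, hand the same value to every dashd via -mocktime, and disable it again afterwards.

# Sketch of the intended flow when (re)generating the cached chain.
enable_mocktime()
block_time = get_mocktime() - (201 * 2.5 * 60)   # first block ~2010 minutes back
extra_args = ["-mocktime=" + str(get_mocktime())]
# ... start the nodes with extra_args and generate the cached blocks here ...
disable_mocktime()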
|
||||
|
||||
def enable_coverage(dirname):
|
||||
"""Maintain a log of which RPC calls are made during testing."""
|
||||
@ -80,6 +105,15 @@ def check_json_precision():
|
||||
def count_bytes(hex_string):
|
||||
return len(bytearray.fromhex(hex_string))
|
||||
|
||||
def bytes_to_hex_str(byte_str):
|
||||
return hexlify(byte_str).decode('ascii')
|
||||
|
||||
def hex_str_to_bytes(hex_str):
|
||||
return unhexlify(hex_str.encode('ascii'))
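
A tiny self-contained example of the two byte/hex helpers above, which replace the str-based binascii idioms that only worked under Python 2:

payload = b"\x01\x02\xff"
as_hex = bytes_to_hex_str(payload)      # '0102ff'
assert hex_str_to_bytes(as_hex) == payload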
|
||||
|
||||
def str_to_b64str(string):
|
||||
return b64encode(string.encode('utf-8')).decode('ascii')
|
||||
|
||||
def sync_blocks(rpc_connections, wait=1):
|
||||
"""
|
||||
Wait until everybody has the same block count
|
||||
@ -124,11 +158,33 @@ def initialize_datadir(dirname, n):
|
||||
f.write("listenonion=0\n")
|
||||
return datadir
|
||||
|
||||
def rpc_url(i, rpchost=None):
|
||||
return "http://rt:rt@%s:%d" % (rpchost or '127.0.0.1', rpc_port(i))
|
||||
|
||||
def wait_for_bitcoind_start(process, url, i):
|
||||
'''
|
||||
Wait for dashd to start. This means that RPC is accessible and fully initialized.
|
||||
Raise an exception if dashd exits during initialization.
|
||||
'''
|
||||
while True:
|
||||
if process.poll() is not None:
|
||||
raise Exception('dashd exited with status %i during initialization' % process.returncode)
|
||||
try:
|
||||
rpc = get_rpc_proxy(url, i)
|
||||
blocks = rpc.getblockcount()
|
||||
break # break out of loop on success
|
||||
except IOError as e:
|
||||
if e.errno != errno.ECONNREFUSED: # Port not yet open?
|
||||
raise # unknown IO error
|
||||
except JSONRPCException as e: # Initialization phase
|
||||
if e.error['code'] != -28: # RPC in warmup?
|
||||
raise # unknown JSON RPC exception
|
||||
time.sleep(0.25)
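
A minimal sketch of how the launchers below use this function; the datadir and argument list are placeholders for what initialize_datadir and start_node actually build:

import os
import subprocess

datadir = "/tmp/test-node0"   # placeholder; real callers use initialize_datadir()
process = subprocess.Popen([os.getenv("DASHD", "dashd"), "-datadir=" + datadir,
                            "-regtest", "-server"])
wait_for_bitcoind_start(process, rpc_url(0), 0)   # blocks until RPC answers or dashd dies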
|
||||
|
||||
def initialize_chain(test_dir):
|
||||
"""
|
||||
Create (or copy from cache) a 200-block-long chain and
|
||||
4 wallets.
|
||||
dashd and dash-cli must be in search path.
|
||||
"""
|
||||
|
||||
if (not os.path.isdir(os.path.join("cache","node0"))
|
||||
@ -141,7 +197,6 @@ def initialize_chain(test_dir):
|
||||
if os.path.isdir(os.path.join("cache","node"+str(i))):
|
||||
shutil.rmtree(os.path.join("cache","node"+str(i)))
|
||||
|
||||
devnull = open(os.devnull, "w")
|
||||
# Create cache directories, run dashds:
|
||||
for i in range(4):
|
||||
datadir=initialize_datadir("cache", i)
|
||||
@ -150,40 +205,38 @@ def initialize_chain(test_dir):
|
||||
args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
|
||||
bitcoind_processes[i] = subprocess.Popen(args)
|
||||
if os.getenv("PYTHON_DEBUG", ""):
|
||||
print "initialize_chain: dashd started, calling dash-cli -rpcwait getblockcount"
|
||||
subprocess.check_call([ os.getenv("DASHCLI", "dash-cli"), "-datadir="+datadir,
|
||||
"-rpcwait", "getblockcount"], stdout=devnull)
|
||||
print "initialize_chain: dashd started, waiting for RPC to come up"
|
||||
wait_for_bitcoind_start(bitcoind_processes[i], rpc_url(i), i)
|
||||
if os.getenv("PYTHON_DEBUG", ""):
|
||||
print "initialize_chain: dassh-cli -rpcwait getblockcount completed"
|
||||
devnull.close()
|
||||
print "initialize_chain: RPC succesfully started"
|
||||
|
||||
rpcs = []
|
||||
|
||||
for i in range(4):
|
||||
try:
|
||||
url = "http://rt:rt@127.0.0.1:%d" % (rpc_port(i),)
|
||||
rpcs.append(get_rpc_proxy(url, i))
|
||||
rpcs.append(get_rpc_proxy(rpc_url(i), i))
|
||||
except:
|
||||
sys.stderr.write("Error connecting to "+url+"\n")
|
||||
sys.exit(1)
|
||||
|
||||
# Create a 200-block-long chain; each of the 4 nodes
|
||||
# gets 25 mature blocks and 25 immature.
|
||||
# blocks are created with timestamps 156 seconds apart, starting
|
||||
# at 1 Jan 2015
|
||||
block_time = 1420070400
|
||||
# blocks are created with timestamps 2.5 minutes apart
|
||||
# starting from 2010 minutes in the past
|
||||
enable_mocktime()
|
||||
block_time = get_mocktime() - (201 * 2.5 * 60)
|
||||
for i in range(2):
|
||||
for peer in range(4):
|
||||
for j in range(25):
|
||||
set_node_times(rpcs, block_time)
|
||||
rpcs[peer].generate(1)
|
||||
block_time += 156
|
||||
block_time += 2.5*60
|
||||
# Must sync before next peer starts generating blocks
|
||||
sync_blocks(rpcs)
|
||||
|
||||
# Shut them down, and clean up cache directories:
|
||||
stop_nodes(rpcs)
|
||||
wait_bitcoinds()
|
||||
disable_mocktime()
|
||||
for i in range(4):
|
||||
os.remove(log_filename("cache", i, "debug.log"))
|
||||
os.remove(log_filename("cache", i, "db.log"))
|
||||
@ -233,20 +286,15 @@ def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=
|
||||
if binary is None:
|
||||
binary = os.getenv("DASHD", "dashd")
|
||||
# RPC tests still depend on free transactions
|
||||
args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-blockprioritysize=50000" ]
|
||||
args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-blockprioritysize=50000", "-mocktime="+str(get_mocktime()) ]
|
||||
if extra_args is not None: args.extend(extra_args)
|
||||
bitcoind_processes[i] = subprocess.Popen(args)
|
||||
devnull = open(os.devnull, "w")
|
||||
if os.getenv("PYTHON_DEBUG", ""):
|
||||
print "start_node: dashd started, calling dash-cli -rpcwait getblockcount"
|
||||
subprocess.check_call([ os.getenv("DASHCLI", "dash-cli"), "-datadir="+datadir] +
|
||||
_rpchost_to_args(rpchost) +
|
||||
["-rpcwait", "getblockcount"], stdout=devnull)
|
||||
print "start_node: dashd started, waiting for RPC to come up"
|
||||
url = rpc_url(i, rpchost)
|
||||
wait_for_bitcoind_start(bitcoind_processes[i], url, i)
|
||||
if os.getenv("PYTHON_DEBUG", ""):
|
||||
print "start_node: calling dash-cli -rpcwait getblockcount returned"
|
||||
devnull.close()
|
||||
url = "http://rt:rt@%s:%d" % (rpchost or '127.0.0.1', rpc_port(i))
|
||||
|
||||
print "start_node: RPC succesfully started"
|
||||
proxy = get_rpc_proxy(url, i, timeout=timewait)
|
||||
|
||||
if COVERAGE_DIR:
|
||||
@ -260,7 +308,14 @@ def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, binary=None):
|
||||
"""
|
||||
if extra_args is None: extra_args = [ None for i in range(num_nodes) ]
|
||||
if binary is None: binary = [ None for i in range(num_nodes) ]
|
||||
return [ start_node(i, dirname, extra_args[i], rpchost, binary=binary[i]) for i in range(num_nodes) ]
|
||||
rpcs = []
|
||||
try:
|
||||
for i in range(num_nodes):
|
||||
rpcs.append(start_node(i, dirname, extra_args[i], rpchost, binary=binary[i]))
|
||||
except: # If one node failed to start, stop the others
|
||||
stop_nodes(rpcs)
|
||||
raise
|
||||
return rpcs
|
||||
|
||||
def log_filename(dirname, n_node, logname):
|
||||
return os.path.join(dirname, "node"+str(n_node), "regtest", logname)
|
||||
@ -438,9 +493,40 @@ def assert_is_hash_string(string, length=64):
|
||||
raise AssertionError(
|
||||
"String %r contains invalid characters for a hash." % string)
|
||||
|
||||
def satoshi_round(amount):
|
||||
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
|
||||
def assert_array_result(object_array, to_match, expected, should_not_find = False):
|
||||
"""
|
||||
Pass in array of JSON objects, a dictionary with key/value pairs
|
||||
to match against, and another dictionary with expected key/value
|
||||
pairs.
|
||||
If the should_not_find flag is true, to_match should not be found
|
||||
in object_array
|
||||
"""
|
||||
if should_not_find == True:
|
||||
assert_equal(expected, { })
|
||||
num_matched = 0
|
||||
for item in object_array:
|
||||
all_match = True
|
||||
for key,value in to_match.items():
|
||||
if item[key] != value:
|
||||
all_match = False
|
||||
if not all_match:
|
||||
continue
|
||||
elif should_not_find == True:
|
||||
num_matched = num_matched+1
|
||||
for key,value in expected.items():
|
||||
if item[key] != value:
|
||||
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
|
||||
num_matched = num_matched+1
|
||||
if num_matched == 0 and should_not_find != True:
|
||||
raise AssertionError("No objects matched %s"%(str(to_match)))
|
||||
if num_matched > 0 and should_not_find == True:
|
||||
raise AssertionError("Objects were found %s"%(str(to_match)))
|
||||
|
||||
def satoshi_round(amount):
|
||||
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
|
||||
|
||||
# Helper to create at least "count" utxos
|
||||
# Pass in a fee that is sufficient for relay and mining new transactions.
|
||||
def create_confirmed_utxos(fee, node, count):
|
||||
node.generate(int(0.5*count)+101)
|
||||
utxos = node.listunspent()
|
||||
@ -468,6 +554,8 @@ def create_confirmed_utxos(fee, node, count):
|
||||
assert(len(utxos) >= count)
|
||||
return utxos
|
||||
|
||||
# Create large OP_RETURN txouts that can be appended to a transaction
|
||||
# to make it large (helper for constructing large transactions).
|
||||
def gen_return_txouts():
|
||||
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
|
||||
# So we have big transactions (and therefore can't fit very many into each block)
|
||||
@ -486,6 +574,16 @@ def gen_return_txouts():
|
||||
txouts = txouts + script_pubkey
|
||||
return txouts
|
||||
|
||||
def create_tx(node, coinbase, to_address, amount):
|
||||
inputs = [{ "txid" : coinbase, "vout" : 0}]
|
||||
outputs = { to_address : amount }
|
||||
rawtx = node.createrawtransaction(inputs, outputs)
|
||||
signresult = node.signrawtransaction(rawtx)
|
||||
assert_equal(signresult["complete"], True)
|
||||
return signresult["hex"]
|
||||
|
||||
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
|
||||
# transaction to make it large. See gen_return_txouts() above.
|
||||
def create_lots_of_big_transactions(node, txouts, utxos, fee):
|
||||
addr = node.getnewaddress()
|
||||
txids = []
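Taken together, the three helpers above are typically combined to fill blocks and the mempool with oversized transactions; a sketch as it might appear inside a test's run_test (fee and counts are illustrative):

# Illustrative combination of gen_return_txouts, create_confirmed_utxos and
# create_lots_of_big_transactions, as a mempool-stress test might use them.
fee = satoshi_round("0.0005")                     # generous fee so the large txs relay and mine
txouts = gen_return_txouts()                      # pre-built OP_RETURN outputs used to bloat each tx
utxos = create_confirmed_utxos(fee, self.nodes[0], 90)
txids = create_lots_of_big_transactions(self.nodes[0], txouts, utxos, fee)
# One large spend is created per passed-in utxo, so txids has len(utxos) entries.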
|
||||
|
@ -5,7 +5,6 @@
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import *
|
||||
|
||||
@ -35,6 +34,12 @@ class WalletTest (BitcoinTestFramework):
|
||||
self.sync_all()
|
||||
|
||||
def run_test (self):
|
||||
|
||||
# Check that there's no UTXO on any of the nodes
|
||||
assert_equal(len(self.nodes[0].listunspent()), 0)
|
||||
assert_equal(len(self.nodes[1].listunspent()), 0)
|
||||
assert_equal(len(self.nodes[2].listunspent()), 0)
|
||||
|
||||
print "Mining blocks..."
|
||||
|
||||
self.nodes[0].generate(1)
|
||||
@ -51,6 +56,11 @@ class WalletTest (BitcoinTestFramework):
|
||||
assert_equal(self.nodes[1].getbalance(), 500)
|
||||
assert_equal(self.nodes[2].getbalance(), 0)
|
||||
|
||||
# Check that only first and second nodes have UTXOs
|
||||
assert_equal(len(self.nodes[0].listunspent()), 1)
|
||||
assert_equal(len(self.nodes[1].listunspent()), 1)
|
||||
assert_equal(len(self.nodes[2].listunspent()), 0)
|
||||
|
||||
# Send 210 DASH from 0 to 2 using sendtoaddress call.
|
||||
# Second transaction will be child of first, and will require a fee
|
||||
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 110)
|
||||
@ -63,6 +73,15 @@ class WalletTest (BitcoinTestFramework):
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# Exercise locking of unspent outputs
|
||||
unspent_0 = self.nodes[2].listunspent()[0]
|
||||
unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
|
||||
self.nodes[2].lockunspent(False, [unspent_0])
|
||||
assert_raises(JSONRPCException, self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
|
||||
assert_equal([unspent_0], self.nodes[2].listlockunspent())
|
||||
self.nodes[2].lockunspent(True, [unspent_0])
|
||||
assert_equal(len(self.nodes[2].listlockunspent()), 0)
|
||||
|
||||
# Have node1 generate 100 blocks (so node0 can recover the fee)
|
||||
self.nodes[1].generate(100)
|
||||
self.sync_all()
|
||||
@ -152,6 +171,10 @@ class WalletTest (BitcoinTestFramework):
|
||||
|
||||
assert(txid1 in self.nodes[3].getrawmempool())
|
||||
|
||||
# Exercise balance rpcs
|
||||
assert_equal(self.nodes[0].getwalletinfo()["unconfirmed_balance"], 1)
|
||||
assert_equal(self.nodes[0].getunconfirmedbalance(), 1)
|
||||
|
||||
#check if we can list zero value tx as available coins
|
||||
#1. create rawtx
|
||||
#2. hex-changed one output to 0.0
|
||||
@ -236,28 +259,53 @@ class WalletTest (BitcoinTestFramework):
|
||||
txObj = self.nodes[0].gettransaction(txId)
|
||||
assert_equal(txObj['amount'], Decimal('-0.0001'))
|
||||
|
||||
#this should fail
|
||||
errorString = ""
|
||||
try:
|
||||
txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1f-4")
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
except JSONRPCException as e:
|
||||
assert("Invalid amount" in e.error['message'])
|
||||
else:
|
||||
raise AssertionError("Must not parse invalid amounts")
|
||||
|
||||
assert_equal("Invalid amount" in errorString, True)
|
||||
|
||||
errorString = ""
|
||||
try:
|
||||
self.nodes[0].generate("2") #use a string to as block amount parameter must fail because it's not interpreted as amount
|
||||
except JSONRPCException,e:
|
||||
errorString = e.error['message']
|
||||
self.nodes[0].generate("2")
|
||||
raise AssertionError("Must not accept strings as numeric")
|
||||
except JSONRPCException as e:
|
||||
assert("not an integer" in e.error['message'])
|
||||
|
||||
assert_equal("not an integer" in errorString, True)
|
||||
# Import address and private key to check correct behavior of spendable unspents
|
||||
# 1. Send some coins to generate new UTXO
|
||||
address_to_import = self.nodes[2].getnewaddress()
|
||||
txid = self.nodes[0].sendtoaddress(address_to_import, 1)
|
||||
self.nodes[0].generate(1)
|
||||
self.sync_all()
|
||||
|
||||
# 2. Import address from node2 to node1
|
||||
self.nodes[1].importaddress(address_to_import)
|
||||
|
||||
# 3. Validate that the imported address is watch-only on node1
|
||||
assert(self.nodes[1].validateaddress(address_to_import)["iswatchonly"])
|
||||
|
||||
# 4. Check that the unspents after import are not spendable
|
||||
assert_array_result(self.nodes[1].listunspent(),
|
||||
{"address": address_to_import},
|
||||
{"spendable": False})
|
||||
|
||||
# 5. Import private key of the previously imported address on node1
|
||||
priv_key = self.nodes[2].dumpprivkey(address_to_import)
|
||||
self.nodes[1].importprivkey(priv_key)
|
||||
|
||||
# 6. Check that the unspents are now spendable on node1
|
||||
assert_array_result(self.nodes[1].listunspent(),
|
||||
{"address": address_to_import},
|
||||
{"spendable": True})
|
||||
|
||||
#check if wallet or blockchain maintenance changes the balance
|
||||
self.sync_all()
|
||||
self.nodes[0].generate(1)
|
||||
blocks = self.nodes[0].generate(2)
|
||||
self.sync_all()
|
||||
balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
|
||||
block_count = self.nodes[0].getblockcount()
|
||||
|
||||
# Check modes:
|
||||
# - True: unicode escaped as \u....
|
||||
@ -281,15 +329,21 @@ class WalletTest (BitcoinTestFramework):
|
||||
'-salvagewallet',
|
||||
]
|
||||
for m in maintenance:
|
||||
print "check " + m
|
||||
stop_nodes(self.nodes)
|
||||
wait_bitcoinds()
|
||||
self.nodes = start_nodes(3, self.options.tmpdir, [[m]] * 3)
|
||||
connect_nodes_bi(self.nodes,0,1)
|
||||
connect_nodes_bi(self.nodes,1,2)
|
||||
connect_nodes_bi(self.nodes,0,2)
|
||||
self.sync_all()
|
||||
while m == '-reindex' and [block_count] * 3 != [self.nodes[i].getblockcount() for i in range(3)]:
|
||||
# reindex will leave rpc warm up "early"; Wait for it to finish
|
||||
time.sleep(0.1)
|
||||
assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])
|
||||
|
||||
# Exercise listsinceblock with the last two blocks
|
||||
coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0])
|
||||
assert_equal(coinbase_tx_1["lastblock"], blocks[1])
|
||||
assert_equal(len(coinbase_tx_1["transactions"]), 1)
|
||||
assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1])
|
||||
assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0)
|
||||
|
||||
if __name__ == '__main__':
|
||||
WalletTest ().main ()
|
||||
|
@ -65,14 +65,8 @@ class ZapWalletTXesTest (BitcoinTestFramework):
|
||||
#restart bitcoind with zapwallettxes
|
||||
self.nodes[0] = start_node(0,self.options.tmpdir, ["-zapwallettxes=1"])
|
||||
|
||||
aException = False
|
||||
try:
|
||||
tx3 = self.nodes[0].gettransaction(txid3)
|
||||
except JSONRPCException,e:
|
||||
print e
|
||||
aException = True
|
||||
|
||||
assert_equal(aException, True) #there must be an exception because the unconfirmed wallettx0 must be gone by now
assert_raises(JSONRPCException, self.nodes[0].gettransaction, [txid3])
#there must be an exception because the unconfirmed wallettx0 must be gone by now
|
||||
|
||||
tx0 = self.nodes[0].gettransaction(txid0)
|
||||
assert_equal(tx0['txid'], txid0) #tx0 (confirmed) must still be available because it was confirmed
|
||||
|
@ -28,8 +28,8 @@ class ZMQTest (BitcoinTestFramework):
|
||||
def setup_nodes(self):
|
||||
self.zmqContext = zmq.Context()
|
||||
self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
|
||||
self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, "hashblock")
|
||||
self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, "hashtx")
|
||||
self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashblock")
|
||||
self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashtx")
|
||||
self.zmqSubSocket.connect("tcp://127.0.0.1:%i" % self.port)
|
||||
return start_nodes(4, self.options.tmpdir, extra_args=[
|
||||
['-zmqpubhashtx=tcp://127.0.0.1:'+str(self.port), '-zmqpubhashblock=tcp://127.0.0.1:'+str(self.port)],
|
||||
@ -46,13 +46,13 @@ class ZMQTest (BitcoinTestFramework):
|
||||
|
||||
print "listen..."
|
||||
msg = self.zmqSubSocket.recv_multipart()
|
||||
topic = str(msg[0])
|
||||
topic = msg[0]
|
||||
body = msg[1]
|
||||
|
||||
msg = self.zmqSubSocket.recv_multipart()
|
||||
topic = str(msg[0])
|
||||
topic = msg[0]
|
||||
body = msg[1]
|
||||
blkhash = binascii.hexlify(body)
|
||||
blkhash = bytes_to_hex_str(body)
|
||||
|
||||
assert_equal(genhashes[0], blkhash) #blockhash from generate must be equal to the hash received over zmq
|
||||
|
||||
@ -63,10 +63,10 @@ class ZMQTest (BitcoinTestFramework):
|
||||
zmqHashes = []
|
||||
for x in range(0,n*2):
|
||||
msg = self.zmqSubSocket.recv_multipart()
|
||||
topic = str(msg[0])
|
||||
topic = msg[0]
|
||||
body = msg[1]
|
||||
if topic == "hashblock":
|
||||
zmqHashes.append(binascii.hexlify(body))
|
||||
if topic == b"hashblock":
|
||||
zmqHashes.append(bytes_to_hex_str(body))
|
||||
|
||||
for x in range(0,n):
|
||||
assert_equal(genhashes[x], zmqHashes[x]) #blockhash from generate must be equal to the hash received over zmq
|
||||
@ -77,11 +77,11 @@ class ZMQTest (BitcoinTestFramework):
|
||||
|
||||
# now we should receive a zmq msg because the tx was broadcast
|
||||
msg = self.zmqSubSocket.recv_multipart()
|
||||
topic = str(msg[0])
|
||||
topic = msg[0]
|
||||
body = msg[1]
|
||||
hashZMQ = ""
|
||||
if topic == "hashtx":
|
||||
hashZMQ = binascii.hexlify(body)
|
||||
if topic == b"hashtx":
|
||||
hashZMQ = bytes_to_hex_str(body)
|
||||
|
||||
assert_equal(hashRPC, hashZMQ) #blockhash from generate must be equal to the hash received over zmq
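The same subscription pattern works outside the harness; a stand-alone sketch, assuming a node was started with -zmqpubhashblock pointing at the (illustrative) endpoint used below:

import binascii
import zmq

context = zmq.Context()
sub = context.socket(zmq.SUB)
sub.setsockopt(zmq.SUBSCRIBE, b"hashblock")    # topic filters are byte strings, as in the fix above
sub.connect("tcp://127.0.0.1:28332")           # placeholder; must match -zmqpubhashblock

msg = sub.recv_multipart()                     # [topic, payload, ...]
if msg[0] == b"hashblock":
    print("new block: " + binascii.hexlify(msg[1]).decode())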
|
||||
|
||||
|
@ -3,21 +3,7 @@ DIST_SUBDIRS = secp256k1 univalue
|
||||
AM_LDFLAGS = $(PTHREAD_CFLAGS) $(LIBTOOL_LDFLAGS) $(HARDENED_LDFLAGS)
|
||||
AM_CXXFLAGS = $(HARDENED_CXXFLAGS)
|
||||
AM_CPPFLAGS = $(HARDENED_CPPFLAGS)
|
||||
|
||||
if EMBEDDED_LEVELDB
|
||||
LEVELDB_CPPFLAGS += -I$(srcdir)/leveldb/include
|
||||
LEVELDB_CPPFLAGS += -I$(srcdir)/leveldb/helpers/memenv
|
||||
LIBLEVELDB += $(builddir)/leveldb/libleveldb.a
|
||||
LIBMEMENV += $(builddir)/leveldb/libmemenv.a
|
||||
|
||||
# NOTE: This dependency is not strictly necessary, but without it make may try to build both in parallel, which breaks the LevelDB build system in a race
|
||||
$(LIBLEVELDB): $(LIBMEMENV)
|
||||
|
||||
$(LIBLEVELDB) $(LIBMEMENV):
|
||||
@echo "Building LevelDB ..." && $(MAKE) -C $(@D) $(@F) CXX="$(CXX)" \
|
||||
CC="$(CC)" PLATFORM=$(TARGET_OS) AR="$(AR)" $(LEVELDB_TARGET_FLAGS) \
|
||||
OPT="$(AM_CXXFLAGS) $(PIE_FLAGS) $(CXXFLAGS) $(AM_CPPFLAGS) $(CPPFLAGS) -D__STDC_LIMIT_MACROS"
|
||||
endif
|
||||
EXTRA_LIBRARIES =
|
||||
|
||||
BITCOIN_CONFIG_INCLUDES=-I$(builddir)/config
|
||||
BITCOIN_INCLUDES=-I$(builddir) -I$(builddir)/obj $(BOOST_CPPFLAGS) $(LEVELDB_CPPFLAGS) $(CRYPTO_CFLAGS) $(SSL_CFLAGS)
|
||||
@ -43,7 +29,7 @@ $(LIBUNIVALUE): $(wildcard univalue/lib/*) $(wildcard univalue/include/*)
|
||||
|
||||
# Make is not made aware of per-object dependencies to avoid limiting building parallelization
|
||||
# But to build the less dependent modules first, we manually select their order here:
|
||||
EXTRA_LIBRARIES = \
|
||||
EXTRA_LIBRARIES += \
|
||||
crypto/libbitcoin_crypto.a \
|
||||
libbitcoin_util.a \
|
||||
libbitcoin_common.a \
|
||||
@ -535,6 +521,10 @@ endif
|
||||
@test -f $(PROTOC)
|
||||
$(AM_V_GEN) $(PROTOC) --cpp_out=$(@D) --proto_path=$(abspath $(<D) $<)
|
||||
|
||||
if EMBEDDED_LEVELDB
|
||||
include Makefile.leveldb.include
|
||||
endif
|
||||
|
||||
if ENABLE_TESTS
|
||||
include Makefile.test.include
|
||||
endif
|
||||
|
src/Makefile.leveldb.include (new file, 81 lines)
@ -0,0 +1,81 @@
|
||||
LIBLEVELDB_INT = leveldb/libleveldb.a
|
||||
LIBMEMENV_INT = leveldb/libmemenv.a
|
||||
|
||||
EXTRA_LIBRARIES += $(LIBLEVELDB_INT)
|
||||
EXTRA_LIBRARIES += $(LIBMEMENV_INT)
|
||||
|
||||
LIBLEVELDB += $(LIBLEVELDB_INT)
|
||||
LIBMEMENV += $(LIBMEMENV_INT)
|
||||
|
||||
LEVELDB_CPPFLAGS += -I$(srcdir)/leveldb/include
|
||||
LEVELDB_CPPFLAGS += -I$(srcdir)/leveldb/helpers/memenv
|
||||
|
||||
LEVELDB_CPPFLAGS_INT =
|
||||
LEVELDB_CPPFLAGS_INT += -I$(srcdir)/leveldb
|
||||
LEVELDB_CPPFLAGS_INT += $(LEVELDB_TARGET_FLAGS)
|
||||
LEVELDB_CPPFLAGS_INT += $(LEVELDB_ATOMIC_CPPFLAGS)
|
||||
LEVELDB_CPPFLAGS_INT += -D__STDC_LIMIT_MACROS
|
||||
|
||||
if TARGET_WINDOWS
|
||||
LEVELDB_CPPFLAGS_INT += -DLEVELDB_PLATFORM_WINDOWS -DWINVER=0x0500 -D__USE_MINGW_ANSI_STDIO=1
|
||||
else
|
||||
LEVELDB_CPPFLAGS_INT += -DLEVELDB_PLATFORM_POSIX
|
||||
endif
|
||||
|
||||
LEVELDB_CXXFLAGS_INT =
|
||||
LEVELDB_CXXFLAGS_INT += $(LEVELDB_ATOMIC_CXXFLAGS)
|
||||
|
||||
leveldb_libleveldb_a_CPPFLAGS = $(AM_CPPFLAGS) $(LEVELDB_CPPFLAGS_INT) $(LEVELDB_CPPFLAGS)
|
||||
leveldb_libleveldb_a_CXXFLAGS = $(AM_CXXFLAGS) $(PIE_FLAGS) $(LEVELDB_CXXFLAGS_INT)
|
||||
|
||||
leveldb_libleveldb_a_SOURCES=
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/builder.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/c.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/dbformat.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/db_impl.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/db_iter.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/dumpfile.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/filename.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/log_reader.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/log_writer.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/memtable.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/repair.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/table_cache.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/version_edit.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/version_set.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/db/write_batch.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/table/block_builder.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/table/block.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/table/filter_block.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/table/format.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/table/iterator.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/table/merger.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/table/table_builder.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/table/table.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/table/two_level_iterator.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/arena.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/bloom.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/cache.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/coding.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/comparator.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/crc32c.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/env.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/env_posix.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/env_win.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/filter_policy.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/hash.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/histogram.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/logging.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/options.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/status.cc
|
||||
|
||||
if TARGET_WINDOWS
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/util/env_win.cc
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/port/port_win.cc
|
||||
else
|
||||
leveldb_libleveldb_a_SOURCES += leveldb/port/port_posix.cc
|
||||
endif
|
||||
|
||||
leveldb_libmemenv_a_CPPFLAGS = $(leveldb_libleveldb_a_CPPFLAGS)
|
||||
leveldb_libmemenv_a_CXXFLAGS = $(leveldb_libleveldb_a_CXXFLAGS)
|
||||
leveldb_libmemenv_a_SOURCES = leveldb/helpers/memenv/memenv.cc
|
@ -569,11 +569,11 @@ ui_%.h: %.ui
|
||||
$(AM_V_GEN) QT_SELECT=$(QT_SELECT) $(UIC) -o $@ $< || (echo "Error creating $@"; false)
|
||||
|
||||
%.moc: %.cpp
|
||||
$(AM_V_GEN) QT_SELECT=$(QT_SELECT) $(MOC) $(QT_INCLUDES) $(MOC_DEFS) $< | \
|
||||
$(AM_V_GEN) QT_SELECT=$(QT_SELECT) $(MOC) $(DEFAULT_INCLUDES) $(QT_INCLUDES) $(MOC_DEFS) $< | \
|
||||
$(SED) -e '/^\*\*.*Created:/d' -e '/^\*\*.*by:/d' > $@
|
||||
|
||||
moc_%.cpp: %.h
|
||||
$(AM_V_GEN) QT_SELECT=$(QT_SELECT) $(MOC) $(QT_INCLUDES) $(MOC_DEFS) $< | \
|
||||
$(AM_V_GEN) QT_SELECT=$(QT_SELECT) $(MOC) $(DEFAULT_INCLUDES) $(QT_INCLUDES) $(MOC_DEFS) $< | \
|
||||
$(SED) -e '/^\*\*.*Created:/d' -e '/^\*\*.*by:/d' > $@
|
||||
|
||||
%.qm: %.ts
|
||||
|
@ -20,11 +20,10 @@ EXTRA_DIST += \
|
||||
test/data/txcreatesign.hex
|
||||
|
||||
JSON_TEST_FILES = \
|
||||
test/data/script_valid.json \
|
||||
test/data/script_tests.json \
|
||||
test/data/base58_keys_valid.json \
|
||||
test/data/base58_encode_decode.json \
|
||||
test/data/base58_keys_invalid.json \
|
||||
test/data/script_invalid.json \
|
||||
test/data/tx_invalid.json \
|
||||
test/data/tx_valid.json \
|
||||
test/data/sighash.json
|
||||
|
@ -172,7 +172,7 @@ bool CBase58Data::SetString(const char* psz, unsigned int nVersionBytes)
|
||||
vchData.resize(vchTemp.size() - nVersionBytes);
|
||||
if (!vchData.empty())
|
||||
memcpy(&vchData[0], &vchTemp[nVersionBytes], vchData.size());
|
||||
memory_cleanse(&vchTemp[0], vchData.size());
|
||||
memory_cleanse(&vchTemp[0], vchTemp.size());
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -93,6 +93,7 @@ CBlockIndex* CBlockIndex::GetAncestor(int height)
|
||||
pindexWalk = pindexWalk->pskip;
|
||||
heightWalk = heightSkip;
|
||||
} else {
|
||||
assert(pindexWalk->pprev);
|
||||
pindexWalk = pindexWalk->pprev;
|
||||
heightWalk--;
|
||||
}
|
||||
|
@ -35,7 +35,7 @@ string FormatScript(const CScript& script)
|
||||
} else if ((op >= OP_1 && op <= OP_16) || op == OP_1NEGATE) {
|
||||
ret += strprintf("%i ", op - OP_1NEGATE - 1);
|
||||
continue;
|
||||
} else if (op >= OP_NOP && op <= OP_CHECKMULTISIGVERIFY) {
|
||||
} else if (op >= OP_NOP && op <= OP_NOP10) {
|
||||
string str(GetOpName(op));
|
||||
if (str.substr(0, 3) == string("OP_")) {
|
||||
ret += str.substr(3, string::npos) + " ";
|
||||
@ -45,7 +45,7 @@ string FormatScript(const CScript& script)
|
||||
if (vch.size() > 0) {
|
||||
ret += strprintf("0x%x 0x%x ", HexStr(it2, it - vch.size()), HexStr(it - vch.size(), it));
|
||||
} else {
|
||||
ret += strprintf("0x%x", HexStr(it2, it));
|
||||
ret += strprintf("0x%x ", HexStr(it2, it));
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
@ -1311,6 +1311,7 @@ bool AppInit2(boost::thread_group& threadGroup, CScheduler& scheduler)
|
||||
// -proxy sets a proxy for all outgoing network traffic
|
||||
// -noproxy (or -proxy=0) as well as the empty string can be used to not set a proxy, this is the default
|
||||
std::string proxyArg = GetArg("-proxy", "");
|
||||
SetLimited(NET_TOR);
|
||||
if (proxyArg != "" && proxyArg != "0") {
|
||||
proxyType addrProxy = proxyType(CService(proxyArg, 9050), proxyRandomize);
|
||||
if (!addrProxy.IsValid())
|
||||
@ -1320,7 +1321,7 @@ bool AppInit2(boost::thread_group& threadGroup, CScheduler& scheduler)
|
||||
SetProxy(NET_IPV6, addrProxy);
|
||||
SetProxy(NET_TOR, addrProxy);
|
||||
SetNameProxy(addrProxy);
|
||||
SetReachable(NET_TOR); // by default, -proxy sets onion as reachable, unless -noonion later
|
||||
SetLimited(NET_TOR, false); // by default, -proxy sets onion as reachable, unless -noonion later
|
||||
}
|
||||
|
||||
// -onion can be used to set only a proxy for .onion, or override normal proxy for .onion addresses
|
||||
@ -1329,13 +1330,13 @@ bool AppInit2(boost::thread_group& threadGroup, CScheduler& scheduler)
|
||||
std::string onionArg = GetArg("-onion", "");
|
||||
if (onionArg != "") {
|
||||
if (onionArg == "0") { // Handle -noonion/-onion=0
|
||||
SetReachable(NET_TOR, false); // set onions as unreachable
|
||||
SetLimited(NET_TOR); // set onions as unreachable
|
||||
} else {
|
||||
proxyType addrOnion = proxyType(CService(onionArg, 9050), proxyRandomize);
|
||||
if (!addrOnion.IsValid())
|
||||
return InitError(strprintf(_("Invalid -onion address: '%s'"), onionArg));
|
||||
SetProxy(NET_TOR, addrOnion);
|
||||
SetReachable(NET_TOR);
|
||||
SetLimited(NET_TOR, false);
|
||||
}
|
||||
}
|
||||
|
||||
|
src/main.cpp (47 lines changed)
@ -404,7 +404,7 @@ void MarkBlockAsInFlight(NodeId nodeid, const uint256& hash, const Consensus::Pa
|
||||
mapBlocksInFlight[hash] = std::make_pair(nodeid, it);
|
||||
}
|
||||
|
||||
/** Check whether the last unknown block a peer advertized is not yet known. */
|
||||
/** Check whether the last unknown block a peer advertised is not yet known. */
|
||||
void ProcessBlockAvailability(NodeId nodeid) {
|
||||
CNodeState *state = State(nodeid);
|
||||
assert(state != NULL);
|
||||
@ -5308,11 +5308,11 @@ bool static ProcessMessage(CNode* pfrom, string strCommand, CDataStream& vRecv,
|
||||
CAddress addr = GetLocalAddress(&pfrom->addr);
|
||||
if (addr.IsRoutable())
|
||||
{
|
||||
LogPrintf("ProcessMessages: advertizing address %s\n", addr.ToString());
|
||||
LogPrintf("ProcessMessages: advertising address %s\n", addr.ToString());
|
||||
pfrom->PushAddress(addr);
|
||||
} else if (IsPeerAddrLocalGood(pfrom)) {
|
||||
addr.SetIP(pfrom->addrLocal);
|
||||
LogPrintf("ProcessMessages: advertizing address %s\n", addr.ToString());
|
||||
LogPrintf("ProcessMessages: advertising address %s\n", addr.ToString());
|
||||
pfrom->PushAddress(addr);
|
||||
}
|
||||
}
|
||||
@ -5520,10 +5520,8 @@ bool static ProcessMessage(CNode* pfrom, string strCommand, CDataStream& vRecv,
|
||||
{
|
||||
if (fBlocksOnly)
|
||||
LogPrint("net", "transaction (%s) inv sent in violation of protocol peer=%d\n", inv.hash.ToString(), pfrom->id);
|
||||
else if (!fAlreadyHave && !fImporting && !fReindex)
|
||||
{
|
||||
else if (!fAlreadyHave && !fImporting && !fReindex && !IsInitialBlockDownload())
|
||||
pfrom->AskFor(inv);
|
||||
}
|
||||
}
|
||||
|
||||
// Track requests for our stuff
|
||||
@ -5706,7 +5704,7 @@ bool static ProcessMessage(CNode* pfrom, string strCommand, CDataStream& vRecv,
|
||||
CValidationState state;
|
||||
|
||||
pfrom->setAskFor.erase(inv.hash);
|
||||
mapAlreadyAskedFor.erase(inv);
|
||||
mapAlreadyAskedFor.erase(inv.hash);
|
||||
|
||||
if (!AlreadyHave(inv) && AcceptToMemoryPool(mempool, state, tx, true, &fMissingInputs))
|
||||
{
|
||||
@ -5954,13 +5952,18 @@ bool static ProcessMessage(CNode* pfrom, string strCommand, CDataStream& vRecv,
|
||||
}
|
||||
|
||||
|
||||
// This asymmetric behavior for inbound and outbound connections was introduced
|
||||
// to prevent a fingerprinting attack: an attacker can send specific fake addresses
|
||||
// to users' AddrMan and later request them by sending getaddr messages.
|
||||
// Making nodes which are behind NAT and can only make outgoing connections ignore
|
||||
// the getaddr message mitigates the attack.
|
||||
else if ((strCommand == NetMsgType::GETADDR) && (pfrom->fInbound))
|
||||
else if (strCommand == NetMsgType::GETADDR)
|
||||
{
|
||||
// This asymmetric behavior for inbound and outbound connections was introduced
|
||||
// to prevent a fingerprinting attack: an attacker can send specific fake addresses
|
||||
// to users' AddrMan and later request them by sending getaddr messages.
|
||||
// Making nodes which are behind NAT and can only make outgoing connections ignore
|
||||
// the getaddr message mitigates the attack.
|
||||
if (!pfrom->fInbound) {
|
||||
LogPrint("net", "Ignoring \"getaddr\" from outbound connection. peer=%d\n", pfrom->id);
|
||||
return true;
|
||||
}
|
||||
|
||||
pfrom->vAddrToSend.clear();
|
||||
vector<CAddress> vAddr = addrman.GetAddr();
|
||||
BOOST_FOREACH(const CAddress &addr, vAddr)
|
||||
@ -6379,7 +6382,7 @@ bool SendMessages(CNode* pto)
|
||||
// Address refresh broadcast
|
||||
int64_t nNow = GetTimeMicros();
|
||||
if (!IsInitialBlockDownload() && pto->nNextLocalAddrSend < nNow) {
|
||||
AdvertizeLocal(pto);
|
||||
AdvertiseLocal(pto);
|
||||
pto->nNextLocalAddrSend = PoissonNextSend(nNow, AVG_LOCAL_ADDRESS_BROADCAST_INTERVAL);
|
||||
}
|
||||
|
||||
@ -6492,7 +6495,21 @@ bool SendMessages(CNode* pto)
|
||||
fRevertToInv = true;
|
||||
break;
|
||||
}
|
||||
assert(pBestIndex == NULL || pindex->pprev == pBestIndex);
|
||||
if (pBestIndex != NULL && pindex->pprev != pBestIndex) {
|
||||
// This means that the list of blocks to announce don't
|
||||
// connect to each other.
|
||||
// This shouldn't really be possible to hit during
|
||||
// regular operation (because reorgs should take us to
|
||||
// a chain that has some block not on the prior chain,
|
||||
// which should be caught by the prior check), but one
|
||||
// way this could happen is by using invalidateblock /
|
||||
// reconsiderblock repeatedly on the tip, causing it to
|
||||
// be added multiple times to vBlockHashesToAnnounce.
|
||||
// Robustly deal with this rare situation by reverting
|
||||
// to an inv.
|
||||
fRevertToInv = true;
|
||||
break;
|
||||
}
|
||||
pBestIndex = pindex;
|
||||
if (fFoundStartingHeader) {
|
||||
// add this to the headers message
|
||||
|
src/net.cpp (22 lines changed)
@ -81,7 +81,6 @@ bool fListen = true;
|
||||
uint64_t nLocalServices = NODE_NETWORK;
|
||||
CCriticalSection cs_mapLocalHost;
|
||||
map<CNetAddr, LocalServiceInfo> mapLocalHost;
|
||||
static bool vfReachable[NET_MAX] = {};
|
||||
static bool vfLimited[NET_MAX] = {};
|
||||
static CNode* pnodeLocalHost = NULL;
|
||||
uint64_t nLocalHostNonce = 0;
|
||||
@ -96,7 +95,7 @@ CCriticalSection cs_vNodes;
|
||||
map<CInv, CDataStream> mapRelay;
|
||||
deque<pair<int64_t, CInv> > vRelayExpiration;
|
||||
CCriticalSection cs_mapRelay;
|
||||
limitedmap<CInv, int64_t> mapAlreadyAskedFor(MAX_INV_SZ);
|
||||
limitedmap<uint256, int64_t> mapAlreadyAskedFor(MAX_INV_SZ);
|
||||
|
||||
static deque<string> vOneShots;
|
||||
CCriticalSection cs_vOneShots;
|
||||
@ -207,7 +206,7 @@ bool IsPeerAddrLocalGood(CNode *pnode)
|
||||
}
|
||||
|
||||
// pushes our own address to a peer
|
||||
void AdvertizeLocal(CNode *pnode)
|
||||
void AdvertiseLocal(CNode *pnode)
|
||||
{
|
||||
if (fListen && pnode->fSuccessfullyConnected)
|
||||
{
|
||||
@ -222,20 +221,12 @@ void AdvertizeLocal(CNode *pnode)
|
||||
}
|
||||
if (addrLocal.IsRoutable())
|
||||
{
|
||||
LogPrintf("AdvertizeLocal: advertizing address %s\n", addrLocal.ToString());
|
||||
LogPrintf("AdvertiseLocal: advertising address %s\n", addrLocal.ToString());
|
||||
pnode->PushAddress(addrLocal);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void SetReachable(enum Network net, bool fFlag)
|
||||
{
|
||||
LOCK(cs_mapLocalHost);
|
||||
vfReachable[net] = fFlag;
|
||||
if (net == NET_IPV6 && fFlag)
|
||||
vfReachable[NET_IPV4] = true;
|
||||
}
|
||||
|
||||
// learn a new local address
|
||||
bool AddLocal(const CService& addr, int nScore)
|
||||
{
|
||||
@ -258,7 +249,6 @@ bool AddLocal(const CService& addr, int nScore)
|
||||
info.nScore = nScore + (fAlready ? 1 : 0);
|
||||
info.nPort = addr.GetPort();
|
||||
}
|
||||
SetReachable(addr.GetNetwork());
|
||||
}
|
||||
|
||||
return true;
|
||||
@ -321,7 +311,7 @@ bool IsLocal(const CService& addr)
|
||||
bool IsReachable(enum Network net)
|
||||
{
|
||||
LOCK(cs_mapLocalHost);
|
||||
return vfReachable[net] && !vfLimited[net];
|
||||
return !vfLimited[net];
|
||||
}
|
||||
|
||||
/** check whether a given address is in a network we can probably connect to */
|
||||
@ -2463,7 +2453,7 @@ void CNode::AskFor(const CInv& inv)
|
||||
// We're using mapAskFor as a priority queue,
|
||||
// the key is the earliest time the request can be sent
|
||||
int64_t nRequestTime;
|
||||
limitedmap<CInv, int64_t>::const_iterator it = mapAlreadyAskedFor.find(inv);
|
||||
limitedmap<uint256, int64_t>::const_iterator it = mapAlreadyAskedFor.find(inv.hash);
|
||||
if (it != mapAlreadyAskedFor.end())
|
||||
nRequestTime = it->second;
|
||||
else
|
||||
@ -2483,7 +2473,7 @@ void CNode::AskFor(const CInv& inv)
|
||||
if (it != mapAlreadyAskedFor.end())
|
||||
mapAlreadyAskedFor.update(it, nRequestTime);
|
||||
else
|
||||
mapAlreadyAskedFor.insert(std::make_pair(inv, nRequestTime));
|
||||
mapAlreadyAskedFor.insert(std::make_pair(inv.hash, nRequestTime));
|
||||
mapAskFor.insert(std::make_pair(nRequestTime, inv));
|
||||
}
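The comment above frames mapAskFor as a priority queue keyed by the earliest allowed send time, with mapAlreadyAskedFor remembering when each item was last scheduled; a rough Python sketch of that idea (the spacing value is illustrative, not the C++ constant):

import heapq
import time

REQUEST_SPACING = 120      # illustrative; repeat requests for the same item are pushed back
already_asked_for = {}     # txid -> last scheduled request time   (cf. mapAlreadyAskedFor)
ask_for = []               # heap of (request_time, txid)          (cf. mapAskFor)

def ask_for_tx(txid, now=None):
    now = time.time() if now is None else now
    previous = already_asked_for.get(txid)
    # Never schedule earlier than the previous request for this txid plus the spacing.
    request_time = now if previous is None else max(now, previous + REQUEST_SPACING)
    already_asked_for[txid] = request_time
    heapq.heappush(ask_for, (request_time, txid))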
|
||||
|
||||
|
@ -136,7 +136,7 @@ enum
|
||||
};
|
||||
|
||||
bool IsPeerAddrLocalGood(CNode *pnode);
|
||||
void AdvertizeLocal(CNode *pnode);
|
||||
void AdvertiseLocal(CNode *pnode);
|
||||
void SetLimited(enum Network net, bool fLimited = true);
|
||||
bool IsLimited(enum Network net);
|
||||
bool IsLimited(const CNetAddr& addr);
|
||||
@ -148,7 +148,6 @@ bool IsLocal(const CService& addr);
|
||||
bool GetLocal(CService &addr, const CNetAddr *paddrPeer = NULL);
|
||||
bool IsReachable(enum Network net);
|
||||
bool IsReachable(const CNetAddr &addr);
|
||||
void SetReachable(enum Network net, bool fFlag = true);
|
||||
CAddress GetLocalAddress(const CNetAddr *paddrPeer = NULL);
|
||||
|
||||
|
||||
@ -166,7 +165,7 @@ extern CCriticalSection cs_vNodes;
|
||||
extern std::map<CInv, CDataStream> mapRelay;
|
||||
extern std::deque<std::pair<int64_t, CInv> > vRelayExpiration;
|
||||
extern CCriticalSection cs_mapRelay;
|
||||
extern limitedmap<CInv, int64_t> mapAlreadyAskedFor;
|
||||
extern limitedmap<uint256, int64_t> mapAlreadyAskedFor;
|
||||
|
||||
extern std::vector<std::string> vAddedNodes;
|
||||
extern CCriticalSection cs_vAddedNodes;
|
||||
|
@ -363,12 +363,14 @@ void BitcoinGUI::createActions()
|
||||
aboutAction = new QAction(QIcon(":/icons/" + theme + "/about"), tr("&About Dash Core"), this);
|
||||
aboutAction->setStatusTip(tr("Show information about Dash Core"));
|
||||
aboutAction->setMenuRole(QAction::AboutRole);
|
||||
aboutAction->setEnabled(false);
|
||||
aboutQtAction = new QAction(QIcon(":/icons/" + theme + "/about_qt"), tr("About &Qt"), this);
|
||||
aboutQtAction->setStatusTip(tr("Show information about Qt"));
|
||||
aboutQtAction->setMenuRole(QAction::AboutQtRole);
|
||||
optionsAction = new QAction(QIcon(":/icons/" + theme + "/options"), tr("&Options..."), this);
|
||||
optionsAction->setStatusTip(tr("Modify configuration options for Dash Core"));
|
||||
optionsAction->setMenuRole(QAction::PreferencesRole);
|
||||
optionsAction->setEnabled(false);
|
||||
toggleHideAction = new QAction(QIcon(":/icons/" + theme + "/about"), tr("&Show / Hide"), this);
|
||||
toggleHideAction->setStatusTip(tr("Show or hide the main Window"));
|
||||
|
||||
@ -403,6 +405,12 @@ void BitcoinGUI::createActions()
|
||||
openMNConfEditorAction->setStatusTip(tr("Open Masternode configuration file"));
|
||||
showBackupsAction = new QAction(QIcon(":/icons/" + theme + "/browse"), tr("Show Automatic &Backups"), this);
|
||||
showBackupsAction->setStatusTip(tr("Show automatically created wallet backups"));
|
||||
// initially disable the debug window menu items
|
||||
openInfoAction->setEnabled(false);
|
||||
openRPCConsoleAction->setEnabled(false);
|
||||
openGraphAction->setEnabled(false);
|
||||
openPeersAction->setEnabled(false);
|
||||
openRepairAction->setEnabled(false);
|
||||
|
||||
usedSendingAddressesAction = new QAction(QIcon(":/icons/" + theme + "/address-book"), tr("&Sending addresses..."), this);
|
||||
usedSendingAddressesAction->setStatusTip(tr("Show the list of used sending addresses and labels"));
|
||||
@ -1104,6 +1112,18 @@ void BitcoinGUI::closeEvent(QCloseEvent *event)
|
||||
QMainWindow::closeEvent(event);
|
||||
}
|
||||
|
||||
void BitcoinGUI::showEvent(QShowEvent *event)
|
||||
{
|
||||
// enable the debug window when the main window shows up
|
||||
openInfoAction->setEnabled(true);
|
||||
openRPCConsoleAction->setEnabled(true);
|
||||
openGraphAction->setEnabled(true);
|
||||
openPeersAction->setEnabled(true);
|
||||
openRepairAction->setEnabled(true);
|
||||
aboutAction->setEnabled(true);
|
||||
optionsAction->setEnabled(true);
|
||||
}
|
||||
|
||||
#ifdef ENABLE_WALLET
|
||||
void BitcoinGUI::incomingTransaction(const QString& date, int unit, const CAmount& amount, const QString& type, const QString& address, const QString& label)
|
||||
{
|
||||
|
@ -74,6 +74,7 @@ public:
|
||||
protected:
|
||||
void changeEvent(QEvent *e);
|
||||
void closeEvent(QCloseEvent *event);
|
||||
void showEvent(QShowEvent *event);
|
||||
void dragEnterEvent(QDragEnterEvent *event);
|
||||
void dropEvent(QDropEvent *event);
|
||||
bool eventFilter(QObject *object, QEvent *event);
|
||||
|
@ -79,6 +79,7 @@ void CScheduler::serviceQueue()
|
||||
}
|
||||
}
|
||||
--nThreadsServicingQueue;
|
||||
newTaskScheduled.notify_one();
|
||||
}
|
||||
|
||||
void CScheduler::stop(bool drain)
|
||||
|
@ -131,7 +131,7 @@ const char* GetOpName(opcodetype opcode)
|
||||
// expansion
|
||||
case OP_NOP1 : return "OP_NOP1";
|
||||
case OP_CHECKLOCKTIMEVERIFY : return "OP_CHECKLOCKTIMEVERIFY";
|
||||
case OP_NOP3 : return "OP_NOP3";
|
||||
case OP_CHECKSEQUENCEVERIFY : return "OP_CHECKSEQUENCEVERIFY";
|
||||
case OP_NOP4 : return "OP_NOP4";
|
||||
case OP_NOP5 : return "OP_NOP5";
|
||||
case OP_NOP6 : return "OP_NOP6";
|
||||
|
@ -164,8 +164,8 @@ enum opcodetype
|
||||
OP_NOP1 = 0xb0,
|
||||
OP_CHECKLOCKTIMEVERIFY = 0xb1,
|
||||
OP_NOP2 = OP_CHECKLOCKTIMEVERIFY,
|
||||
OP_NOP3 = 0xb2,
|
||||
OP_CHECKSEQUENCEVERIFY = OP_NOP3,
|
||||
OP_CHECKSEQUENCEVERIFY = 0xb2,
|
||||
OP_NOP3 = OP_CHECKSEQUENCEVERIFY,
|
||||
OP_NOP4 = 0xb3,
|
||||
OP_NOP5 = 0xb4,
|
||||
OP_NOP6 = 0xb5,
|
||||
|
@ -2,6 +2,7 @@
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
from __future__ import division,print_function,unicode_literals
|
||||
import subprocess
|
||||
import os
|
||||
import json
|
||||
|
File diff suppressed because one or more lines are too long
src/test/data/script_tests.json (new file, 1833 lines; file diff suppressed because one or more lines are too long)
@ -204,41 +204,41 @@
|
||||
["CHECKSEQUENCEVERIFY tests"],
|
||||
|
||||
["By-height locks, with argument just beyond txin.nSequence"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "1 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "1 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4259839 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4259839 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000feff40000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["By-time locks, with argument just beyond txin.nSequence (but within numerical boundries)"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194305 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194305 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000040000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4259839 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4259839 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000feff40000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Argument missing"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Argument negative with by-blockheight txin.nSequence=0"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "-1 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "-1 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Argument negative with by-blocktime txin.nSequence=CTxIn::SEQUENCE_LOCKTIME_TYPE_FLAG"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "-1 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "-1 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000040000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Argument/tx height/time mismatch, both versions"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000040000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "65535 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "65535 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000040000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194304 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194304 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4259839 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4259839 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["6 byte non-minimally-encoded arguments are invalid even if their contents are valid"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0x06 0x000000000000 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0x06 0x000000000000 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffff00000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Failure due to failing CHECKSEQUENCEVERIFY in scriptSig"],
|
||||
@ -250,9 +250,9 @@
|
||||
"0200000001000100000000000000000000000000000000000000000000000000000000000000000000030251b2000000000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Failure due to insufficient tx.nVersion (<2)"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0 CHECKSEQUENCEVERIFY 1"]],
|
||||
"010000000100010000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194304 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194304 CHECKSEQUENCEVERIFY 1"]],
|
||||
"010000000100010000000000000000000000000000000000000000000000000000000000000000000000000040000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Make diffs cleaner by leaving a comment here without comma at the end"]
|
||||
|
@ -236,77 +236,77 @@
|
||||
["CHECKSEQUENCEVERIFY tests"],
|
||||
|
||||
["By-height locks, with argument == 0 and == txin.nSequence"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "65535 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "65535 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffff00000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "65535 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "65535 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffbf7f0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffbf7f0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["By-time locks, with argument == 0 and == txin.nSequence"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194304 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194304 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000040000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4259839 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4259839 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffff40000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4259839 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4259839 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffff7f0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194304 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194304 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffff7f0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Upper sequence with upper sequence is fine"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000000800100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000000800100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000feffffff0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000feffffff0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffffff0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffffff0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Argument 2^31 with various nSequence"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffbf7f0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffff7f0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483648 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffffff0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Argument 2^32-1 with various nSequence"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffbf7f0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffff7f0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4294967295 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffffff0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Argument 3<<31 with various nSequence"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "6442450944 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "6442450944 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffbf7f0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "6442450944 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "6442450944 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffff7f0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "6442450944 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "6442450944 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffffffff0100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["5 byte non-minimally-encoded operandss are valid"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0x05 0x0000000000 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "0x05 0x0000000000 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["The argument can be calculated rather than created directly by a PUSHDATA"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194303 1ADD NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194303 1ADD CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000040000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194304 1SUB NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "4194304 1SUB CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000ffff00000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["An ADD producing a 5-byte result that sets CTxIn::SEQUENCE_LOCKTIME_DISABLE_FLAG"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483647 65536 NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483647 65536 CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483647 4259840 ADD NOP3 1"]],
|
||||
[[["0000000000000000000000000000000000000000000000000000000000000100", 0, "2147483647 4259840 ADD CHECKSEQUENCEVERIFY 1"]],
|
||||
"020000000100010000000000000000000000000000000000000000000000000000000000000000000000000040000100000000000000000000000000", "P2SH,CHECKSEQUENCEVERIFY"],
|
||||
|
||||
["Valid CHECKSEQUENCEVERIFY in scriptSig"],
|
||||
|
@ -203,5 +203,125 @@ BOOST_AUTO_TEST_CASE(existing_data_reindex)
|
||||
BOOST_CHECK(odbw.Read(key, res3));
|
||||
BOOST_CHECK_EQUAL(res3.ToString(), in2.ToString());
|
||||
}
|
||||
|
||||
|
||||
BOOST_AUTO_TEST_CASE(iterator_ordering)
|
||||
{
|
||||
path ph = temp_directory_path() / unique_path();
|
||||
CDBWrapper dbw(ph, (1 << 20), true, false, false);
|
||||
for (int x=0x00; x<256; ++x) {
|
||||
uint8_t key = x;
|
||||
uint32_t value = x*x;
|
||||
BOOST_CHECK(dbw.Write(key, value));
|
||||
}
|
||||
|
||||
boost::scoped_ptr<CDBIterator> it(const_cast<CDBWrapper*>(&dbw)->NewIterator());
|
||||
for (int c=0; c<2; ++c) {
|
||||
int seek_start;
|
||||
if (c == 0)
|
||||
seek_start = 0x00;
|
||||
else
|
||||
seek_start = 0x80;
|
||||
it->Seek((uint8_t)seek_start);
|
||||
for (int x=seek_start; x<256; ++x) {
|
||||
uint8_t key;
|
||||
uint32_t value;
|
||||
BOOST_CHECK(it->Valid());
|
||||
if (!it->Valid()) // Avoid spurious errors about invalid iterator's key and value in case of failure
|
||||
break;
|
||||
BOOST_CHECK(it->GetKey(key));
|
||||
BOOST_CHECK(it->GetValue(value));
|
||||
BOOST_CHECK_EQUAL(key, x);
|
||||
BOOST_CHECK_EQUAL(value, x*x);
|
||||
it->Next();
|
||||
}
|
||||
BOOST_CHECK(!it->Valid());
|
||||
}
|
||||
}
|
||||
|
||||
struct StringContentsSerializer {
|
||||
// Used to make two serialized objects the same while letting them have different lengths
|
||||
// This is a terrible idea
|
||||
string str;
|
||||
StringContentsSerializer() {}
|
||||
StringContentsSerializer(const string& inp) : str(inp) {}
|
||||
|
||||
StringContentsSerializer& operator+=(const string& s) {
|
||||
str += s;
|
||||
return *this;
|
||||
}
|
||||
StringContentsSerializer& operator+=(const StringContentsSerializer& s) { return *this += s.str; }
|
||||
|
||||
ADD_SERIALIZE_METHODS;
|
||||
|
||||
template <typename Stream, typename Operation>
|
||||
inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion) {
|
||||
if (ser_action.ForRead()) {
|
||||
str.clear();
|
||||
char c = 0;
|
||||
while (true) {
|
||||
try {
|
||||
READWRITE(c);
|
||||
str.push_back(c);
|
||||
} catch (const std::ios_base::failure& e) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (size_t i = 0; i < str.size(); i++)
|
||||
READWRITE(str[i]);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
BOOST_AUTO_TEST_CASE(iterator_string_ordering)
|
||||
{
|
||||
char buf[10];
|
||||
|
||||
path ph = temp_directory_path() / unique_path();
|
||||
CDBWrapper dbw(ph, (1 << 20), true, false, false);
|
||||
for (int x=0x00; x<10; ++x) {
|
||||
for (int y = 0; y < 10; y++) {
|
||||
sprintf(buf, "%d", x);
|
||||
StringContentsSerializer key(buf);
|
||||
for (int z = 0; z < y; z++)
|
||||
key += key;
|
||||
uint32_t value = x*x;
|
||||
BOOST_CHECK(dbw.Write(key, value));
|
||||
}
|
||||
}
|
||||
|
||||
boost::scoped_ptr<CDBIterator> it(const_cast<CDBWrapper*>(&dbw)->NewIterator());
|
||||
for (int c=0; c<2; ++c) {
|
||||
int seek_start;
|
||||
if (c == 0)
|
||||
seek_start = 0;
|
||||
else
|
||||
seek_start = 5;
|
||||
sprintf(buf, "%d", seek_start);
|
||||
StringContentsSerializer seek_key(buf);
|
||||
it->Seek(seek_key);
|
||||
for (int x=seek_start; x<10; ++x) {
|
||||
for (int y = 0; y < 10; y++) {
|
||||
sprintf(buf, "%d", x);
|
||||
string exp_key(buf);
|
||||
for (int z = 0; z < y; z++)
|
||||
exp_key += exp_key;
|
||||
StringContentsSerializer key;
|
||||
uint32_t value;
|
||||
BOOST_CHECK(it->Valid());
|
||||
if (!it->Valid()) // Avoid spurious errors about invalid iterator's key and value in case of failure
|
||||
break;
|
||||
BOOST_CHECK(it->GetKey(key));
|
||||
BOOST_CHECK(it->GetValue(value));
|
||||
BOOST_CHECK_EQUAL(key.str, exp_key);
|
||||
BOOST_CHECK_EQUAL(value, x*x);
|
||||
it->Next();
|
||||
}
|
||||
}
|
||||
BOOST_CHECK(!it->Valid());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
BOOST_AUTO_TEST_SUITE_END()
|
||||
|
@ -40,7 +40,6 @@ static void MicroSleep(uint64_t n)
|
||||
#endif
|
||||
}
|
||||
|
||||
#if 0 /* Disabled for now because there is a race condition issue in this test - see #6540 */
|
||||
BOOST_AUTO_TEST_CASE(manythreads)
|
||||
{
|
||||
seed_insecure_rand(false);
|
||||
@ -116,6 +115,5 @@ BOOST_AUTO_TEST_CASE(manythreads)
|
||||
}
|
||||
BOOST_CHECK_EQUAL(counterSum, 200);
|
||||
}
|
||||
#endif
|
||||
|
||||
BOOST_AUTO_TEST_SUITE_END()
|
||||
|
@ -2,8 +2,7 @@
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.

#include "data/script_invalid.json.h"
#include "data/script_valid.json.h"
#include "data/script_tests.json.h"

#include "core_io.h"
#include "key.h"
@ -52,6 +51,64 @@ read_json(const std::string& jsondata)
    return v.get_array();
}

struct ScriptErrorDesc
{
    ScriptError_t err;
    const char *name;
};

static ScriptErrorDesc script_errors[]={
    {SCRIPT_ERR_OK, "OK"},
    {SCRIPT_ERR_UNKNOWN_ERROR, "UNKNOWN_ERROR"},
    {SCRIPT_ERR_EVAL_FALSE, "EVAL_FALSE"},
    {SCRIPT_ERR_OP_RETURN, "OP_RETURN"},
    {SCRIPT_ERR_SCRIPT_SIZE, "SCRIPT_SIZE"},
    {SCRIPT_ERR_PUSH_SIZE, "PUSH_SIZE"},
    {SCRIPT_ERR_OP_COUNT, "OP_COUNT"},
    {SCRIPT_ERR_STACK_SIZE, "STACK_SIZE"},
    {SCRIPT_ERR_SIG_COUNT, "SIG_COUNT"},
    {SCRIPT_ERR_PUBKEY_COUNT, "PUBKEY_COUNT"},
    {SCRIPT_ERR_VERIFY, "VERIFY"},
    {SCRIPT_ERR_EQUALVERIFY, "EQUALVERIFY"},
    {SCRIPT_ERR_CHECKMULTISIGVERIFY, "CHECKMULTISIGVERIFY"},
    {SCRIPT_ERR_CHECKSIGVERIFY, "CHECKSIGVERIFY"},
    {SCRIPT_ERR_NUMEQUALVERIFY, "NUMEQUALVERIFY"},
    {SCRIPT_ERR_BAD_OPCODE, "BAD_OPCODE"},
    {SCRIPT_ERR_DISABLED_OPCODE, "DISABLED_OPCODE"},
    {SCRIPT_ERR_INVALID_STACK_OPERATION, "INVALID_STACK_OPERATION"},
    {SCRIPT_ERR_INVALID_ALTSTACK_OPERATION, "INVALID_ALTSTACK_OPERATION"},
    {SCRIPT_ERR_UNBALANCED_CONDITIONAL, "UNBALANCED_CONDITIONAL"},
    {SCRIPT_ERR_NEGATIVE_LOCKTIME, "NEGATIVE_LOCKTIME"},
    {SCRIPT_ERR_UNSATISFIED_LOCKTIME, "UNSATISFIED_LOCKTIME"},
    {SCRIPT_ERR_SIG_HASHTYPE, "SIG_HASHTYPE"},
    {SCRIPT_ERR_SIG_DER, "SIG_DER"},
    {SCRIPT_ERR_MINIMALDATA, "MINIMALDATA"},
    {SCRIPT_ERR_SIG_PUSHONLY, "SIG_PUSHONLY"},
    {SCRIPT_ERR_SIG_HIGH_S, "SIG_HIGH_S"},
    {SCRIPT_ERR_SIG_NULLDUMMY, "SIG_NULLDUMMY"},
    {SCRIPT_ERR_PUBKEYTYPE, "PUBKEYTYPE"},
    {SCRIPT_ERR_CLEANSTACK, "CLEANSTACK"},
    {SCRIPT_ERR_DISCOURAGE_UPGRADABLE_NOPS, "DISCOURAGE_UPGRADABLE_NOPS"}
};

const char *FormatScriptError(ScriptError_t err)
{
    for (unsigned int i=0; i<ARRAYLEN(script_errors); ++i)
        if (script_errors[i].err == err)
            return script_errors[i].name;
    BOOST_ERROR("Unknown scripterror enumeration value, update script_errors in script_tests.cpp.");
    return "";
}

ScriptError_t ParseScriptError(const std::string &name)
{
    for (unsigned int i=0; i<ARRAYLEN(script_errors); ++i)
        if (script_errors[i].name == name)
            return script_errors[i].err;
    BOOST_ERROR("Unknown scripterror \"" << name << "\" in test description");
    return SCRIPT_ERR_UNKNOWN_ERROR;
}

BOOST_FIXTURE_TEST_SUITE(script_tests, BasicTestingSetup)

CMutableTransaction BuildCreditingTransaction(const CScript& scriptPubKey)
@ -87,13 +144,14 @@ CMutableTransaction BuildSpendingTransaction(const CScript& scriptSig, const CMu
    return txSpend;
}

void DoTest(const CScript& scriptPubKey, const CScript& scriptSig, int flags, bool expect, const std::string& message)
void DoTest(const CScript& scriptPubKey, const CScript& scriptSig, int flags, const std::string& message, int scriptError)
{
    bool expect = (scriptError == SCRIPT_ERR_OK);
    ScriptError err;
    CMutableTransaction tx = BuildSpendingTransaction(scriptSig, BuildCreditingTransaction(scriptPubKey));
    CMutableTransaction tx2 = tx;
    BOOST_CHECK_MESSAGE(VerifyScript(scriptSig, scriptPubKey, flags, MutableTransactionSignatureChecker(&tx, 0), &err) == expect, message);
    BOOST_CHECK_MESSAGE(expect == (err == SCRIPT_ERR_OK), std::string(ScriptErrorString(err)) + ": " + message);
    BOOST_CHECK_MESSAGE(err == scriptError, std::string(FormatScriptError(err)) + " where " + std::string(FormatScriptError((ScriptError_t)scriptError)) + " expected: " + message);
#if defined(HAVE_CONSENSUS_LIB)
    CDataStream stream(SER_NETWORK, PROTOCOL_VERSION);
    stream << tx2;
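The contract introduced by the new DoTest signature is: a case is expected to verify successfully exactly when its expected error is SCRIPT_ERR_OK, and the error actually produced must match the expected one by name. A minimal standalone sketch of that contract in isolation (the stand-in names RunCase/CheckCase are hypothetical, not part of the patch):

```cpp
#include <cassert>
#include <string>

enum ScriptError_t { SCRIPT_ERR_OK = 0, SCRIPT_ERR_EVAL_FALSE };

// Stand-in for VerifyScript(): returns the error a hypothetical case produced.
static ScriptError_t RunCase(const std::string& name)
{
    return name == "P2PK, good sig" ? SCRIPT_ERR_OK : SCRIPT_ERR_EVAL_FALSE;
}

static void CheckCase(const std::string& name, ScriptError_t expected)
{
    ScriptError_t got = RunCase(name);
    bool expect_success = (expected == SCRIPT_ERR_OK);   // same rule DoTest now applies
    assert((got == SCRIPT_ERR_OK) == expect_success);    // old-style pass/fail check
    assert(got == expected);                              // the stricter per-error check
}

int main()
{
    CheckCase("P2PK, good sig", SCRIPT_ERR_OK);
    CheckCase("P2PK, bad sig", SCRIPT_ERR_EVAL_FALSE);
    return 0;
}
```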
@ -187,6 +245,7 @@ private:
|
||||
std::vector<unsigned char> push;
|
||||
std::string comment;
|
||||
int flags;
|
||||
int scriptError;
|
||||
|
||||
void DoPush()
|
||||
{
|
||||
@ -204,7 +263,7 @@ private:
|
||||
}
|
||||
|
||||
public:
|
||||
TestBuilder(const CScript& redeemScript, const std::string& comment_, int flags_, bool P2SH = false) : scriptPubKey(redeemScript), havePush(false), comment(comment_), flags(flags_)
|
||||
TestBuilder(const CScript& redeemScript, const std::string& comment_, int flags_, bool P2SH = false) : scriptPubKey(redeemScript), havePush(false), comment(comment_), flags(flags_), scriptError(SCRIPT_ERR_OK)
|
||||
{
|
||||
if (P2SH) {
|
||||
creditTx = BuildCreditingTransaction(CScript() << OP_HASH160 << ToByteVector(CScriptID(redeemScript)) << OP_EQUAL);
|
||||
@ -214,6 +273,12 @@ public:
|
||||
spendTx = BuildSpendingTransaction(CScript(), creditTx);
|
||||
}
|
||||
|
||||
TestBuilder& ScriptError(ScriptError_t err)
|
||||
{
|
||||
scriptError = err;
|
||||
return *this;
|
||||
}
|
||||
|
||||
TestBuilder& Add(const CScript& script)
|
||||
{
|
||||
DoPush();
|
||||
@ -284,11 +349,11 @@ public:
|
||||
return *this;
|
||||
}
|
||||
|
||||
TestBuilder& Test(bool expect)
|
||||
TestBuilder& Test()
|
||||
{
|
||||
TestBuilder copy = *this; // Make a copy so we can rollback the push.
|
||||
DoPush();
|
||||
DoTest(creditTx.vout[0].scriptPubKey, spendTx.vin[0].scriptSig, flags, expect, comment);
|
||||
DoTest(creditTx.vout[0].scriptPubKey, spendTx.vin[0].scriptSig, flags, comment, scriptError);
|
||||
*this = copy;
|
||||
return *this;
|
||||
}
|
||||
@ -300,6 +365,7 @@ public:
|
||||
array.push_back(FormatScript(spendTx.vin[0].scriptSig));
|
||||
array.push_back(FormatScript(creditTx.vout[0].scriptPubKey));
|
||||
array.push_back(FormatScriptFlags(flags));
|
||||
array.push_back(FormatScriptError((ScriptError_t)scriptError));
|
||||
array.push_back(comment);
|
||||
return array;
|
||||
}
|
||||
@ -314,332 +380,328 @@ public:
|
||||
return creditTx.vout[0].scriptPubKey;
|
||||
}
|
||||
};
|
||||
|
||||
std::string JSONPrettyPrint(const UniValue& univalue)
|
||||
{
|
||||
std::string ret = univalue.write(4);
|
||||
// Workaround for libunivalue pretty printer, which puts a space between comma's and newlines
|
||||
size_t pos = 0;
|
||||
while ((pos = ret.find(" \n", pos)) != std::string::npos) {
|
||||
ret.replace(pos, 2, "\n");
|
||||
pos++;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(script_build)
|
||||
{
|
||||
const KeyData keys;
|
||||
|
||||
std::vector<TestBuilder> good;
|
||||
std::vector<TestBuilder> bad;
|
||||
std::vector<TestBuilder> tests;
|
||||
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2PK", 0
|
||||
).PushSig(keys.key0));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2PK, bad sig", 0
|
||||
).PushSig(keys.key0).DamagePush(10));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2PK", 0
|
||||
).PushSig(keys.key0));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2PK, bad sig", 0
|
||||
).PushSig(keys.key0).DamagePush(10).ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey1C.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
|
||||
"P2PKH", 0
|
||||
).PushSig(keys.key1).Push(keys.pubkey1C));
|
||||
bad.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey2C.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
|
||||
"P2PKH, bad pubkey", 0
|
||||
).PushSig(keys.key2).Push(keys.pubkey2C).DamagePush(5));
|
||||
tests.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey1C.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
|
||||
"P2PKH", 0
|
||||
).PushSig(keys.key1).Push(keys.pubkey1C));
|
||||
tests.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey2C.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
|
||||
"P2PKH, bad pubkey", 0
|
||||
).PushSig(keys.key2).Push(keys.pubkey2C).DamagePush(5).ScriptError(SCRIPT_ERR_EQUALVERIFY));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
|
||||
"P2PK anyonecanpay", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL | SIGHASH_ANYONECANPAY));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
|
||||
"P2PK anyonecanpay marked with normal hashtype", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL | SIGHASH_ANYONECANPAY).EditPush(70, "81", "01"));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
|
||||
"P2PK anyonecanpay", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL | SIGHASH_ANYONECANPAY));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
|
||||
"P2PK anyonecanpay marked with normal hashtype", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL | SIGHASH_ANYONECANPAY).EditPush(70, "81", "01").ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0C) << OP_CHECKSIG,
|
||||
"P2SH(P2PK)", SCRIPT_VERIFY_P2SH, true
|
||||
).PushSig(keys.key0).PushRedeem());
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0C) << OP_CHECKSIG,
|
||||
"P2SH(P2PK), bad redeemscript", SCRIPT_VERIFY_P2SH, true
|
||||
).PushSig(keys.key0).PushRedeem().DamagePush(10));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0C) << OP_CHECKSIG,
|
||||
"P2SH(P2PK)", SCRIPT_VERIFY_P2SH, true
|
||||
).PushSig(keys.key0).PushRedeem());
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0C) << OP_CHECKSIG,
|
||||
"P2SH(P2PK), bad redeemscript", SCRIPT_VERIFY_P2SH, true
|
||||
).PushSig(keys.key0).PushRedeem().DamagePush(10).ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey1.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
|
||||
"P2SH(P2PKH), bad sig but no VERIFY_P2SH", 0, true
|
||||
).PushSig(keys.key0).DamagePush(10).PushRedeem());
|
||||
bad.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey1.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
|
||||
"P2SH(P2PKH), bad sig", SCRIPT_VERIFY_P2SH, true
|
||||
).PushSig(keys.key0).DamagePush(10).PushRedeem());
|
||||
tests.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey1.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
|
||||
"P2SH(P2PKH), bad sig but no VERIFY_P2SH", 0, true
|
||||
).PushSig(keys.key0).DamagePush(10).PushRedeem());
|
||||
tests.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey1.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
|
||||
"P2SH(P2PKH), bad sig", SCRIPT_VERIFY_P2SH, true
|
||||
).PushSig(keys.key0).DamagePush(10).PushRedeem().ScriptError(SCRIPT_ERR_EQUALVERIFY));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"3-of-3", 0
|
||||
).Num(0).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2));
|
||||
bad.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"3-of-3, 2 sigs", 0
|
||||
).Num(0).PushSig(keys.key0).PushSig(keys.key1).Num(0));
|
||||
tests.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"3-of-3", 0
|
||||
).Num(0).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2));
|
||||
tests.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"3-of-3, 2 sigs", 0
|
||||
).Num(0).PushSig(keys.key0).PushSig(keys.key1).Num(0).ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"P2SH(2-of-3)", SCRIPT_VERIFY_P2SH, true
|
||||
).Num(0).PushSig(keys.key1).PushSig(keys.key2).PushRedeem());
|
||||
bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"P2SH(2-of-3), 1 sig", SCRIPT_VERIFY_P2SH, true
|
||||
).Num(0).PushSig(keys.key1).Num(0).PushRedeem());
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"P2SH(2-of-3)", SCRIPT_VERIFY_P2SH, true
|
||||
).Num(0).PushSig(keys.key1).PushSig(keys.key2).PushRedeem());
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"P2SH(2-of-3), 1 sig", SCRIPT_VERIFY_P2SH, true
|
||||
).Num(0).PushSig(keys.key1).Num(0).PushRedeem().ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too much R padding but no DERSIG", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000"));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too much R padding", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000"));
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too much S padding but no DERSIG", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL).EditPush(1, "44", "45").EditPush(37, "20", "2100"));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too much S padding", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key1, SIGHASH_ALL).EditPush(1, "44", "45").EditPush(37, "20", "2100"));
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too little R padding but no DERSIG", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too little R padding", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with bad sig with too much R padding but no DERSIG", 0
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000").DamagePush(10));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with bad sig with too much R padding", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000").DamagePush(10));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with too much R padding but no DERSIG", 0
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000"));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with too much R padding", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000"));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too much R padding but no DERSIG", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000"));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too much R padding", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000").ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too much S padding but no DERSIG", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL).EditPush(1, "44", "45").EditPush(37, "20", "2100"));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too much S padding", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key1, SIGHASH_ALL).EditPush(1, "44", "45").EditPush(37, "20", "2100").ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too little R padding but no DERSIG", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"P2PK with too little R padding", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with bad sig with too much R padding but no DERSIG", 0
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000").DamagePush(10));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with bad sig with too much R padding", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000").DamagePush(10).ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with too much R padding but no DERSIG", 0
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000").ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with too much R padding", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000").ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 1, without DERSIG", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 1, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 2, without DERSIG", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 2, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 3, without DERSIG", 0
|
||||
).Num(0));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 3, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0));
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 4, without DERSIG", 0
|
||||
).Num(0));
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 4, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 5, without DERSIG", 0
|
||||
).Num(1));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 5, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(1));
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 6, without DERSIG", 0
|
||||
).Num(1));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 6, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(1));
|
||||
good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 7, without DERSIG", 0
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2));
|
||||
bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 7, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2));
|
||||
bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 8, without DERSIG", 0
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2));
|
||||
bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 8, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2));
|
||||
bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 9, without DERSIG", 0
|
||||
).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 9, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 10, without DERSIG", 0
|
||||
).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 10, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 11, without DERSIG", 0
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0));
|
||||
bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 11, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0));
|
||||
good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 12, without DERSIG", 0
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0));
|
||||
good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 12, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0));
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2PK with multi-byte hashtype, without DERSIG", 0
|
||||
).PushSig(keys.key2, SIGHASH_ALL).EditPush(70, "01", "0101"));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2PK with multi-byte hashtype, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key2, SIGHASH_ALL).EditPush(70, "01", "0101"));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 1, without DERSIG", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 1, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 2, without DERSIG", 0
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 2, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 3, without DERSIG", 0
|
||||
).Num(0).ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 3, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 4, without DERSIG", 0
|
||||
).Num(0));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 4, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 5, without DERSIG", 0
|
||||
).Num(1).ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
|
||||
"BIP66 example 5, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(1).ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 6, without DERSIG", 0
|
||||
).Num(1));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
|
||||
"BIP66 example 6, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(1).ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 7, without DERSIG", 0
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 7, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2).ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 8, without DERSIG", 0
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2).ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 8, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2).ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 9, without DERSIG", 0
|
||||
).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 9, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 10, without DERSIG", 0
|
||||
).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 10, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 11, without DERSIG", 0
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0).ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"BIP66 example 11, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0).ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 12, without DERSIG", 0
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"BIP66 example 12, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2PK with multi-byte hashtype, without DERSIG", 0
|
||||
).PushSig(keys.key2, SIGHASH_ALL).EditPush(70, "01", "0101"));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2PK with multi-byte hashtype, with DERSIG", SCRIPT_VERIFY_DERSIG
|
||||
).PushSig(keys.key2, SIGHASH_ALL).EditPush(70, "01", "0101").ScriptError(SCRIPT_ERR_SIG_DER));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2PK with high S but no LOW_S", 0
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 32, 33));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2PK with high S", SCRIPT_VERIFY_LOW_S
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 32, 33));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2PK with high S but no LOW_S", 0
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 32, 33));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2PK with high S", SCRIPT_VERIFY_LOW_S
|
||||
).PushSig(keys.key2, SIGHASH_ALL, 32, 33).ScriptError(SCRIPT_ERR_SIG_HIGH_S));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG,
|
||||
"P2PK with hybrid pubkey but no STRICTENC", 0
|
||||
).PushSig(keys.key0, SIGHASH_ALL));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG,
|
||||
"P2PK with hybrid pubkey", SCRIPT_VERIFY_STRICTENC
|
||||
).PushSig(keys.key0, SIGHASH_ALL));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with hybrid pubkey but no STRICTENC", 0
|
||||
).PushSig(keys.key0, SIGHASH_ALL));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with hybrid pubkey", SCRIPT_VERIFY_STRICTENC
|
||||
).PushSig(keys.key0, SIGHASH_ALL));
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with invalid hybrid pubkey but no STRICTENC", 0
|
||||
).PushSig(keys.key0, SIGHASH_ALL).DamagePush(10));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with invalid hybrid pubkey", SCRIPT_VERIFY_STRICTENC
|
||||
).PushSig(keys.key0, SIGHASH_ALL).DamagePush(10));
|
||||
good.push_back(TestBuilder(CScript() << OP_1 << ToByteVector(keys.pubkey0H) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"1-of-2 with the second 1 hybrid pubkey and no STRICTENC", 0
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL));
|
||||
good.push_back(TestBuilder(CScript() << OP_1 << ToByteVector(keys.pubkey0H) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"1-of-2 with the second 1 hybrid pubkey", SCRIPT_VERIFY_STRICTENC
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL));
|
||||
bad.push_back(TestBuilder(CScript() << OP_1 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey0H) << OP_2 << OP_CHECKMULTISIG,
|
||||
"1-of-2 with the first 1 hybrid pubkey", SCRIPT_VERIFY_STRICTENC
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG,
|
||||
"P2PK with hybrid pubkey but no STRICTENC", 0
|
||||
).PushSig(keys.key0, SIGHASH_ALL));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG,
|
||||
"P2PK with hybrid pubkey", SCRIPT_VERIFY_STRICTENC
|
||||
).PushSig(keys.key0, SIGHASH_ALL).ScriptError(SCRIPT_ERR_PUBKEYTYPE));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with hybrid pubkey but no STRICTENC", 0
|
||||
).PushSig(keys.key0, SIGHASH_ALL).ScriptError(SCRIPT_ERR_EVAL_FALSE));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with hybrid pubkey", SCRIPT_VERIFY_STRICTENC
|
||||
).PushSig(keys.key0, SIGHASH_ALL).ScriptError(SCRIPT_ERR_PUBKEYTYPE));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with invalid hybrid pubkey but no STRICTENC", 0
|
||||
).PushSig(keys.key0, SIGHASH_ALL).DamagePush(10));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with invalid hybrid pubkey", SCRIPT_VERIFY_STRICTENC
|
||||
).PushSig(keys.key0, SIGHASH_ALL).DamagePush(10).ScriptError(SCRIPT_ERR_PUBKEYTYPE));
|
||||
tests.push_back(TestBuilder(CScript() << OP_1 << ToByteVector(keys.pubkey0H) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"1-of-2 with the second 1 hybrid pubkey and no STRICTENC", 0
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL));
|
||||
tests.push_back(TestBuilder(CScript() << OP_1 << ToByteVector(keys.pubkey0H) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"1-of-2 with the second 1 hybrid pubkey", SCRIPT_VERIFY_STRICTENC
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL));
|
||||
tests.push_back(TestBuilder(CScript() << OP_1 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey0H) << OP_2 << OP_CHECKMULTISIG,
|
||||
"1-of-2 with the first 1 hybrid pubkey", SCRIPT_VERIFY_STRICTENC
|
||||
).Num(0).PushSig(keys.key1, SIGHASH_ALL).ScriptError(SCRIPT_ERR_PUBKEYTYPE));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
|
||||
"P2PK with undefined hashtype but no STRICTENC", 0
|
||||
).PushSig(keys.key1, 5));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
|
||||
"P2PK with undefined hashtype", SCRIPT_VERIFY_STRICTENC
|
||||
).PushSig(keys.key1, 5));
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with invalid sig and undefined hashtype but no STRICTENC", 0
|
||||
).PushSig(keys.key1, 5).DamagePush(10));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with invalid sig and undefined hashtype", SCRIPT_VERIFY_STRICTENC
|
||||
).PushSig(keys.key1, 5).DamagePush(10));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
|
||||
"P2PK with undefined hashtype but no STRICTENC", 0
|
||||
).PushSig(keys.key1, 5));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
|
||||
"P2PK with undefined hashtype", SCRIPT_VERIFY_STRICTENC
|
||||
).PushSig(keys.key1, 5).ScriptError(SCRIPT_ERR_SIG_HASHTYPE));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with invalid sig and undefined hashtype but no STRICTENC", 0
|
||||
).PushSig(keys.key1, 5).DamagePush(10));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG << OP_NOT,
|
||||
"P2PK NOT with invalid sig and undefined hashtype", SCRIPT_VERIFY_STRICTENC
|
||||
).PushSig(keys.key1, 5).DamagePush(10).ScriptError(SCRIPT_ERR_SIG_HASHTYPE));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"3-of-3 with nonzero dummy but no NULLDUMMY", 0
|
||||
).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2));
|
||||
bad.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"3-of-3 with nonzero dummy", SCRIPT_VERIFY_NULLDUMMY
|
||||
).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2));
|
||||
good.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"3-of-3 NOT with invalid sig and nonzero dummy but no NULLDUMMY", 0
|
||||
).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2).DamagePush(10));
|
||||
bad.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"3-of-3 NOT with invalid sig with nonzero dummy", SCRIPT_VERIFY_NULLDUMMY
|
||||
).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2).DamagePush(10));
|
||||
tests.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"3-of-3 with nonzero dummy but no NULLDUMMY", 0
|
||||
).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2));
|
||||
tests.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
|
||||
"3-of-3 with nonzero dummy", SCRIPT_VERIFY_NULLDUMMY
|
||||
).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2).ScriptError(SCRIPT_ERR_SIG_NULLDUMMY));
|
||||
tests.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"3-of-3 NOT with invalid sig and nonzero dummy but no NULLDUMMY", 0
|
||||
).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2).DamagePush(10));
|
||||
tests.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG << OP_NOT,
|
||||
"3-of-3 NOT with invalid sig with nonzero dummy", SCRIPT_VERIFY_NULLDUMMY
|
||||
).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2).DamagePush(10).ScriptError(SCRIPT_ERR_SIG_NULLDUMMY));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"2-of-2 with two identical keys and sigs pushed using OP_DUP but no SIGPUSHONLY", 0
|
||||
).Num(0).PushSig(keys.key1).Add(CScript() << OP_DUP));
|
||||
bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"2-of-2 with two identical keys and sigs pushed using OP_DUP", SCRIPT_VERIFY_SIGPUSHONLY
|
||||
).Num(0).PushSig(keys.key1).Add(CScript() << OP_DUP));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2SH(P2PK) with non-push scriptSig but no SIGPUSHONLY", 0
|
||||
).PushSig(keys.key2).PushRedeem());
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2SH(P2PK) with non-push scriptSig", SCRIPT_VERIFY_SIGPUSHONLY
|
||||
).PushSig(keys.key2).PushRedeem());
|
||||
good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"2-of-2 with two identical keys and sigs pushed", SCRIPT_VERIFY_SIGPUSHONLY
|
||||
).Num(0).PushSig(keys.key1).PushSig(keys.key1));
|
||||
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2PK with unnecessary input but no CLEANSTACK", SCRIPT_VERIFY_P2SH
|
||||
).Num(11).PushSig(keys.key0));
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2PK with unnecessary input", SCRIPT_VERIFY_CLEANSTACK | SCRIPT_VERIFY_P2SH
|
||||
).Num(11).PushSig(keys.key0));
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2SH with unnecessary input but no CLEANSTACK", SCRIPT_VERIFY_P2SH, true
|
||||
).Num(11).PushSig(keys.key0).PushRedeem());
|
||||
bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2SH with unnecessary input", SCRIPT_VERIFY_CLEANSTACK | SCRIPT_VERIFY_P2SH, true
|
||||
).Num(11).PushSig(keys.key0).PushRedeem());
|
||||
good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2SH with CLEANSTACK", SCRIPT_VERIFY_CLEANSTACK | SCRIPT_VERIFY_P2SH, true
|
||||
).PushSig(keys.key0).PushRedeem());
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"2-of-2 with two identical keys and sigs pushed using OP_DUP but no SIGPUSHONLY", 0
|
||||
).Num(0).PushSig(keys.key1).Add(CScript() << OP_DUP));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"2-of-2 with two identical keys and sigs pushed using OP_DUP", SCRIPT_VERIFY_SIGPUSHONLY
|
||||
).Num(0).PushSig(keys.key1).Add(CScript() << OP_DUP).ScriptError(SCRIPT_ERR_SIG_PUSHONLY));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2SH(P2PK) with non-push scriptSig but no P2SH or SIGPUSHONLY", 0, true
|
||||
).PushSig(keys.key2).Add(CScript() << OP_NOP8).PushRedeem());
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2PK with non-push scriptSig but with P2SH validation", 0
|
||||
).PushSig(keys.key2).Add(CScript() << OP_NOP8));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2SH(P2PK) with non-push scriptSig but no SIGPUSHONLY", SCRIPT_VERIFY_P2SH, true
|
||||
).PushSig(keys.key2).Add(CScript() << OP_NOP8).PushRedeem().ScriptError(SCRIPT_ERR_SIG_PUSHONLY));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
|
||||
"P2SH(P2PK) with non-push scriptSig but not P2SH", SCRIPT_VERIFY_SIGPUSHONLY, true
|
||||
).PushSig(keys.key2).Add(CScript() << OP_NOP8).PushRedeem().ScriptError(SCRIPT_ERR_SIG_PUSHONLY));
|
||||
tests.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
|
||||
"2-of-2 with two identical keys and sigs pushed", SCRIPT_VERIFY_SIGPUSHONLY
|
||||
).Num(0).PushSig(keys.key1).PushSig(keys.key1));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2PK with unnecessary input but no CLEANSTACK", SCRIPT_VERIFY_P2SH
|
||||
).Num(11).PushSig(keys.key0));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2PK with unnecessary input", SCRIPT_VERIFY_CLEANSTACK | SCRIPT_VERIFY_P2SH
|
||||
).Num(11).PushSig(keys.key0).ScriptError(SCRIPT_ERR_CLEANSTACK));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2SH with unnecessary input but no CLEANSTACK", SCRIPT_VERIFY_P2SH, true
|
||||
).Num(11).PushSig(keys.key0).PushRedeem());
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2SH with unnecessary input", SCRIPT_VERIFY_CLEANSTACK | SCRIPT_VERIFY_P2SH, true
|
||||
).Num(11).PushSig(keys.key0).PushRedeem().ScriptError(SCRIPT_ERR_CLEANSTACK));
|
||||
tests.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
|
||||
"P2SH with CLEANSTACK", SCRIPT_VERIFY_CLEANSTACK | SCRIPT_VERIFY_P2SH, true
|
||||
).PushSig(keys.key0).PushRedeem());
|
||||
|
||||
|
||||
std::set<std::string> tests_good;
|
||||
std::set<std::string> tests_bad;
|
||||
std::set<std::string> tests_set;
|
||||
|
||||
{
|
||||
UniValue json_good = read_json(std::string(json_tests::script_valid, json_tests::script_valid + sizeof(json_tests::script_valid)));
|
||||
UniValue json_bad = read_json(std::string(json_tests::script_invalid, json_tests::script_invalid + sizeof(json_tests::script_invalid)));
|
||||
UniValue json_tests = read_json(std::string(json_tests::script_tests, json_tests::script_tests + sizeof(json_tests::script_tests)));
|
||||
|
||||
for (unsigned int idx = 0; idx < json_good.size(); idx++) {
|
||||
const UniValue& tv = json_good[idx];
|
||||
tests_good.insert(tv.get_array().write());
|
||||
}
|
||||
for (unsigned int idx = 0; idx < json_bad.size(); idx++) {
|
||||
const UniValue& tv = json_bad[idx];
|
||||
tests_bad.insert(tv.get_array().write());
|
||||
for (unsigned int idx = 0; idx < json_tests.size(); idx++) {
|
||||
const UniValue& tv = json_tests[idx];
|
||||
tests_set.insert(JSONPrettyPrint(tv.get_array()));
|
||||
}
|
||||
}
|
||||
|
||||
std::string strGood;
|
||||
std::string strBad;
|
||||
std::string strGen;
|
||||
|
||||
BOOST_FOREACH(TestBuilder& test, good) {
|
||||
test.Test(true);
|
||||
std::string str = test.GetJSON().write();
|
||||
BOOST_FOREACH(TestBuilder& test, tests) {
|
||||
test.Test();
|
||||
std::string str = JSONPrettyPrint(test.GetJSON());
|
||||
#ifndef UPDATE_JSON_TESTS
|
||||
if (tests_good.count(str) == 0) {
|
||||
if (tests_set.count(str) == 0) {
|
||||
BOOST_CHECK_MESSAGE(false, "Missing auto script_valid test: " + test.GetComment());
|
||||
}
|
||||
#endif
|
||||
strGood += str + ",\n";
|
||||
}
|
||||
BOOST_FOREACH(TestBuilder& test, bad) {
|
||||
test.Test(false);
|
||||
std::string str = test.GetJSON().write();
|
||||
#ifndef UPDATE_JSON_TESTS
|
||||
if (tests_bad.count(str) == 0) {
|
||||
BOOST_CHECK_MESSAGE(false, "Missing auto script_invalid test: " + test.GetComment());
|
||||
}
|
||||
#endif
|
||||
strBad += str + ",\n";
|
||||
strGen += str + ",\n";
|
||||
}
|
||||
|
||||
#ifdef UPDATE_JSON_TESTS
|
||||
FILE* valid = fopen("script_valid.json.gen", "w");
|
||||
fputs(strGood.c_str(), valid);
|
||||
fclose(valid);
|
||||
FILE* invalid = fopen("script_invalid.json.gen", "w");
|
||||
fputs(strBad.c_str(), invalid);
|
||||
fclose(invalid);
|
||||
FILE* file = fopen("script_tests.json.gen", "w");
|
||||
fputs(strGen.c_str(), file);
|
||||
fclose(file);
|
||||
#endif
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(script_valid)
|
||||
BOOST_AUTO_TEST_CASE(script_json_test)
|
||||
{
|
||||
// Read tests from test/data/script_valid.json
|
||||
// Read tests from test/data/script_tests.json
|
||||
// Format is an array of arrays
|
||||
// Inner arrays are [ "scriptSig", "scriptPubKey", "flags" ]
|
||||
// Inner arrays are [ "scriptSig", "scriptPubKey", "flags", "expected_scripterror" ]
|
||||
// ... where scriptSig and scriptPubKey are stringified
|
||||
// scripts.
|
||||
UniValue tests = read_json(std::string(json_tests::script_valid, json_tests::script_valid + sizeof(json_tests::script_valid)));
|
||||
UniValue tests = read_json(std::string(json_tests::script_tests, json_tests::script_tests + sizeof(json_tests::script_tests)));
|
||||
|
||||
for (unsigned int idx = 0; idx < tests.size(); idx++) {
|
||||
UniValue test = tests[idx];
|
||||
string strTest = test.write();
|
||||
if (test.size() < 3) // Allow size > 3; extra stuff ignored (useful for comments)
|
||||
if (test.size() < 4) // Allow size > 3; extra stuff ignored (useful for comments)
|
||||
{
|
||||
if (test.size() != 1) {
|
||||
BOOST_ERROR("Bad test: " << strTest);
|
||||
@ -651,33 +713,9 @@ BOOST_AUTO_TEST_CASE(script_valid)
|
||||
string scriptPubKeyString = test[1].get_str();
|
||||
CScript scriptPubKey = ParseScript(scriptPubKeyString);
|
||||
unsigned int scriptflags = ParseScriptFlags(test[2].get_str());
|
||||
int scriptError = ParseScriptError(test[3].get_str());
|
||||
|
||||
DoTest(scriptPubKey, scriptSig, scriptflags, true, strTest);
|
||||
}
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(script_invalid)
|
||||
{
|
||||
// Scripts that should evaluate as invalid
|
||||
UniValue tests = read_json(std::string(json_tests::script_invalid, json_tests::script_invalid + sizeof(json_tests::script_invalid)));
|
||||
|
||||
for (unsigned int idx = 0; idx < tests.size(); idx++) {
|
||||
UniValue test = tests[idx];
|
||||
string strTest = test.write();
|
||||
if (test.size() < 3) // Allow size > 2; extra stuff ignored (useful for comments)
|
||||
{
|
||||
if (test.size() != 1) {
|
||||
BOOST_ERROR("Bad test: " << strTest);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
string scriptSigString = test[0].get_str();
|
||||
CScript scriptSig = ParseScript(scriptSigString);
|
||||
string scriptPubKeyString = test[1].get_str();
|
||||
CScript scriptPubKey = ParseScript(scriptPubKeyString);
|
||||
unsigned int scriptflags = ParseScriptFlags(test[2].get_str());
|
||||
|
||||
DoTest(scriptPubKey, scriptSig, scriptflags, false, strTest);
|
||||
DoTest(scriptPubKey, scriptSig, scriptflags, strTest, scriptError);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -196,6 +196,8 @@ BOOST_AUTO_TEST_CASE(util_ParseMoney)
    BOOST_CHECK_EQUAL(ret, COIN*10);
    BOOST_CHECK(ParseMoney("1.00", ret));
    BOOST_CHECK_EQUAL(ret, COIN);
    BOOST_CHECK(ParseMoney("1", ret));
    BOOST_CHECK_EQUAL(ret, COIN);
    BOOST_CHECK(ParseMoney("0.1", ret));
    BOOST_CHECK_EQUAL(ret, COIN/10);
    BOOST_CHECK(ParseMoney("0.01", ret));
@ -215,6 +217,9 @@ BOOST_AUTO_TEST_CASE(util_ParseMoney)

    // Attempted 63 bit overflow should fail
    BOOST_CHECK(!ParseMoney("92233720368.54775808", ret));

    // Parsing negative amounts must fail
    BOOST_CHECK(!ParseMoney("-1", ret));
}

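The new overflow case is boundary arithmetic: with COIN = 10^8 base units, INT64_MAX = 9223372036854775807 corresponds to exactly 92233720368.54775807 coins, so "92233720368.54775808" would need one more base unit than a 63-bit amount can hold and must be rejected. A quick standalone check of that arithmetic (a sketch, not the ParseMoney implementation):

```cpp
#include <cassert>
#include <cstdint>

int main()
{
    const int64_t COIN = 100000000;          // 1 coin = 10^8 base units
    const int64_t max_money = INT64_MAX;     // 9223372036854775807
    // 92233720368.54775807 coins is exactly INT64_MAX base units...
    assert(92233720368LL * COIN + 54775807LL == max_money);
    // ...so 92233720368.54775808 would require INT64_MAX + 1 and cannot be represented.
    return 0;
}
```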
BOOST_AUTO_TEST_CASE(util_IsHex)

@ -394,6 +394,9 @@ TorController::TorController(struct event_base* base, const std::string& target)
    target(target), conn(base), reconnect(true), reconnect_ev(0),
    reconnect_timeout(RECONNECT_TIMEOUT_START)
{
    reconnect_ev = event_new(base, -1, 0, reconnect_cb, this);
    if (!reconnect_ev)
        LogPrintf("tor: Failed to create event for reconnection: out of memory?\n");
    // Start connection attempts immediately
    if (!conn.Connect(target, boost::bind(&TorController::connected_cb, this, _1),
         boost::bind(&TorController::disconnected_cb, this, _1) )) {
@ -409,8 +412,10 @@ TorController::TorController(struct event_base* base, const std::string& target)

TorController::~TorController()
{
    if (reconnect_ev)
        event_del(reconnect_ev);
    if (reconnect_ev) {
        event_free(reconnect_ev);
        reconnect_ev = 0;
    }
    if (service.IsValid()) {
        RemoveLocal(service);
    }
@ -430,7 +435,7 @@ void TorController::add_onion_cb(TorControlConnection& conn, const TorControlRep
    }

    service = CService(service_id+".onion", GetListenPort(), false);
    LogPrintf("tor: Got service ID %s, advertizing service %s\n", service_id, service.ToString());
    LogPrintf("tor: Got service ID %s, advertising service %s\n", service_id, service.ToString());
    if (WriteBinaryFile(GetPrivateKeyFile(), private_key)) {
        LogPrint("tor", "tor: Cached service private key to %s\n", GetPrivateKeyFile());
    } else {
@ -455,7 +460,7 @@ void TorController::auth_cb(TorControlConnection& conn, const TorControlReply& r
        if (GetArg("-onion", "") == "") {
            proxyType addrOnion = proxyType(CService("127.0.0.1", 9050), true);
            SetProxy(NET_TOR, addrOnion);
            SetReachable(NET_TOR);
            SetLimited(NET_TOR, false);
        }

        // Finally - now create the service
@ -611,7 +616,7 @@ void TorController::connected_cb(TorControlConnection& conn)

void TorController::disconnected_cb(TorControlConnection& conn)
{
    // Stop advertizing service when disconnected
    // Stop advertising service when disconnected
    if (service.IsValid())
        RemoveLocal(service);
    service = CService();
@ -622,8 +627,8 @@ void TorController::disconnected_cb(TorControlConnection& conn)

    // Single-shot timer for reconnect. Use exponential backoff.
    struct timeval time = MillisToTimeval(int64_t(reconnect_timeout * 1000.0));
    reconnect_ev = event_new(base, -1, 0, reconnect_cb, this);
    event_add(reconnect_ev, &time);
    if (reconnect_ev)
        event_add(reconnect_ev, &time);
    reconnect_timeout *= RECONNECT_TIMEOUT_EXP;
}

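The destructor and reconnect changes above follow a common libevent pattern: allocate the single-shot timer event once, guard every event_add() with a null check, and release it with event_free() rather than only event_del(). A minimal standalone sketch of that pattern with exponential backoff (assumed names and timeouts; this is not the TorController code):

```cpp
#include <event2/event.h>
#include <cstdio>

static double timeout_secs = 1.0;              // analogous to RECONNECT_TIMEOUT_START
static struct event* reconnect_ev = NULL;

static void reconnect_cb(evutil_socket_t, short, void* arg)
{
    struct event_base* base = static_cast<struct event_base*>(arg);
    printf("reconnect attempt, next timeout %.0fs\n", timeout_secs);
    if (timeout_secs > 8.0) {                  // stop the demo after a few retries
        event_base_loopexit(base, NULL);
        return;
    }
    struct timeval tv;
    tv.tv_sec = (time_t)timeout_secs;
    tv.tv_usec = 0;
    if (reconnect_ev)                          // guard event_add, as the patch does
        event_add(reconnect_ev, &tv);
    timeout_secs *= 2.0;                       // exponential backoff
}

int main()
{
    struct event_base* base = event_base_new();
    reconnect_ev = event_new(base, -1, 0, reconnect_cb, base);  // allocated once, reused
    struct timeval first;
    first.tv_sec = 0;
    first.tv_usec = 100000;
    if (reconnect_ev)
        event_add(reconnect_ev, &first);
    event_base_dispatch(base);
    if (reconnect_ev) {                        // free the event, don't just delete it
        event_free(reconnect_ev);
        reconnect_ev = NULL;
    }
    event_base_free(base);
    return 0;
}
```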
@ -533,9 +533,7 @@ boost::filesystem::path GetDefaultDataDir()
        pathRet = fs::path(pszHome);
#ifdef MAC_OSX
    // Mac
    pathRet /= "Library/Application Support";
    TryCreateDirectory(pathRet);
    return pathRet / "DashCore";
    return pathRet / "Library/Application Support/DashCore";
#else
    // Unix
    return pathRet / ".dashcore";

@ -1336,7 +1336,7 @@ UniValue listreceivedbyaddress(const UniValue& params, bool fHelp)
|
||||
"\nList balances by receiving address.\n"
|
||||
"\nArguments:\n"
|
||||
"1. minconf (numeric, optional, default=1) The minimum number of confirmations before payments are included.\n"
|
||||
"2. includeempty (numeric, optional, default=false) Whether to include addresses that haven't received any payments.\n"
|
||||
"2. includeempty (bool, optional, default=false) Whether to include addresses that haven't received any payments.\n"
|
||||
"3. includeWatchonly (bool, optional, default=false) Whether to include watchonly addresses (see 'importaddress').\n"
|
||||
|
||||
"\nResult:\n"
|
||||
@ -1375,7 +1375,7 @@ UniValue listreceivedbyaccount(const UniValue& params, bool fHelp)
|
||||
"\nDEPRECATED. List balances by account.\n"
|
||||
"\nArguments:\n"
|
||||
"1. minconf (numeric, optional, default=1) The minimum number of confirmations before payments are included.\n"
|
||||
"2. includeempty (boolean, optional, default=false) Whether to include accounts that haven't received any payments.\n"
|
||||
"2. includeempty (bool, optional, default=false) Whether to include accounts that haven't received any payments.\n"
|
||||
"3. includeWatchonly (bool, optional, default=false) Whether to include watchonly addresses (see 'importaddress').\n"
|
||||
|
||||
"\nResult:\n"
|
||||
@ -1542,7 +1542,7 @@ UniValue listtransactions(const UniValue& params, bool fHelp)
|
||||
" \"trusted\": xxx (bool) Whether we consider the outputs of this unconfirmed transaction safe to spend.\n"
|
||||
" \"blockhash\": \"hashvalue\", (string) The block hash containing the transaction. Available for 'send' and 'receive'\n"
|
||||
" category of transactions.\n"
|
||||
" \"blockindex\": n, (numeric) The block index containing the transaction. Available for 'send' and 'receive'\n"
|
||||
" \"blockindex\": n, (numeric) The index of the transaction in the block that includes it. Available for 'send' and 'receive'\n"
|
||||
" category of transactions.\n"
|
||||
" \"blocktime\": xxx, (numeric) The block time in seconds since epoch (1 Jan 1970 GMT).\n"
|
||||
" \"txid\": \"transactionid\", (string) The transaction id. Available for 'send' and 'receive' category of transactions.\n"
|
||||
@ -1736,7 +1736,7 @@ UniValue listsinceblock(const UniValue& params, bool fHelp)
|
||||
" \"confirmations\": n, (numeric) The number of confirmations for the transaction. Available for 'send' and 'receive' category of transactions.\n"
|
||||
" \"bcconfirmations\" : n, (numeric) The number of blockchain confirmations for the transaction. Available for 'send' and 'receive' category of transactions.\n"
|
||||
" \"blockhash\": \"hashvalue\", (string) The block hash containing the transaction. Available for 'send' and 'receive' category of transactions.\n"
|
||||
" \"blockindex\": n, (numeric) The block index containing the transaction. Available for 'send' and 'receive' category of transactions.\n"
|
||||
" \"blockindex\": n, (numeric) The index of the transaction in the block that includes it. Available for 'send' and 'receive' category of transactions.\n"
|
||||
" \"blocktime\": xxx, (numeric) The block time in seconds since epoch (1 Jan 1970 GMT).\n"
|
||||
" \"txid\": \"transactionid\", (string) The transaction id. Available for 'send' and 'receive' category of transactions.\n"
|
||||
" \"time\": xxx, (numeric) The transaction time in seconds since epoch (Jan 1 1970 GMT).\n"
|
||||
@ -1821,7 +1821,7 @@ UniValue gettransaction(const UniValue& params, bool fHelp)
|
||||
" \"confirmations\" : n, (numeric) The number of confirmations\n"
|
||||
" \"bcconfirmations\" : n, (numeric) The number of blockchain confirmations\n"
|
||||
" \"blockhash\" : \"hash\", (string) The block hash\n"
|
||||
" \"blockindex\" : xx, (numeric) The block index\n"
|
||||
" \"blockindex\" : xx, (numeric) The index of the transaction in the block that includes it\n"
|
||||
" \"blocktime\" : ttt, (numeric) The time in seconds since epoch (1 Jan 1970 GMT)\n"
|
||||
" \"txid\" : \"transactionid\", (string) The transaction id.\n"
|
||||
" \"time\" : ttt, (numeric) The transaction time in seconds since epoch (1 Jan 1970 GMT)\n"
|
||||
|
@ -1496,7 +1496,7 @@ bool CWalletTx::RelayWalletTransaction(std::string strCommand)
    assert(pwallet->GetBroadcastTransactions());
    if (!IsCoinBase())
    {
        if (GetDepthInMainChain() == 0 && !isAbandoned()) {
        if (GetDepthInMainChain() == 0 && !isAbandoned() && InMempool()) {
            uint256 hash = GetHash();
            LogPrintf("Relaying wtx %s\n", hash.ToString());