diff --git a/contrib/gitian-descriptors/gitian-linux.yml b/contrib/gitian-descriptors/gitian-linux.yml
index 46ccc5a07..b3da368c4 100755
--- a/contrib/gitian-descriptors/gitian-linux.yml
+++ b/contrib/gitian-descriptors/gitian-linux.yml
@@ -26,7 +26,7 @@ files: []
script: |
WRAP_DIR=$HOME/wrapped
HOSTS="i686-pc-linux-gnu x86_64-unknown-linux-gnu"
- CONFIGFLAGS="--enable-glibc-back-compat --enable-reduce-exports --disable-bench --disable-gui-tests"
+ CONFIGFLAGS="--enable-glibc-back-compat --enable-reduce-exports --disable-bench --disable-gui-tests LDFLAGS=-static-libstdc++"
FAKETIME_HOST_PROGS=""
FAKETIME_PROGS="date ar ranlib nm strip objcopy"
HOST_CFLAGS="-O2 -g"
diff --git a/contrib/verifysfbinaries/README.md b/contrib/verifysfbinaries/README.md
index 8c038865b..1db3fe52f 100644
--- a/contrib/verifysfbinaries/README.md
+++ b/contrib/verifysfbinaries/README.md
@@ -1,6 +1,6 @@
-### Verify SF Binaries ###
+### Verify Binaries ###
This script attempts to download the signature file `SHA256SUMS.asc` from https://bitcoin.org.
It first checks if the signature passes, and then downloads the files specified in the file, and checks if the hashes of these files match those that are specified in the signature file.
-The script returns 0 if everything passes the checks. It returns 1 if either the signature check or the hash check doesn't pass. If an error occurs the return value is 2.
\ No newline at end of file
+The script returns 0 if everything passes the checks. It returns 1 if either the signature check or the hash check doesn't pass. If an error occurs the return value is 2.
diff --git a/doc/Doxyfile b/doc/Doxyfile
index 30c400456..8df93f9bb 100644
--- a/doc/Doxyfile
+++ b/doc/Doxyfile
@@ -34,7 +34,7 @@ PROJECT_NAME = Dash
# This could be handy for archiving the generated documentation or
# if some version control system is used.
-PROJECT_NUMBER = 0.12.0
+PROJECT_NUMBER = 0.12.1
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer
diff --git a/doc/README.md b/doc/README.md
index f580fafe1..2fd35e3ed 100644
--- a/doc/README.md
+++ b/doc/README.md
@@ -1,4 +1,4 @@
-Dash Core 0.12.0
+Dash Core 0.12.1
=====================
This is the official reference wallet for Dash digital currency and comprises the backbone of the Dash peer-to-peer network. You can [download Dash Core](https://www.dash.org/downloads/) or [build it yourself](#building) using the guides below.
@@ -47,7 +47,7 @@ The following are developer notes on how to build Dash on your native platform.
Development
---------------------
-The Dash repo's [root README](https://github.com/dashpay/dash/blob/master/README.md) contains relevant information on the development process and automated testing.
+The Dash repo's [root README](/README.md) contains relevant information on the development process and automated testing.
- [Developer Notes](developer-notes.md)
- [Multiwallet Qt Development](multiwallet-qt.md)
diff --git a/doc/README_windows.txt b/doc/README_windows.txt
index a94f76dd3..1a347d4be 100644
--- a/doc/README_windows.txt
+++ b/doc/README_windows.txt
@@ -1,4 +1,4 @@
-Dash Core 0.12.0
+Dash Core 0.12.1
=====================
Intro
diff --git a/doc/build-unix.md b/doc/build-unix.md
index 865408020..f85635213 100644
--- a/doc/build-unix.md
+++ b/doc/build-unix.md
@@ -236,3 +236,30 @@ In this case there is no dependency on Berkeley DB 4.8.
Mining is also possible in disable-wallet mode, but only using the `getblocktemplate` RPC
call not `getwork`.
+
+Additional Configure Flags
+--------------------------
+A list of additional configure flags can be displayed with:
+
+ ./configure --help
+
+ARM Cross-compilation
+---------------------
+These steps can be performed on, for example, an Ubuntu VM. The depends system
+will also work on other Linux distributions; however, the commands for
+installing the toolchain will be different.
+
+First install the toolchain:
+
+ sudo apt-get install g++-arm-linux-gnueabihf
+
+To build executables for ARM:
+
+ cd depends
+ make HOST=arm-linux-gnueabihf NO_QT=1
+ cd ..
+ ./configure --prefix=$PWD/depends/arm-linux-gnueabihf --enable-glibc-back-compat --enable-reduce-exports LDFLAGS=-static-libstdc++
+ make
+
+
+For further documentation on the depends system see [README.md](../depends/README.md) in the depends directory.
diff --git a/doc/release-notes.md b/doc/release-notes.md
index e619b4518..f160517cd 100644
--- a/doc/release-notes.md
+++ b/doc/release-notes.md
@@ -1,10 +1,9 @@
-Dash Core 0.12
-==================
+Dash Core version 0.12.1 is now available from:
-Dash Core tree 0.12.1.x release notes can be found here:
-- [v0.12.1](release-notes/dash/release-notes-0.12.1.md)
+
-Dash Core tree 0.12.1.x is a fork of Bitcoin Core tree 0.12
+This is a new minor version release, including a soft fork deployment of
+BIP68, BIP112 and BIP113 via the BIP9 version bits mechanism, various
+bugfixes and updated translations.
@@ -29,10 +28,109 @@ earlier.
Notable changes
===============
-Example item
----------------------------------------
+First version bits BIP9 softfork deployment
+-------------------------------------------
-Example text.
+This release includes a soft fork deployment to enforce [BIP68][],
+[BIP112][] and [BIP113][] using the [BIP9][] deployment mechanism.
+
+The deployment sets the block version number to 0x20000001 between
+midnight 1st May 2016 and midnight 1st May 2017 to signal readiness for
+deployment. The version number consists of 0x20000000 to indicate version
+bits together with setting bit 0 to indicate support for this combined
+deployment, shown as "csv" in the `getblockchaininfo` RPC call.
+
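+As a rough illustration only (not part of the release; the constant names
+below are ours), a block's `nVersion` signals this deployment when its top
+bits carry the BIP9 marker 0x20000000 and bit 0 is set:
+
+    # Illustrative sketch only, based on the BIP9 rules referenced below.
+    VERSIONBITS_TOP_MASK = 0xE0000000
+    VERSIONBITS_TOP_BITS = 0x20000000
+    CSV_BIT = 0
+
+    def signals_csv(nversion):
+        if (nversion & VERSIONBITS_TOP_MASK) != VERSIONBITS_TOP_BITS:
+            return False                  # not a version-bits block
+        return bool(nversion & (1 << CSV_BIT))
+
+    assert signals_csv(0x20000001)        # version-bits block with bit 0 set
+    assert not signals_csv(4)             # legacy version 4 block
+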
+For more information about the soft forking change, please see
+
+
+This specific backport pull-request can be viewed at
+
+
+[BIP9]: https://github.com/bitcoin/bips/blob/master/bip-0009.mediawiki
+[BIP68]: https://github.com/bitcoin/bips/blob/master/bip-0068.mediawiki
+[BIP112]: https://github.com/bitcoin/bips/blob/master/bip-0112.mediawiki
+[BIP113]: https://github.com/bitcoin/bips/blob/master/bip-0113.mediawiki
+
+BIP68 soft fork to enforce sequence locks for relative locktime
+---------------------------------------------------------------
+
+[BIP68][] introduces relative lock-time consensus-enforced semantics of
+the sequence number field to enable a signed transaction input to remain
+invalid for a defined period of time after confirmation of its corresponding
+outpoint.
+
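+As a rough sketch (not part of the release; the helper name is ours), the
+interpretation of `nSequence` under BIP68 can be illustrated with the same
+constants used by the new `bip68-sequence.py` test:
+
+    # Illustrative sketch only.
+    SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31
+    SEQUENCE_LOCKTIME_TYPE_FLAG = 1 << 22     # set: time-based, unset: height-based
+    SEQUENCE_LOCKTIME_GRANULARITY = 9         # time units of 512 seconds
+    SEQUENCE_LOCKTIME_MASK = 0x0000ffff
+
+    def relative_lock(nsequence, txversion):
+        """Return the relative lock implied by an input's nSequence, or None."""
+        if txversion < 2 or nsequence & SEQUENCE_LOCKTIME_DISABLE_FLAG:
+            return None                       # BIP68 not enforced for this input
+        value = nsequence & SEQUENCE_LOCKTIME_MASK
+        if nsequence & SEQUENCE_LOCKTIME_TYPE_FLAG:
+            return ("time", value << SEQUENCE_LOCKTIME_GRANULARITY)  # seconds
+        return ("height", value)              # blocks
+
+    assert relative_lock((1 << 31) | 10, 2) is None
+    assert relative_lock(10, 2) == ("height", 10)
+    assert relative_lock((1 << 22) | 10, 2) == ("time", 10 * 512)
+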
+For more information about the implementation, see
+
+
+BIP112 soft fork to enforce OP_CHECKSEQUENCEVERIFY
+--------------------------------------------------
+
+[BIP112][] redefines the existing OP_NOP3 as OP_CHECKSEQUENCEVERIFY (CSV)
+for a new opcode in the Bitcoin scripting system that in combination with
+[BIP68][] allows execution pathways of a script to be restricted based
+on the age of the output being spent.
+
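+The new `bip68-112-113-p2p.py` test exercises the opcode by prepending
+"<relative locktime> OP_NOP3 OP_DROP" to a signed scriptSig (OP_NOP3 being
+the opcode BIP112 redefines as OP_CHECKSEQUENCEVERIFY). A minimal sketch of
+that pattern, using the test framework's script helpers (the function name
+here is ours):
+
+    # Illustrative sketch only, mirroring create_bip112txs() in the new test.
+    from test_framework.script import CScript, OP_NOP3, OP_DROP
+
+    def prepend_csv_check(script_sig, relative_locktime):
+        # Require the spending input's nSequence to satisfy the given
+        # relative locktime before the rest of the scriptSig runs.
+        return CScript([relative_locktime, OP_NOP3, OP_DROP] + list(CScript(script_sig)))
+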
+For more information about the implementation, see
+
+
+BIP113 locktime enforcement soft fork
+-------------------------------------
+
+Bitcoin Core 0.11.2 previously introduced mempool-only locktime
+enforcement using GetMedianTimePast(). This release enforces that rule at
+the consensus level.
+
+Bitcoin transactions currently may specify a locktime indicating when
+they may be added to a valid block. Current consensus rules require
+that blocks have a block header time greater than the locktime specified
+in any transaction in that block.
+
+Miners get to choose what time they use for their header time, with the
+consensus rule being that no node will accept a block whose time is more
+than two hours in the future. This creates an incentive for miners to
+set their header times to future values in order to include locktimed
+transactions which weren't supposed to be included for up to two more
+hours.
+
+The consensus rules also specify that valid blocks must have a header
+time greater than that of the median of the 11 previous blocks. This
+GetMedianTimePast() time has a key feature we generally associate with
+time: it can't go backwards.
+
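+A minimal sketch of that computation (illustrative only; the new
+`bip68-sequence.py` test reads the same value via the `getblockheader` RPC's
+`mediantime` field):
+
+    # Illustrative sketch only: median of the last 11 block header times.
+    def median_time_past(block_times):
+        last_eleven = sorted(block_times[-11:])
+        return last_eleven[len(last_eleven) // 2]
+
+    assert median_time_past(range(600, 7200, 600)) == 3600
+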
+[BIP113][] specifies a soft fork enforced in this release that
+weakens this perverse incentive for individual miners to use a future
+time by requiring that valid blocks have a computed GetMedianTimePast()
+greater than the locktime specified in any transaction in that block.
+
+Mempool inclusion rules currently require transactions to be valid for
+immediate inclusion in a block in order to be accepted into the mempool.
+This release begins applying the BIP113 rule to received transactions,
+so transactions whose locktime is greater than the GetMedianTimePast() will
+no longer be accepted into the mempool.
+
+**Implication for miners:** you will begin rejecting transactions that
+would not be valid under BIP113, which will prevent you from producing
+invalid blocks when BIP113 is enforced on the network. Any
+transactions which are valid under the current rules but not yet valid
+under the BIP113 rules will either be mined by other miners or delayed
+until they are valid under BIP113. Note, however, that time-based
+locktime transactions are more or less unseen on the network currently.
+
+**Implication for users:** GetMedianTimePast() always trails behind the
+current time, so a transaction locktime set to the present time will be
+rejected by nodes running this release until the median time moves
+forward. To compensate, subtract one hour (3,600 seconds) from your
+locktimes to allow those transactions to be included in mempools at
+approximately the expected time.
+
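+A wallet-side sketch of that adjustment (illustrative only; the helper name
+is ours):
+
+    # Illustrative sketch only: back off a wall-clock locktime by one hour
+    # so it is already below the median-time-past when broadcast.
+    import time
+
+    def bip113_safe_locktime(target_time=None):
+        if target_time is None:
+            target_time = int(time.time())
+        return target_time - 3600
+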
+For more information about the implementation, see
+
+
+Miscellaneous
+-------------
+
+The p2p alert system is off by default. To turn it on, use the `-alert`
+option at startup or in the configuration file.
0.12.1 Change log
=================
@@ -42,31 +140,57 @@ behavior, not code moves, refactors and string updates. For convenience in locat
the code changes and accompanying discussion, both the pull request and
git merge commit are mentioned.
-### RPC and REST
+### RPC and other APIs
+- #7739 `7ffc2bd` Add abandoned status to listtransactions (jonasschnelli)
### Configuration and command-line options
### Block and transaction handling
+- #7543 `834aaef` Backport BIP9, BIP68 and BIP112 with softfork (btcdrak)
### P2P protocol and network code
+- #7804 `90f1d24` Track block download times per individual block (sipa)
+- #7832 `4c3a00d` Reduce block timeout to 10 minutes (laanwj)
### Validation
+- #7821 `4226aac` init: allow shutdown during 'Activating best chain...' (laanwj)
+- #7835 `46898e7` Version 2 transactions remain non-standard until CSV activates (sdaftuar)
### Build system
+- #7487 `00d57b4` Workaround Travis-side CI issues (luke-jr)
+- #7606 `a10da9a` No need to set -L and --location for curl (MarcoFalke)
+- #7614 `ca8f160` Add curl to packages (now needed for depends) (luke-jr)
+- #7776 `a784675` Remove unnecessary executables from gitian release (laanwj)
### Wallet
+- #7715 `19866c1` Fix calculation of balances and available coins. (morcos)
### GUI
### Tests and QA
### Miscellaneous
+- #7617 `f04f4fd` Fix markdown syntax and line terminate LogPrint (MarcoFalke)
+- #7747 `4d035bc` added depends cross compile info (accraze)
+- #7741 `a0cea89` Mark p2p alert system as deprecated (btcdrak)
+- #7780 `c5f94f6` Disable bad-chain alert (btcdrak)
Credits
=======
Thanks to everyone who directly contributed to this release:
+- accraze
+- Alex Morcos
+- BtcDrak
+- Jonas Schnelli
+- Luke Dashjr
+- MarcoFalke
+- Mark Friedenbach
+- NicolasDorier
+- Pieter Wuille
+- Suhas Daftuar
+- Wladimir J. van der Laan
As well as everyone that helped translating on [Transifex](https://www.transifex.com/projects/p/bitcoin/).
diff --git a/doc/release-process.md b/doc/release-process.md
index 925580819..4ffff18a5 100644
--- a/doc/release-process.md
+++ b/doc/release-process.md
@@ -196,7 +196,7 @@ Note: check that SHA256SUMS itself doesn't end up in SHA256SUMS, which is a spur
- Optionally reddit /r/Dashpay, ... but this will usually sort out itself
-- Notify Flare (?) ***TODO*** so that he can start building [https://launchpad.net/~dashpay/+archive/ubuntu/dash](the PPAs) ***TODO***
+- Notify flare so that he can start building [the PPAs](https://launchpad.net/~dash.org/+archive/ubuntu/dash)
- Add release notes for the new version to the directory `doc/release-notes` in git master
diff --git a/qa/pull-tester/rpc-tests.py b/qa/pull-tester/rpc-tests.py
index 397e27220..2af9be8b4 100755
--- a/qa/pull-tester/rpc-tests.py
+++ b/qa/pull-tester/rpc-tests.py
@@ -74,6 +74,7 @@ if EXEEXT == ".exe" and "-win" not in opts:
#Tests
testScripts = [
+ 'bip68-112-113-p2p.py',
'wallet.py',
'listtransactions.py',
'receivedby.py',
@@ -106,10 +107,13 @@ testScripts = [
'invalidblockrequest.py', # TODO: works, needs dash_hash
'invalidtxrequest.py', # TODO: works, needs dash_hash
'abandonconflict.py',
+ 'p2p-versionbits-warning.py',
]
testScriptsExt = [
+ 'bip9-softforks.py',
'bip65-cltv.py',
'bip65-cltv-p2p.py', # TODO: works, needs dash_hash
+ 'bip68-sequence.py',
'bipdersig-p2p.py', # TODO: works, needs dash_hash
'bipdersig.py',
'getblocktemplate_longpoll.py', # FIXME: "socket.error: [Errno 54] Connection reset by peer" on my Mac, same as https://github.com/bitcoin/bitcoin/issues/6651
diff --git a/qa/rpc-tests/abandonconflict.py b/qa/rpc-tests/abandonconflict.py
index 38028df07..a83aa97fc 100755
--- a/qa/rpc-tests/abandonconflict.py
+++ b/qa/rpc-tests/abandonconflict.py
@@ -83,6 +83,12 @@ class AbandonConflictTest(BitcoinTestFramework):
# inputs are still spent, but change not received
newbalance = self.nodes[0].getbalance()
assert(newbalance == balance - Decimal("24.9996"))
+        # Unconfirmed received funds that are not in the mempool also shouldn't show
+        # up in the unconfirmed balance
+ unconfbalance = self.nodes[0].getunconfirmedbalance() + self.nodes[0].getbalance()
+ assert(unconfbalance == newbalance)
+ # Also shouldn't show up in listunspent
+        assert(txABC2 not in [utxo["txid"] for utxo in self.nodes[0].listunspent(0)])
balance = newbalance
# Abandon original transaction and verify inputs are available again
diff --git a/qa/rpc-tests/bip68-112-113-p2p.py b/qa/rpc-tests/bip68-112-113-p2p.py
new file mode 100755
index 000000000..460e855ec
--- /dev/null
+++ b/qa/rpc-tests/bip68-112-113-p2p.py
@@ -0,0 +1,540 @@
+#!/usr/bin/env python2
+# Copyright (c) 2015 The Bitcoin Core developers
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.mininode import ToHex, CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.comptool import TestInstance, TestManager
+from test_framework.script import *
+from binascii import unhexlify
+import cStringIO
+import time
+
+'''
+This test is meant to exercise activation of the first version bits soft fork.
+This soft fork will activate the following BIPs:
+BIP 68 - nSequence relative lock times
+BIP 112 - CHECKSEQUENCEVERIFY
+BIP 113 - MedianTimePast semantics for nLockTime
+
+regtest lock-in with 108/144 block signalling
+activation after a further 144 blocks
+
+mine 82 blocks whose coinbases will be used to generate inputs for our tests
+mine 61 blocks to transition from DEFINED to STARTED
+mine 144 blocks, only 100 of which are signaling readiness, in order to fail to change state this period
+mine 144 blocks with 108 signaling and verify STARTED->LOCKED_IN
+mine 140 blocks and seed the block chain with the 82 inputs we will use for our tests at height 572
+mine 3 blocks and verify still at LOCKED_IN and test that enforcement has not triggered
+mine 1 block and test that enforcement has triggered (which triggers ACTIVE)
+Test BIP 113 is enforced
+Mine 4 blocks so next height is 580 and test BIP 68 is enforced for time and height
+Mine 1 block so next height is 581 and test BIP 68 now passes time but not height
+Mine 1 block so next height is 582 and test BIP 68 now passes time and height
+Test that BIP 112 is enforced
+
+Various transactions will be used to test that the BIP rules are not enforced before the soft fork activates,
+and that after the soft fork activates, transactions pass and fail as they should according to the rules.
+For each BIP, transactions of versions 1 and 2 will be tested.
+----------------
+BIP 113:
+bip113tx - modify the nLocktime variable
+
+BIP 68:
+bip68txs - 16 txs with nSequence relative locktime of 10 with various bits set as per the relative_locktimes below
+
+BIP 112:
+bip112txs_vary_nSequence - 16 txs with nSequence relative_locktimes of 10 evaluated against 10 OP_CSV OP_DROP
+bip112txs_vary_nSequence_9 - 16 txs with nSequence relative_locktimes of 9 evaluated against 10 OP_CSV OP_DROP
+bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
+bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
+bip112tx_special - test negative argument to OP_CSV
+'''
+
+base_relative_locktime = 10
+seq_disable_flag = 1<<31
+seq_random_high_bit = 1<<25
+seq_type_flag = 1<<22
+seq_random_low_bit = 1<<18
+
+# b31,b25,b22,b18 represent the 31st, 25th, 22nd and 18th bits respectively in the nSequence field
+# relative_locktimes[b31][b25][b22][b18] is a base_relative_locktime with the indicated bits set if their indices are 1
+relative_locktimes = []
+for b31 in xrange(2):
+ b25times = []
+ for b25 in xrange(2):
+ b22times = []
+ for b22 in xrange(2):
+ b18times = []
+ for b18 in xrange(2):
+ rlt = base_relative_locktime
+ if (b31):
+ rlt = rlt | seq_disable_flag
+ if (b25):
+ rlt = rlt | seq_random_high_bit
+ if (b22):
+ rlt = rlt | seq_type_flag
+ if (b18):
+ rlt = rlt | seq_random_low_bit
+ b18times.append(rlt)
+ b22times.append(b18times)
+ b25times.append(b22times)
+ relative_locktimes.append(b25times)
+
+def all_rlt_txs(txarray):
+ txs = []
+ for b31 in xrange(2):
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ txs.append(txarray[b31][b25][b22][b18])
+ return txs
+
+class BIP68_112_113Test(ComparisonTestFramework):
+ def __init__(self):
+ self.num_nodes = 1
+
+ def setup_network(self):
+ # Must set the blockversion for this test
+ self.nodes = start_nodes(1, self.options.tmpdir,
+ extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=4']],
+ binary=[self.options.testbinary])
+
+ def run_test(self):
+ test = TestManager(self, self.options.tmpdir)
+ test.add_all_connections(self.nodes)
+ NetworkThread().start() # Start up network handling in another thread
+ test.run()
+
+ def send_generic_input_tx(self, node, coinbases):
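+        # Spend the first (coinbase) output of a stored coinbase block back to
+        # self.nodeaddress, giving us a generic confirmed input for later tests.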
+ amount = Decimal("499.99")
+ return node.sendrawtransaction(ToHex(self.sign_transaction(node, self.create_transaction(node, node.getblock(coinbases.pop())['tx'][0], self.nodeaddress, amount))))
+
+ def create_transaction(self, node, txid, to_address, amount):
+ inputs = [{ "txid" : txid, "vout" : 0}]
+ outputs = { to_address : amount }
+ rawtx = node.createrawtransaction(inputs, outputs)
+ tx = CTransaction()
+ f = cStringIO.StringIO(unhexlify(rawtx))
+ tx.deserialize(f)
+ return tx
+
+ def sign_transaction(self, node, unsignedtx):
+ rawtx = ToHex(unsignedtx)
+ signresult = node.signrawtransaction(rawtx)
+ tx = CTransaction()
+ f = cStringIO.StringIO(unhexlify(signresult['hex']))
+ tx.deserialize(f)
+ return tx
+
+ def generate_blocks(self, number, version, test_blocks = []):
+ for i in xrange(number):
+ block = self.create_test_block([], version)
+ test_blocks.append([block, True])
+ self.last_block_time += 600
+ self.tip = block.sha256
+ self.tipheight += 1
+ return test_blocks
+
+ def create_test_block(self, txs, version = 536870912):
+ block = create_block(self.tip, create_coinbase(self.tipheight + 1), self.last_block_time + 600)
+ block.nVersion = version
+ block.vtx.extend(txs)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+ return block
+
+ def create_bip68txs(self, bip68inputs, txversion, locktime_delta = 0):
+ txs = []
+ assert(len(bip68inputs) >= 16)
+ i = 0
+ for b31 in xrange(2):
+ b25txs = []
+ for b25 in xrange(2):
+ b22txs = []
+ for b22 in xrange(2):
+ b18txs = []
+ for b18 in xrange(2):
+ tx = self.create_transaction(self.nodes[0], bip68inputs[i], self.nodeaddress, Decimal("499.98"))
+ i += 1
+ tx.nVersion = txversion
+ tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
+ b18txs.append(self.sign_transaction(self.nodes[0], tx))
+ b22txs.append(b18txs)
+ b25txs.append(b22txs)
+ txs.append(b25txs)
+ return txs
+
+ def create_bip112special(self, input, txversion):
+ tx = self.create_transaction(self.nodes[0], input, self.nodeaddress, Decimal("499.98"))
+ tx.nVersion = txversion
+ signtx = self.sign_transaction(self.nodes[0], tx)
+ signtx.vin[0].scriptSig = CScript([-1, OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ return signtx
+
+ def create_bip112txs(self, bip112inputs, varyOP_CSV, txversion, locktime_delta = 0):
+ txs = []
+ assert(len(bip112inputs) >= 16)
+ i = 0
+ for b31 in xrange(2):
+ b25txs = []
+ for b25 in xrange(2):
+ b22txs = []
+ for b22 in xrange(2):
+ b18txs = []
+ for b18 in xrange(2):
+ tx = self.create_transaction(self.nodes[0], bip112inputs[i], self.nodeaddress, Decimal("499.98"))
+ i += 1
+ if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed
+ tx.vin[0].nSequence = base_relative_locktime + locktime_delta
+ else: # vary nSequence instead, OP_CSV is fixed
+ tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
+ tx.nVersion = txversion
+ signtx = self.sign_transaction(self.nodes[0], tx)
+ if (varyOP_CSV):
+ signtx.vin[0].scriptSig = CScript([relative_locktimes[b31][b25][b22][b18], OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ else:
+ signtx.vin[0].scriptSig = CScript([base_relative_locktime, OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ b18txs.append(signtx)
+ b22txs.append(b18txs)
+ b25txs.append(b22txs)
+ txs.append(b25txs)
+ return txs
+
+ def get_tests(self):
+ long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
+ self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
+ self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2*32 + 1) # 82 blocks generated for inputs
+ self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time
+ self.tipheight = 82 # height of the next block to build
+ self.last_block_time = long_past_time
+ self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+ self.nodeaddress = self.nodes[0].getnewaddress()
+
+ assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
+ test_blocks = self.generate_blocks(61, 4)
+ yield TestInstance(test_blocks, sync_every_block=False) # 1
+ # Advanced from DEFINED to STARTED, height = 143
+ assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')
+
+ # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0
+ # using a variety of bits to simulate multiple parallel softforks
+ test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+ test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
+ yield TestInstance(test_blocks, sync_every_block=False) # 2
+ # Failed to advance past STARTED, height = 287
+ assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')
+
+ # 108 out of 144 signal bit 0 to achieve lock-in
+ # using a variety of bits to simulate multiple parallel softforks
+ test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+ test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
+ yield TestInstance(test_blocks, sync_every_block=False) # 3
+ # Advanced from STARTED to LOCKED_IN, height = 431
+ assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')
+
+ # 140 more version 4 blocks
+ test_blocks = self.generate_blocks(140, 4)
+ yield TestInstance(test_blocks, sync_every_block=False) # 4
+
+ ### Inputs at height = 572
+ # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
+ # Note we reuse inputs for v1 and v2 txs so must test these separately
+ # 16 normal inputs
+ bip68inputs = []
+ for i in xrange(16):
+ bip68inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
+ bip112basicinputs = []
+ for j in xrange(2):
+ inputs = []
+ for i in xrange(16):
+ inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ bip112basicinputs.append(inputs)
+ # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
+ bip112diverseinputs = []
+ for j in xrange(2):
+ inputs = []
+ for i in xrange(16):
+ inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ bip112diverseinputs.append(inputs)
+ # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
+ bip112specialinput = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+ # 1 normal input
+ bip113input = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+
+ self.nodes[0].setmocktime(self.last_block_time + 600)
+ inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572
+ self.nodes[0].setmocktime(0)
+ self.tip = int("0x" + inputblockhash + "L", 0)
+ self.tipheight += 1
+ self.last_block_time += 600
+ assert_equal(len(self.nodes[0].getblock(inputblockhash,True)["tx"]), 82+1)
+
+ # 2 more version 4 blocks
+ test_blocks = self.generate_blocks(2, 4)
+ yield TestInstance(test_blocks, sync_every_block=False) # 5
+ # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)
+ assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')
+
+ # Test both version 1 and version 2 transactions for all tests
+ # BIP113 test transaction will be modified before each use to put in appropriate block time
+ bip113tx_v1 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("499.98"))
+ bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
+ bip113tx_v2 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("499.98"))
+ bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
+ bip113tx_v2.nVersion = 2
+
+ # For BIP68 test all 16 relative sequence locktimes
+ bip68txs_v1 = self.create_bip68txs(bip68inputs, 1)
+ bip68txs_v2 = self.create_bip68txs(bip68inputs, 2)
+
+ # For BIP112 test:
+ # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
+ bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1)
+ bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2)
+ # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
+ bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1)
+ bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(bip112basicinputs[1], False, 2, -1)
+ # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
+ bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1)
+ bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2)
+ # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
+ bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1)
+ bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1)
+ # -1 OP_CSV OP_DROP input
+ bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1)
+ bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2)
+
+
+ ### TESTING ###
+ ##################################
+ ### Before Soft Forks Activate ###
+ ##################################
+ # All txs should pass
+ ### Version 1 txs ###
+ success_txs = []
+ # add BIP113 tx and -1 CSV tx
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+ success_txs.append(bip113signed1)
+ success_txs.append(bip112tx_special_v1)
+ # add BIP 68 txs
+ success_txs.extend(all_rlt_txs(bip68txs_v1))
+ # add BIP 112 with seq=10 txs
+ success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
+ success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
+ # try BIP 112 with seq=9 txs
+ success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
+ success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 6
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ ### Version 2 txs ###
+ success_txs = []
+ # add BIP113 tx and -1 CSV tx
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ success_txs.append(bip113signed2)
+ success_txs.append(bip112tx_special_v2)
+ # add BIP 68 txs
+ success_txs.extend(all_rlt_txs(bip68txs_v2))
+ # add BIP 112 with seq=10 txs
+ success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
+ success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
+ # try BIP 112 with seq=9 txs
+ success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
+ success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 7
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+
+ # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
+ test_blocks = self.generate_blocks(1, 4)
+ yield TestInstance(test_blocks, sync_every_block=False) # 8
+ assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')
+
+
+ #################################
+ ### After Soft Forks Activate ###
+ #################################
+ ### BIP 113 ###
+ # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ for bip113tx in [bip113signed1, bip113signed2]:
+ yield TestInstance([[self.create_test_block([bip113tx]), False]]) # 9,10
+ # BIP 113 tests should now pass if the locktime is < MTP
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
+ bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
+ bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ for bip113tx in [bip113signed1, bip113signed2]:
+ yield TestInstance([[self.create_test_block([bip113tx]), True]]) # 11,12
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ # Next block height = 580 after 4 blocks of random version
+ test_blocks = self.generate_blocks(4, 1234)
+ yield TestInstance(test_blocks, sync_every_block=False) # 13
+
+ ### BIP 68 ###
+ ### Version 1 txs ###
+ # All still pass
+ success_txs = []
+ success_txs.extend(all_rlt_txs(bip68txs_v1))
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 14
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ ### Version 2 txs ###
+ bip68success_txs = []
+ # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ bip68success_txs.append(bip68txs_v2[1][b25][b22][b18])
+ yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 15
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
+ bip68timetxs = []
+ for b25 in xrange(2):
+ for b18 in xrange(2):
+ bip68timetxs.append(bip68txs_v2[0][b25][1][b18])
+ for tx in bip68timetxs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 16 - 19
+ bip68heighttxs = []
+ for b25 in xrange(2):
+ for b18 in xrange(2):
+ bip68heighttxs.append(bip68txs_v2[0][b25][0][b18])
+ for tx in bip68heighttxs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 20 - 23
+
+ # Advance one block to 581
+ test_blocks = self.generate_blocks(1, 1234)
+ yield TestInstance(test_blocks, sync_every_block=False) # 24
+
+ # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
+ bip68success_txs.extend(bip68timetxs)
+ yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 25
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ for tx in bip68heighttxs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 26 - 29
+
+ # Advance one block to 582
+ test_blocks = self.generate_blocks(1, 1234)
+ yield TestInstance(test_blocks, sync_every_block=False) # 30
+
+ # All BIP 68 txs should pass
+ bip68success_txs.extend(bip68heighttxs)
+ yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 31
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+
+ ### BIP 112 ###
+ ### Version 1 txs ###
+ # -1 OP_CSV tx should fail
+ yield TestInstance([[self.create_test_block([bip112tx_special_v1]), False]]) #32
+ # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
+ success_txs = []
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ success_txs.append(bip112txs_vary_OP_CSV_v1[1][b25][b22][b18])
+ success_txs.append(bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18])
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 33
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
+ fail_txs = []
+ fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
+ fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18])
+ fail_txs.append(bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18])
+
+ for tx in fail_txs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 34 - 81
+
+ ### Version 2 txs ###
+ # -1 OP_CSV tx should fail
+ yield TestInstance([[self.create_test_block([bip112tx_special_v2]), False]]) #82
+
+ # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
+ success_txs = []
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV
+ success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV_9
+
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 83
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
+ # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
+ fail_txs = []
+ fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) # 16/16 of vary_nSequence_9
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22][b18]) # 16/16 of vary_OP_CSV_9
+
+ for tx in fail_txs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 84 - 107
+
+ # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
+ fail_txs = []
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22][b18]) # 8/16 of vary_nSequence
+ for tx in fail_txs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 108-115
+
+ # If sequencelock types mismatch, tx should fail
+ fail_txs = []
+ for b25 in xrange(2):
+ for b18 in xrange(2):
+ fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1][b18]) # 12/16 of vary_nSequence
+ fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1][b18]) # 12/16 of vary_OP_CSV
+ for tx in fail_txs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 116-123
+
+ # Remaining txs should pass, just test masking works properly
+ success_txs = []
+ for b25 in xrange(2):
+ for b18 in xrange(2):
+ success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0][b18]) # 16/16 of vary_nSequence
+ success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0][b18]) # 16/16 of vary_OP_CSV
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 124
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+        # Additional test: check that comparison of two time types works properly
+ time_txs = []
+ for b25 in xrange(2):
+ for b18 in xrange(2):
+ tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18]
+ tx.vin[0].nSequence = base_relative_locktime | seq_type_flag
+ signtx = self.sign_transaction(self.nodes[0], tx)
+ time_txs.append(signtx)
+ yield TestInstance([[self.create_test_block(time_txs), True]]) # 125
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ ### Missing aspects of test
+ ## Testing empty stack fails
+
+
+if __name__ == '__main__':
+ BIP68_112_113Test().main()
diff --git a/qa/rpc-tests/bip68-sequence.py b/qa/rpc-tests/bip68-sequence.py
new file mode 100755
index 000000000..4f4a39f15
--- /dev/null
+++ b/qa/rpc-tests/bip68-sequence.py
@@ -0,0 +1,425 @@
+#!/usr/bin/env python2
+# Copyright (c) 2014-2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Test BIP68 implementation
+#
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.script import *
+from test_framework.mininode import *
+from test_framework.blocktools import *
+
+COIN = 100000000
+SEQUENCE_LOCKTIME_DISABLE_FLAG = (1<<31)
+SEQUENCE_LOCKTIME_TYPE_FLAG = (1<<22) # this means use time (0 means height)
+SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift
+SEQUENCE_LOCKTIME_MASK = 0x0000ffff
+
+# RPC error for non-BIP68 final transactions
+NOT_FINAL_ERROR = "64: non-BIP68-final"
+
+class BIP68Test(BitcoinTestFramework):
+
+ def setup_network(self):
+ self.nodes = []
+ self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-blockprioritysize=0"]))
+ self.nodes.append(start_node(1, self.options.tmpdir, ["-debug", "-blockprioritysize=0", "-acceptnonstdtxn=0"]))
+ self.is_network_split = False
+ self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
+ connect_nodes(self.nodes[0], 1)
+
+ def run_test(self):
+ # Generate some coins
+ self.nodes[0].generate(110)
+
+ print "Running test disable flag"
+ self.test_disable_flag()
+
+ print "Running test sequence-lock-confirmed-inputs"
+ self.test_sequence_lock_confirmed_inputs()
+
+ print "Running test sequence-lock-unconfirmed-inputs"
+ self.test_sequence_lock_unconfirmed_inputs()
+
+ print "Running test BIP68 not consensus before versionbits activation"
+ self.test_bip68_not_consensus()
+
+ print "Verifying nVersion=2 transactions aren't standard"
+ self.test_version2_relay(before_activation=True)
+
+ print "Activating BIP68 (and 112/113)"
+ self.activateCSV()
+
+ print "Verifying nVersion=2 transactions are now standard"
+ self.test_version2_relay(before_activation=False)
+
+ print "Passed\n"
+
+ # Test that BIP68 is not in effect if tx version is 1, or if
+ # the first sequence bit is set.
+ def test_disable_flag(self):
+ # Create some unconfirmed inputs
+ new_addr = self.nodes[0].getnewaddress()
+ self.nodes[0].sendtoaddress(new_addr, 2) # send 2 BTC
+
+ utxos = self.nodes[0].listunspent(0, 0)
+ assert(len(utxos) > 0)
+
+ utxo = utxos[0]
+
+ tx1 = CTransaction()
+ value = int(satoshi_round(utxo["amount"] - self.relayfee)*COIN)
+
+ # Check that the disable flag disables relative locktime.
+ # If sequence locks were used, this would require 1 block for the
+ # input to mature.
+ sequence_value = SEQUENCE_LOCKTIME_DISABLE_FLAG | 1
+ tx1.vin = [CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), nSequence=sequence_value)]
+ tx1.vout = [CTxOut(value, CScript([b'a']))]
+
+ tx1_signed = self.nodes[0].signrawtransaction(ToHex(tx1))["hex"]
+ tx1_id = self.nodes[0].sendrawtransaction(tx1_signed)
+ tx1_id = int(tx1_id, 16)
+
+ # This transaction will enable sequence-locks, so this transaction should
+ # fail
+ tx2 = CTransaction()
+ tx2.nVersion = 2
+ sequence_value = sequence_value & 0x7fffffff
+ tx2.vin = [CTxIn(COutPoint(tx1_id, 0), nSequence=sequence_value)]
+ tx2.vout = [CTxOut(int(value-self.relayfee*COIN), CScript([b'a']))]
+ tx2.rehash()
+
+ try:
+ self.nodes[0].sendrawtransaction(ToHex(tx2))
+ except JSONRPCException as exp:
+ assert_equal(exp.error["message"], NOT_FINAL_ERROR)
+ else:
+ assert(False)
+
+ # Setting the version back down to 1 should disable the sequence lock,
+ # so this should be accepted.
+ tx2.nVersion = 1
+
+ self.nodes[0].sendrawtransaction(ToHex(tx2))
+
+ # Calculate the median time past of a prior block ("confirmations" before
+ # the current tip).
+ def get_median_time_past(self, confirmations):
+ block_hash = self.nodes[0].getblockhash(self.nodes[0].getblockcount()-confirmations)
+ return self.nodes[0].getblockheader(block_hash)["mediantime"]
+
+ # Test that sequence locks are respected for transactions spending confirmed inputs.
+ def test_sequence_lock_confirmed_inputs(self):
+ # Create lots of confirmed utxos, and use them to generate lots of random
+ # transactions.
+ max_outputs = 50
+ addresses = []
+ while len(addresses) < max_outputs:
+ addresses.append(self.nodes[0].getnewaddress())
+        import random
+        while len(self.nodes[0].listunspent()) < 200:
+            random.shuffle(addresses)
+ num_outputs = random.randint(1, max_outputs)
+ outputs = {}
+ for i in xrange(num_outputs):
+ outputs[addresses[i]] = random.randint(1, 20)*0.01
+ self.nodes[0].sendmany("", outputs)
+ self.nodes[0].generate(1)
+
+ utxos = self.nodes[0].listunspent()
+
+ # Try creating a lot of random transactions.
+ # Each time, choose a random number of inputs, and randomly set
+ # some of those inputs to be sequence locked (and randomly choose
+ # between height/time locking). Small random chance of making the locks
+ # all pass.
+ for i in xrange(400):
+ # Randomly choose up to 10 inputs
+ num_inputs = random.randint(1, 10)
+ random.shuffle(utxos)
+
+ # Track whether any sequence locks used should fail
+ should_pass = True
+
+ # Track whether this transaction was built with sequence locks
+ using_sequence_locks = False
+
+ tx = CTransaction()
+ tx.nVersion = 2
+ value = 0
+ for j in xrange(num_inputs):
+ sequence_value = 0xfffffffe # this disables sequence locks
+
+ # 50% chance we enable sequence locks
+ if random.randint(0,1):
+ using_sequence_locks = True
+
+ # 10% of the time, make the input sequence value pass
+ input_will_pass = (random.randint(1,10) == 1)
+ sequence_value = utxos[j]["confirmations"]
+ if not input_will_pass:
+ sequence_value += 1
+ should_pass = False
+
+ # Figure out what the median-time-past was for the confirmed input
+ # Note that if an input has N confirmations, we're going back N blocks
+ # from the tip so that we're looking up MTP of the block
+ # PRIOR to the one the input appears in, as per the BIP68 spec.
+ orig_time = self.get_median_time_past(utxos[j]["confirmations"])
+ cur_time = self.get_median_time_past(0) # MTP of the tip
+
+ # can only timelock this input if it's not too old -- otherwise use height
+ can_time_lock = True
+ if ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) >= SEQUENCE_LOCKTIME_MASK:
+ can_time_lock = False
+
+ # if time-lockable, then 50% chance we make this a time lock
+ if random.randint(0,1) and can_time_lock:
+ # Find first time-lock value that fails, or latest one that succeeds
+ time_delta = sequence_value << SEQUENCE_LOCKTIME_GRANULARITY
+ if input_will_pass and time_delta > cur_time - orig_time:
+ sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)
+ elif (not input_will_pass and time_delta <= cur_time - orig_time):
+ sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)+1
+ sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
+ tx.vin.append(CTxIn(COutPoint(int(utxos[j]["txid"], 16), utxos[j]["vout"]), nSequence=sequence_value))
+ value += utxos[j]["amount"]*COIN
+ # Overestimate the size of the tx - signatures should be less than 120 bytes, and leave 50 for the output
+ tx_size = len(ToHex(tx))//2 + 120*num_inputs + 50
+ tx.vout.append(CTxOut(int(value-self.relayfee*tx_size*COIN/1000), CScript([b'a'])))
+ rawtx = self.nodes[0].signrawtransaction(ToHex(tx))["hex"]
+
+ try:
+ self.nodes[0].sendrawtransaction(rawtx)
+ except JSONRPCException as exp:
+ assert(not should_pass and using_sequence_locks)
+ assert_equal(exp.error["message"], NOT_FINAL_ERROR)
+ else:
+ assert(should_pass or not using_sequence_locks)
+ # Recalculate utxos if we successfully sent the transaction
+ utxos = self.nodes[0].listunspent()
+
+ # Test that sequence locks on unconfirmed inputs must have nSequence
+ # height or time of 0 to be accepted.
+ # Then test that BIP68-invalid transactions are removed from the mempool
+ # after a reorg.
+ def test_sequence_lock_unconfirmed_inputs(self):
+ # Store height so we can easily reset the chain at the end of the test
+ cur_height = self.nodes[0].getblockcount()
+
+ # Create a mempool tx.
+ txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
+ tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
+ tx1.rehash()
+
+ # Anyone-can-spend mempool tx.
+ # Sequence lock of 0 should pass.
+ tx2 = CTransaction()
+ tx2.nVersion = 2
+ tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
+ tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
+ tx2_raw = self.nodes[0].signrawtransaction(ToHex(tx2))["hex"]
+ tx2 = FromHex(tx2, tx2_raw)
+ tx2.rehash()
+
+ self.nodes[0].sendrawtransaction(tx2_raw)
+
+ # Create a spend of the 0th output of orig_tx with a sequence lock
+ # of 1, and test what happens when submitting.
+ # orig_tx.vout[0] must be an anyone-can-spend output
+ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock):
+ sequence_value = 1
+ if not use_height_lock:
+ sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
+
+ tx = CTransaction()
+ tx.nVersion = 2
+ tx.vin = [CTxIn(COutPoint(orig_tx.sha256, 0), nSequence=sequence_value)]
+ tx.vout = [CTxOut(int(orig_tx.vout[0].nValue - relayfee*COIN), CScript([b'a']))]
+ tx.rehash()
+
+ try:
+ node.sendrawtransaction(ToHex(tx))
+ except JSONRPCException as exp:
+ assert_equal(exp.error["message"], NOT_FINAL_ERROR)
+ assert(orig_tx.hash in node.getrawmempool())
+ else:
+ # orig_tx must not be in mempool
+ assert(orig_tx.hash not in node.getrawmempool())
+ return tx
+
+ test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True)
+ test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
+
+ # Now mine some blocks, but make sure tx2 doesn't get mined.
+ # Use prioritisetransaction to lower the effective feerate to 0
+ self.nodes[0].prioritisetransaction(tx2.hash, -1e15, int(-self.relayfee*COIN))
+ cur_time = int(time.time())
+ for i in xrange(10):
+ self.nodes[0].setmocktime(cur_time + 600)
+ self.nodes[0].generate(1)
+ cur_time += 600
+
+ assert(tx2.hash in self.nodes[0].getrawmempool())
+
+ test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True)
+ test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
+
+ # Mine tx2, and then try again
+ self.nodes[0].prioritisetransaction(tx2.hash, 1e15, int(self.relayfee*COIN))
+
+ # Advance the time on the node so that we can test timelocks
+ self.nodes[0].setmocktime(cur_time+600)
+ self.nodes[0].generate(1)
+ assert(tx2.hash not in self.nodes[0].getrawmempool())
+
+ # Now that tx2 is not in the mempool, a sequence locked spend should
+ # succeed
+ tx3 = test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
+ assert(tx3.hash in self.nodes[0].getrawmempool())
+
+ self.nodes[0].generate(1)
+ assert(tx3.hash not in self.nodes[0].getrawmempool())
+
+ # One more test, this time using height locks
+ tx4 = test_nonzero_locks(tx3, self.nodes[0], self.relayfee, use_height_lock=True)
+ assert(tx4.hash in self.nodes[0].getrawmempool())
+
+ # Now try combining confirmed and unconfirmed inputs
+ tx5 = test_nonzero_locks(tx4, self.nodes[0], self.relayfee, use_height_lock=True)
+ assert(tx5.hash not in self.nodes[0].getrawmempool())
+
+ utxos = self.nodes[0].listunspent()
+ tx5.vin.append(CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]), nSequence=1))
+ tx5.vout[0].nValue += int(utxos[0]["amount"]*COIN)
+ raw_tx5 = self.nodes[0].signrawtransaction(ToHex(tx5))["hex"]
+
+ try:
+ self.nodes[0].sendrawtransaction(raw_tx5)
+ except JSONRPCException as exp:
+ assert_equal(exp.error["message"], NOT_FINAL_ERROR)
+ else:
+ assert(False)
+
+ # Test mempool-BIP68 consistency after reorg
+ #
+ # State of the transactions in the last blocks:
+ # ... -> [ tx2 ] -> [ tx3 ]
+ # tip-1 tip
+ # And currently tx4 is in the mempool.
+ #
+ # If we invalidate the tip, tx3 should get added to the mempool, causing
+ # tx4 to be removed (fails sequence-lock).
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ assert(tx4.hash not in self.nodes[0].getrawmempool())
+ assert(tx3.hash in self.nodes[0].getrawmempool())
+
+ # Now mine 2 empty blocks to reorg out the current tip (labeled tip-1 in
+ # diagram above).
+ # This would cause tx2 to be added back to the mempool, which in turn causes
+ # tx3 to be removed.
+ tip = int(self.nodes[0].getblockhash(self.nodes[0].getblockcount()-1), 16)
+ height = self.nodes[0].getblockcount()
+ for i in xrange(2):
+ block = create_block(tip, create_coinbase(height), cur_time)
+ block.nVersion = 3
+ block.rehash()
+ block.solve()
+ tip = block.sha256
+ height += 1
+ self.nodes[0].submitblock(ToHex(block))
+ cur_time += 1
+
+ mempool = self.nodes[0].getrawmempool()
+ assert(tx3.hash not in mempool)
+ assert(tx2.hash in mempool)
+
+ # Reset the chain and get rid of the mocktimed-blocks
+ self.nodes[0].setmocktime(0)
+ self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height+1))
+ self.nodes[0].generate(10)
+
+ # Make sure that BIP68 isn't being used to validate blocks, prior to
+ # versionbits activation. If more blocks are mined prior to this test
+ # being run, then it's possible the test has activated the soft fork, and
+ # this test should be moved to run earlier, or deleted.
+ def test_bip68_not_consensus(self):
+ assert(get_bip9_status(self.nodes[0], 'csv')['status'] != 'active')
+ txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
+
+ tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
+ tx1.rehash()
+
+ # Make an anyone-can-spend transaction
+ tx2 = CTransaction()
+ tx2.nVersion = 1
+ tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
+ tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
+
+ # sign tx2
+ tx2_raw = self.nodes[0].signrawtransaction(ToHex(tx2))["hex"]
+ tx2 = FromHex(tx2, tx2_raw)
+ tx2.rehash()
+
+ self.nodes[0].sendrawtransaction(ToHex(tx2))
+
+ # Now make an invalid spend of tx2 according to BIP68
+ sequence_value = 100 # 100 block relative locktime
+
+ tx3 = CTransaction()
+ tx3.nVersion = 2
+ tx3.vin = [CTxIn(COutPoint(tx2.sha256, 0), nSequence=sequence_value)]
+ tx3.vout = [CTxOut(int(tx2.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
+ tx3.rehash()
+
+ try:
+ self.nodes[0].sendrawtransaction(ToHex(tx3))
+ except JSONRPCException as exp:
+ assert_equal(exp.error["message"], NOT_FINAL_ERROR)
+ else:
+ assert(False)
+
+ # make a block that violates bip68; ensure that the tip updates
+ tip = int(self.nodes[0].getbestblockhash(), 16)
+ block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1))
+ block.nVersion = 3
+ block.vtx.extend([tx1, tx2, tx3])
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+
+ self.nodes[0].submitblock(ToHex(block))
+ assert_equal(self.nodes[0].getbestblockhash(), block.hash)
+
+ def activateCSV(self):
+ # activation should happen at block height 432 (3 periods)
+ min_activation_height = 432
+ height = self.nodes[0].getblockcount()
+        assert(height < min_activation_height)
+        self.nodes[0].generate(min_activation_height - height)
+ assert(get_bip9_status(self.nodes[0], 'csv')['status'] == 'active')
+ sync_blocks(self.nodes)
+
+ # Use self.nodes[1] to test standardness relay policy
+ def test_version2_relay(self, before_activation):
+ inputs = [ ]
+ outputs = { self.nodes[1].getnewaddress() : 1.0 }
+ rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
+ rawtxfund = self.nodes[1].fundrawtransaction(rawtx)['hex']
+ tx = FromHex(CTransaction(), rawtxfund)
+ tx.nVersion = 2
+ tx_signed = self.nodes[1].signrawtransaction(ToHex(tx))["hex"]
+ try:
+ tx_id = self.nodes[1].sendrawtransaction(tx_signed)
+ assert(before_activation == False)
+        except JSONRPCException:
+ assert(before_activation)
+
+
+if __name__ == '__main__':
+ BIP68Test().main()
diff --git a/qa/rpc-tests/bip9-softforks.py b/qa/rpc-tests/bip9-softforks.py
new file mode 100755
index 000000000..97c6f3f3a
--- /dev/null
+++ b/qa/rpc-tests/bip9-softforks.py
@@ -0,0 +1,220 @@
+#!/usr/bin/env python2
+# Copyright (c) 2015 The Bitcoin Core developers
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.mininode import CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.comptool import TestInstance, TestManager
+from test_framework.script import CScript, OP_1NEGATE, OP_NOP3, OP_DROP
+from binascii import hexlify, unhexlify
+import cStringIO
+import time
+import itertools
+
+'''
+This test is meant to exercise the BIP9 version bits soft fork machinery
+Connect to a single node.
+regtest lock-in with 108/144 block signalling
+activation after a further 144 blocks
+mine 2 blocks and save coinbases for later use
+mine 141 blocks to transition from DEFINED to STARTED
+mine 100 blocks signalling readiness and 44 not in order to fail to change state this period
+mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN)
+mine a further 143 blocks (LOCKED_IN)
+test that enforcement has not triggered (which triggers ACTIVE)
+test that enforcement has triggered
+'''
+
+
+
+class BIP9SoftForksTest(ComparisonTestFramework):
+
+ def __init__(self):
+ self.num_nodes = 1
+
+ def setup_network(self):
+ self.nodes = start_nodes(1, self.options.tmpdir,
+ extra_args=[['-debug', '-whitelist=127.0.0.1']],
+ binary=[self.options.testbinary])
+
+ def run_test(self):
+ self.test = TestManager(self, self.options.tmpdir)
+ self.test.add_all_connections(self.nodes)
+ NetworkThread().start() # Start up network handling in another thread
+ self.test.run()
+
+ def create_transaction(self, node, coinbase, to_address, amount):
+ from_txid = node.getblock(coinbase)['tx'][0]
+ inputs = [{ "txid" : from_txid, "vout" : 0}]
+ outputs = { to_address : amount }
+ rawtx = node.createrawtransaction(inputs, outputs)
+ tx = CTransaction()
+ f = cStringIO.StringIO(unhexlify(rawtx))
+ tx.deserialize(f)
+ tx.nVersion = 2
+ return tx
+
+ def sign_transaction(self, node, tx):
+ signresult = node.signrawtransaction(hexlify(tx.serialize()))
+ tx = CTransaction()
+ f = cStringIO.StringIO(unhexlify(signresult['hex']))
+ tx.deserialize(f)
+ return tx
+
+ def generate_blocks(self, number, version, test_blocks = []):
+ for i in xrange(number):
+ block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
+ block.nVersion = version
+ block.rehash()
+ block.solve()
+ test_blocks.append([block, True])
+ self.last_block_time += 1
+ self.tip = block.sha256
+ self.height += 1
+ return test_blocks
+
+ def get_bip9_status(self, key):
+ info = self.nodes[0].getblockchaininfo()
+ for row in info['bip9_softforks']:
+ if row['id'] == key:
+ return row
+ raise IndexError ('key:"%s" not found' % key)
+
+
+ def test_BIP(self, bipName, activated_version, invalidate, invalidatePostSignature):
+ # generate some coins for later
+ self.coinbase_blocks = self.nodes[0].generate(2)
+ self.height = 3 # height of the next block to build
+ self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+ self.nodeaddress = self.nodes[0].getnewaddress()
+ self.last_block_time = int(time.time())
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
+
+ # Test 1
+ # Advance from DEFINED to STARTED
+ test_blocks = self.generate_blocks(141, 4)
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'started')
+
+ # Test 2
+ # Fail to achieve LOCKED_IN: only 100 out of 144 blocks signal the deployment bit
+ # using a variety of bits to simulate multiple parallel softforks
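+ # 50 + 50 = 100 signalling blocks is below the regtest threshold of 108,
+ # so the deployment must still be STARTED at the end of this period.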
+ test_blocks = self.generate_blocks(50, activated_version) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
+ test_blocks = self.generate_blocks(24, 4, test_blocks) # 0x20010000 (signalling not)
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'started')
+
+ # Test 3
+ # 108 out of 144 signal bit 1 to achieve LOCKED_IN
+ # using a variety of bits to simulate multiple parallel softforks
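+ # 58 + 50 = 108 signalling blocks meets the regtest threshold exactly,
+ # so the deployment moves from STARTED to LOCKED_IN at the period boundary.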
+ test_blocks = self.generate_blocks(58, activated_version) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
+ test_blocks = self.generate_blocks(10, 4, test_blocks) # 0x20010000 (signalling not)
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
+
+ # Test 4
+ # 143 more version 4 blocks (waiting period - 1)
+ test_blocks = self.generate_blocks(143, 4)
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
+
+ # Test 5
+ # Check that the new rule is enforced
+ spendtx = self.create_transaction(self.nodes[0],
+ self.coinbase_blocks[0], self.nodeaddress, 1.0)
+ invalidate(spendtx)
+ spendtx = self.sign_transaction(self.nodes[0], spendtx)
+ spendtx.rehash()
+ invalidatePostSignature(spendtx)
+ spendtx.rehash()
+ block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
+ block.nVersion = activated_version
+ block.vtx.append(spendtx)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+
+ self.last_block_time += 1
+ self.tip = block.sha256
+ self.height += 1
+ yield TestInstance([[block, True]])
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'active')
+
+ # Test 6
+ # Check that the new sequence lock rules are enforced
+ spendtx = self.create_transaction(self.nodes[0],
+ self.coinbase_blocks[1], self.nodeaddress, 1.0)
+ invalidate(spendtx)
+ spendtx = self.sign_transaction(self.nodes[0], spendtx)
+ spendtx.rehash()
+ invalidatePostSignature(spendtx)
+ spendtx.rehash()
+
+ block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
+ block.nVersion = 5
+ block.vtx.append(spendtx)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+ self.last_block_time += 1
+ yield TestInstance([[block, False]])
+
+ # Restart all
+ stop_nodes(self.nodes)
+ wait_bitcoinds()
+ shutil.rmtree(self.options.tmpdir)
+ self.setup_chain()
+ self.setup_network()
+ self.test.clear_all_connections()
+ self.test.add_all_connections(self.nodes)
+ NetworkThread().start() # Start up network handling in another thread
+
+
+
+ def get_tests(self):
+ for test in itertools.chain(
+ self.test_BIP('csv', 536870913, self.sequence_lock_invalidate, self.donothing),
+ self.test_BIP('csv', 536870913, self.mtp_invalidate, self.donothing),
+ self.test_BIP('csv', 536870913, self.donothing, self.csv_invalidate)
+ ):
+ yield test
+
+ def donothing(self, tx):
+ return
+
+ def csv_invalidate(self, tx):
+ '''Modify the signature in vin 0 of the tx to fail CSV
+ Prepends -1 CSV DROP in the scriptSig itself.
+ '''
+ tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP3, OP_DROP] +
+ list(CScript(tx.vin[0].scriptSig)))
+
+ def sequence_lock_invalidate(self, tx):
+ '''Modify the nSequence to make it fail once the sequence lock rule is activated (high timespan)
+ '''
+ tx.vin[0].nSequence = 0x00FFFFFF
+ tx.nLockTime = 0
+
+ def mtp_invalidate(self, tx):
+ '''Modify the nLockTime to make it fail once the MTP rule is activated
+ '''
+ # Disable Sequence lock, Activate nLockTime
+ tx.vin[0].nSequence = 0x90FFFFFF
+ tx.nLockTime = self.last_block_time
+
+if __name__ == '__main__':
+ BIP9SoftForksTest().main()
\ No newline at end of file
diff --git a/qa/rpc-tests/multi_rpc.py b/qa/rpc-tests/multi_rpc.py
index d41ab5ed1..d4d99ead7 100755
--- a/qa/rpc-tests/multi_rpc.py
+++ b/qa/rpc-tests/multi_rpc.py
@@ -44,7 +44,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
#Old authpair
authpair = url.username + ':' + url.password
- #New authpair generated via contrib/rpcuser tool
+ #New authpair generated via share/rpcuser tool
rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
diff --git a/qa/rpc-tests/p2p-versionbits-warning.py b/qa/rpc-tests/p2p-versionbits-warning.py
new file mode 100755
index 000000000..061dcbf0e
--- /dev/null
+++ b/qa/rpc-tests/p2p-versionbits-warning.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python2
+# Copyright (c) 2016 The Bitcoin Core developers
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.mininode import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import time
+from test_framework.blocktools import create_block, create_coinbase
+
+'''
+Test version bits' warning system.
+
+Generate chains with block versions that appear to be signalling unknown
+soft-forks, and test that warning alerts are generated.
+'''
+
+VB_PERIOD = 144 # versionbits period length for regtest
+VB_THRESHOLD = 108 # versionbits activation threshold for regtest
+VB_TOP_BITS = 0x20000000
+VB_UNKNOWN_BIT = 27 # Choose a bit unassigned to any deployment
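+# An "unknown upgrade" block version is therefore built as
+# VB_TOP_BITS | (1 << VB_UNKNOWN_BIT) == 0x20000000 | 0x08000000 == 0x28000000;
+# since bit 27 is not assigned to any deployment, the node should report it
+# as an unknown soft fork and raise the versionbits warning tested below.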
+
+# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
+# p2p messages to a node, generating the messages in the main testing logic.
+class TestNode(NodeConnCB):
+ def __init__(self):
+ NodeConnCB.__init__(self)
+ self.connection = None
+ self.ping_counter = 1
+ self.last_pong = msg_pong()
+
+ def add_connection(self, conn):
+ self.connection = conn
+
+ def on_inv(self, conn, message):
+ pass
+
+ # Wrapper for the NodeConn's send_message function
+ def send_message(self, message):
+ self.connection.send_message(message)
+
+ def on_pong(self, conn, message):
+ self.last_pong = message
+
+ # Sync up with the node after delivery of a block
+ def sync_with_ping(self, timeout=30):
+ self.connection.send_message(msg_ping(nonce=self.ping_counter))
+ received_pong = False
+ sleep_time = 0.05
+ while not received_pong and timeout > 0:
+ time.sleep(sleep_time)
+ timeout -= sleep_time
+ with mininode_lock:
+ if self.last_pong.nonce == self.ping_counter:
+ received_pong = True
+ self.ping_counter += 1
+ return received_pong
+
+
+class VersionBitsWarningTest(BitcoinTestFramework):
+ def setup_chain(self):
+ initialize_chain_clean(self.options.tmpdir, 1)
+
+ def setup_network(self):
+ self.nodes = []
+ self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
+ # Open and close to create zero-length file
+ with open(self.alert_filename, 'w') as f:
+ pass
+ self.node_options = ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]
+ self.nodes.append(start_node(0, self.options.tmpdir, self.node_options))
+
+ import re
+ self.vb_pattern = re.compile("^Warning.*versionbit")
+
+ # Send numblocks blocks via peer with nVersionToUse set.
+ def send_blocks_with_version(self, peer, numblocks, nVersionToUse):
+ tip = self.nodes[0].getbestblockhash()
+ height = self.nodes[0].getblockcount()
+ block_time = self.nodes[0].getblockheader(tip)["time"]+1
+ tip = int(tip, 16)
+
+ for i in xrange(numblocks):
+ block = create_block(tip, create_coinbase(height+1), block_time)
+ block.nVersion = nVersionToUse
+ block.solve()
+ peer.send_message(msg_block(block))
+ block_time += 1
+ height += 1
+ tip = block.sha256
+ peer.sync_with_ping()
+
+ def test_versionbits_in_alert_file(self):
+ with open(self.alert_filename, 'r') as f:
+ alert_text = f.read()
+ assert(self.vb_pattern.match(alert_text))
+
+ def run_test(self):
+ # Setup the p2p connection and start up the network thread.
+ test_node = TestNode()
+
+ connections = []
+ connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
+ test_node.add_connection(connections[0])
+
+ NetworkThread().start() # Start up network handling in another thread
+
+ # Test logic begins here
+ test_node.wait_for_verack()
+
+ # 1. Have the node mine one period worth of blocks
+ self.nodes[0].generate(VB_PERIOD)
+
+ # 2. Now build one period of blocks on the tip, with < VB_THRESHOLD
+ # blocks signaling some unknown bit.
+ nVersion = VB_TOP_BITS | (1<<VB_UNKNOWN_BIT)
+ self.send_blocks_with_version(test_node, VB_THRESHOLD-1, nVersion)
+
+ # Fill rest of period with regular version blocks
+ self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD + 1)
+ # Check that we're not getting any versionbit-related errors in getinfo()
+ assert(not self.vb_pattern.match(self.nodes[0].getinfo()["errors"]))
+
+ # 3. Now build one period of blocks with >= VB_THRESHOLD blocks signaling
+ # some unknown bit
+ self.send_blocks_with_version(test_node, VB_THRESHOLD, nVersion)
+ self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD)
+ # Might not get a versionbits-related alert yet, as we should
+ # have gotten a different alert due to more than 51/100 blocks
+ # being of unexpected version.
+ # Check that getinfo() shows some kind of error.
+ assert(len(self.nodes[0].getinfo()["errors"]) != 0)
+
+ # Mine a period worth of expected blocks so the generic block-version warning
+ # is cleared, and restart the node. This should move the versionbit state
+ # to ACTIVE.
+ self.nodes[0].generate(VB_PERIOD)
+ stop_node(self.nodes[0], 0)
+ wait_bitcoinds()
+ # Empty out the alert file
+ with open(self.alert_filename, 'w') as f:
+ pass
+ self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
+
+ # Connecting one block should be enough to generate an error.
+ self.nodes[0].generate(1)
+ assert(len(self.nodes[0].getinfo()["errors"]) != 0)
+ stop_node(self.nodes[0], 0)
+ wait_bitcoinds()
+ self.test_versionbits_in_alert_file()
+
+ # Test framework expects the node to still be running...
+ self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
+
+
+if __name__ == '__main__':
+ VersionBitsWarningTest().main()
diff --git a/qa/rpc-tests/test_framework/comptool.py b/qa/rpc-tests/test_framework/comptool.py
index f9fdea65a..2468b4835 100755
--- a/qa/rpc-tests/test_framework/comptool.py
+++ b/qa/rpc-tests/test_framework/comptool.py
@@ -193,6 +193,10 @@ class TestManager(object):
# associated NodeConn
test_node.add_connection(self.connections[-1])
+ def clear_all_connections(self):
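+ # Drop all existing connections and test nodes; used by tests such as
+ # bip9-softforks.py that restart the node and then re-add fresh connections.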
+ self.connections = []
+ self.test_nodes = []
+
def wait_for_disconnections(self):
def disconnected():
return all(node.closed for node in self.test_nodes)
diff --git a/qa/rpc-tests/test_framework/mininode.py b/qa/rpc-tests/test_framework/mininode.py
index 28b9ac928..891d50b1e 100755
--- a/qa/rpc-tests/test_framework/mininode.py
+++ b/qa/rpc-tests/test_framework/mininode.py
@@ -235,6 +235,14 @@ def ser_int_vector(l):
r += struct.pack("<i", i)
return r

+# Deserialize from a hex string representation (eg from RPC)
+def FromHex(obj, hex_string):
+ obj.deserialize(cStringIO.StringIO(unhexlify(hex_string)))
+ return obj
+
+# Convert a binary-serializable object to hex (eg for submission via RPC)
+def ToHex(obj):
+ return hexlify(obj.serialize()).decode('utf-8')
-
+ ./rpcuser.py
diff --git a/src/Makefile.am b/src/Makefile.am
index 6be457fdd..667aed5bc 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -178,6 +178,7 @@ BITCOIN_CORE_H = \
utiltime.h \
validationinterface.h \
version.h \
+ versionbits.h \
wallet/crypter.h \
wallet/db.h \
wallet/wallet.h \
@@ -233,6 +234,7 @@ libbitcoin_server_a_SOURCES = \
txdb.cpp \
txmempool.cpp \
validationinterface.cpp \
+ versionbits.cpp \
$(BITCOIN_CORE_H)
if ENABLE_ZMQ
diff --git a/src/Makefile.test.include b/src/Makefile.test.include
index f92528bf1..bcd27e6c1 100644
--- a/src/Makefile.test.include
+++ b/src/Makefile.test.include
@@ -82,6 +82,7 @@ BITCOIN_TESTS =\
test/timedata_tests.cpp \
test/transaction_tests.cpp \
test/txvalidationcache_tests.cpp \
+ test/versionbits_tests.cpp \
test/uint256_tests.cpp \
test/univalue_tests.cpp \
test/util_tests.cpp
diff --git a/src/chain.h b/src/chain.h
index b9b1b9306..ae6c4338d 100644
--- a/src/chain.h
+++ b/src/chain.h
@@ -14,8 +14,6 @@
#include
-#include
-
struct CDiskBlockPos
{
int nFile;
diff --git a/src/chainparams.cpp b/src/chainparams.cpp
index d90cd349e..caf62bbfb 100644
--- a/src/chainparams.cpp
+++ b/src/chainparams.cpp
@@ -91,6 +91,17 @@ public:
consensus.nPowTargetSpacing = 2.5 * 60; // Dash: 2.5 minutes
consensus.fPowAllowMinDifficultyBlocks = false;
consensus.fPowNoRetargeting = false;
+ consensus.nRuleChangeActivationThreshold = 1916; // 95% of 2016
+ consensus.nMinerConfirmationWindow = 2016; // nPowTargetTimespan / nPowTargetSpacing
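+ // At Dash's 2.5-minute block spacing, a 2016-block window is roughly
+ // 3.5 days, compared with about two weeks for Bitcoin's 10-minute spacing.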
+ consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].bit = 28;
+ consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nStartTime = 1199145601; // January 1, 2008
+ consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nTimeout = 1230767999; // December 31, 2008
+
+ // Deployment of BIP68, BIP112, and BIP113.
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].bit = 0;
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 1462060800; // May 1st, 2016
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 1493596800; // May 1st, 2017
+
/**
* The message start string is designed to be unlikely to occur in normal data.
* The characters are rarely used upper ASCII, not valid as UTF-8, and produce
@@ -195,6 +206,16 @@ public:
consensus.nPowTargetSpacing = 2.5 * 60; // Dash: 2.5 minutes
consensus.fPowAllowMinDifficultyBlocks = true;
consensus.fPowNoRetargeting = false;
+ consensus.nRuleChangeActivationThreshold = 1512; // 75% for testchains
+ consensus.nMinerConfirmationWindow = 2016; // nPowTargetTimespan / nPowTargetSpacing
+ consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].bit = 28;
+ consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nStartTime = 1199145601; // January 1, 2008
+ consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nTimeout = 1230767999; // December 31, 2008
+
+ // Deployment of BIP68, BIP112, and BIP113.
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].bit = 0;
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 1456790400; // March 1st, 2016
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 1493596800; // May 1st, 2017
pchMessageStart[0] = 0xce;
pchMessageStart[1] = 0xe2;
@@ -282,6 +303,14 @@ public:
consensus.nPowTargetSpacing = 2.5 * 60; // Dash: 2.5 minutes
consensus.fPowAllowMinDifficultyBlocks = true;
consensus.fPowNoRetargeting = true;
+ consensus.nRuleChangeActivationThreshold = 108; // 75% for testchains
+ consensus.nMinerConfirmationWindow = 144; // Faster than normal for regtest (144 instead of 2016)
+ consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].bit = 28;
+ consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nStartTime = 0;
+ consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nTimeout = 999999999999ULL;
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].bit = 0;
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 0;
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 999999999999ULL;
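+ // The 144-block window and 108-block threshold on regtest are what the
+ // bip9-softforks.py and p2p-versionbits-warning.py RPC tests rely on.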
pchMessageStart[0] = 0xfc;
pchMessageStart[1] = 0xc1;
diff --git a/src/clientversion.h b/src/clientversion.h
index acbaa6f54..f741b9d15 100644
--- a/src/clientversion.h
+++ b/src/clientversion.h
@@ -17,7 +17,7 @@
#define CLIENT_VERSION_MAJOR 0
#define CLIENT_VERSION_MINOR 12
#define CLIENT_VERSION_REVISION 1
-#define CLIENT_VERSION_BUILD 1
+#define CLIENT_VERSION_BUILD 0
//! Set to true for release, false for prerelease or test build
#define CLIENT_VERSION_IS_RELEASE true
diff --git a/src/consensus/consensus.h b/src/consensus/consensus.h
index 5a099cf53..ad9cc2617 100644
--- a/src/consensus/consensus.h
+++ b/src/consensus/consensus.h
@@ -13,8 +13,11 @@ static const unsigned int MAX_BLOCK_SIGOPS = MAX_BLOCK_SIZE/50;
/** Coinbase transaction outputs can only be spent after this number of new blocks (network rule) */
static const int COINBASE_MATURITY = 100;
-/** Flags for LockTime() */
+/** Flags for nSequence and nLockTime locks */
enum {
+ /* Interpret sequence numbers as relative lock-time constraints. */
+ LOCKTIME_VERIFY_SEQUENCE = (1 << 0),
+
/* Use GetMedianTimePast() instead of nTime for end point timestamp. */
LOCKTIME_MEDIAN_TIME_PAST = (1 << 1),
};
diff --git a/src/consensus/params.h b/src/consensus/params.h
index e38f5ea71..b860c58d7 100644
--- a/src/consensus/params.h
+++ b/src/consensus/params.h
@@ -7,8 +7,30 @@
#define BITCOIN_CONSENSUS_PARAMS_H
#include "uint256.h"
+#include