2020-03-30 14:21:47 +02:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
# Copyright (c) 2010 ArtForz -- public domain half-a-node
|
|
|
|
# Copyright (c) 2012 Jeff Garzik
|
2023-08-16 19:27:31 +02:00
|
|
|
# Copyright (c) 2010-2020 The Bitcoin Core developers
|
2020-03-30 14:21:47 +02:00
|
|
|
# Distributed under the MIT software license, see the accompanying
|
|
|
|
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
2018-03-21 16:16:28 +01:00
|
|
|
"""Bitcoin test framework primitive and message structures
|
2020-03-30 14:21:47 +02:00
|
|
|
CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
|
|
|
|
data structures that should map to corresponding structures in
|
|
|
|
bitcoin/primitives
|
|
|
|
msg_block, msg_tx, msg_headers, etc.:
|
|
|
|
data structures that represent network messages
|
2018-09-27 17:06:40 +02:00
|
|
|
|
|
|
|
ser_*, deser_*: functions that handle serialization/deserialization.
|
|
|
|
|
|
|
|
Classes use __slots__ to ensure extraneous attributes aren't accidentally added
|
|
|
|
by tests, compromising their intended effect.
|
|
|
|
"""
|
2024-04-04 19:13:03 +02:00
|
|
|
from base64 import b32decode, b32encode
|
2020-03-30 14:21:47 +02:00
|
|
|
import copy
|
|
|
|
from collections import namedtuple
|
|
|
|
import hashlib
|
|
|
|
from io import BytesIO
|
|
|
|
import random
|
|
|
|
import socket
|
|
|
|
import struct
|
|
|
|
import time
|
|
|
|
|
2023-09-09 11:01:53 +02:00
|
|
|
from test_framework.crypto.siphash import siphash256
|
2021-07-31 21:23:16 +02:00
|
|
|
from test_framework.util import assert_equal
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
import dash_hash
|
|
|
|
|
2018-08-11 12:59:08 +02:00
|
|
|
MAX_LOCATOR_SZ = 101
|
2024-08-13 11:23:45 +02:00
|
|
|
MAX_BLOCK_SIZE = 2000000
|
2020-04-21 13:25:12 +02:00
|
|
|
MAX_BLOOM_FILTER_SIZE = 36000
|
|
|
|
MAX_BLOOM_HASH_FUNCS = 50
|
2020-03-30 14:21:47 +02:00
|
|
|
|
2021-04-08 21:35:16 +02:00
|
|
|
COIN = 100000000 # 1 btc in satoshis
|
2020-03-11 16:32:59 +01:00
|
|
|
MAX_MONEY = 21000000 * COIN
|
2021-04-08 21:35:16 +02:00
|
|
|
|
|
|
|
BIP125_SEQUENCE_NUMBER = 0xfffffffd # Sequence number that is BIP 125 opt-in and BIP 68-opt-out
|
2020-03-30 14:21:47 +02:00
|
|
|
|
2020-06-24 21:57:29 +02:00
|
|
|
MAX_PROTOCOL_MESSAGE_LENGTH = 3 * 1024 * 1024 # Maximum length of incoming protocol messages
|
2024-09-06 22:13:47 +02:00
|
|
|
MAX_HEADERS_UNCOMPRESSED_RESULT = 2000 # Number of headers sent in one getheaders result
|
2024-09-06 21:29:33 +02:00
|
|
|
MAX_HEADERS_COMPRESSED_RESULT = 8000 # Number of headers2 sent in one getheaders2 result
|
2020-06-24 21:57:29 +02:00
|
|
|
MAX_INV_SIZE = 50000 # Maximum number of entries in an 'inv' protocol message
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
NODE_NETWORK = (1 << 0)
|
2017-12-19 21:16:26 +01:00
|
|
|
NODE_BLOOM = (1 << 2)
|
2020-06-01 04:58:42 +02:00
|
|
|
NODE_COMPACT_FILTERS = (1 << 6)
|
2017-12-19 21:16:26 +01:00
|
|
|
NODE_NETWORK_LIMITED = (1 << 10)
|
2022-03-11 20:39:12 +01:00
|
|
|
NODE_HEADERS_COMPRESSED = (1 << 11)
|
2024-10-09 14:20:57 +02:00
|
|
|
NODE_P2P_V2 = (1 << 12)
|
2020-03-30 14:21:47 +02:00
|
|
|
|
2022-09-19 21:22:09 +02:00
|
|
|
MSG_TX = 1
|
|
|
|
MSG_BLOCK = 2
|
2020-03-30 21:27:54 +02:00
|
|
|
MSG_FILTERED_BLOCK = 3
|
2024-08-22 20:49:40 +02:00
|
|
|
MSG_GOVERNANCE_OBJECT = 17
|
|
|
|
MSG_GOVERNANCE_OBJECT_VOTE = 18
|
2022-09-20 09:52:32 +02:00
|
|
|
MSG_CMPCT_BLOCK = 20
|
2022-09-19 21:22:09 +02:00
|
|
|
MSG_TYPE_MASK = 0xffffffff >> 2
|
|
|
|
|
2021-09-19 06:31:43 +02:00
|
|
|
FILTER_TYPE_BASIC = 0
|
|
|
|
|
2024-10-24 16:23:01 +02:00
|
|
|
MAGIC_BYTES = {
|
|
|
|
"mainnet": b"\xbf\x0c\x6b\xbd", # mainnet
|
|
|
|
"testnet3": b"\xce\xe2\xca\xff", # testnet3
|
|
|
|
"regtest": b"\xfc\xc1\xb7\xdc", # regtest
|
|
|
|
"devnet": b"\xe2\xca\xff\xce", # devnet
|
|
|
|
}
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
def sha256(s):
    """Return the single SHA-256 digest of the byte string s."""
    hasher = hashlib.sha256()
    hasher.update(s)
    return hasher.digest()
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
|
|
|
|
def hash256(s):
    """Return SHA-256d (double SHA-256) of s, the Bitcoin-style hash."""
    inner = sha256(s)
    return sha256(inner)
|
|
|
|
|
|
|
|
def dashhash(s):
    """Return the Dash X11 proof-of-work hash of byte string s.

    Delegates to the external dash_hash native extension module.
    """
    return dash_hash.getPoWHash(s)
|
|
|
|
|
|
|
|
def ser_compact_size(l):
    """Serialize an integer in Bitcoin CompactSize format.

    Values below 253 fit in one byte; larger values are a marker byte
    (253/254/255) followed by a 2-, 4- or 8-byte little-endian integer.
    """
    if l < 253:
        return struct.pack("B", l)
    if l < 0x10000:
        return struct.pack("<BH", 253, l)
    if l < 0x100000000:
        return struct.pack("<BI", 254, l)
    return struct.pack("<BQ", 255, l)
|
|
|
|
|
2023-04-25 12:30:50 +02:00
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
def deser_compact_size(f):
    """Read a CompactSize-encoded integer from stream f."""
    marker = struct.unpack("<B", f.read(1))[0]
    if marker < 253:
        return marker
    # Marker byte selects the width of the little-endian value that follows.
    fmt = {253: "<H", 254: "<I", 255: "<Q"}[marker]
    return struct.unpack(fmt, f.read(struct.calcsize(fmt)))[0]
|
|
|
|
|
2023-04-25 12:30:50 +02:00
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
def deser_string(f):
    """Read a CompactSize-length-prefixed byte string from stream f."""
    length = deser_compact_size(f)
    return f.read(length)
|
|
|
|
|
2023-04-25 12:30:50 +02:00
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
def ser_string(s):
    """Serialize byte string s with a CompactSize length prefix."""
    prefix = ser_compact_size(len(s))
    return prefix + s
|
|
|
|
|
2023-04-25 12:30:50 +02:00
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
def deser_uint256(f):
    """Read a 256-bit little-endian integer from stream f."""
    raw = f.read(32)
    return int.from_bytes(raw, 'little')
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
|
|
|
|
def ser_uint256(u):
    """Serialize integer u as 32 little-endian bytes."""
    encoded = u.to_bytes(32, 'little')
    return encoded
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
|
|
|
|
def uint256_from_str(s):
    """Interpret the first 32 bytes of s as a little-endian 256-bit integer."""
    raw = s[:32]
    return int.from_bytes(raw, 'little')
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
|
2020-11-17 20:40:15 +01:00
|
|
|
def uint256_to_string(uint256):
    """Format a 256-bit integer as a 64-character zero-padded hex string."""
    return format(uint256, '064x')
|
|
|
|
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
def uint256_from_compact(c):
    """Convert a compact-encoded target ("nBits") to a 256-bit integer.

    The compact format packs an exponent into the top byte and a 3-byte
    mantissa into the low bytes, mirroring arith_uint256::SetCompact().

    Also handles exponents below 3 (mantissa shifted right), which the
    previous implementation rejected with a negative shift count.
    """
    nbytes = (c >> 24) & 0xFF
    v = c & 0xFFFFFF
    if nbytes >= 3:
        v <<= 8 * (nbytes - 3)
    else:
        v >>= 8 * (3 - nbytes)
    return v
|
|
|
|
|
|
|
|
|
2021-05-29 22:24:52 +02:00
|
|
|
# deser_function_name: Allow for an alternate deserialization function on the
|
|
|
|
# entries in the vector.
|
|
|
|
def deser_vector(f, c, deser_function_name=None):
|
2020-03-30 14:21:47 +02:00
|
|
|
nit = deser_compact_size(f)
|
|
|
|
r = []
|
2020-08-11 02:50:34 +02:00
|
|
|
for _ in range(nit):
|
2020-03-30 14:21:47 +02:00
|
|
|
t = c()
|
2021-05-29 22:24:52 +02:00
|
|
|
if deser_function_name:
|
|
|
|
getattr(t, deser_function_name)(f)
|
|
|
|
else:
|
|
|
|
t.deserialize(f)
|
2020-03-30 14:21:47 +02:00
|
|
|
r.append(t)
|
|
|
|
return r
|
|
|
|
|
|
|
|
|
2021-05-26 14:14:27 +02:00
|
|
|
# ser_function_name: Allow for an alternate serialization function on the
|
2021-05-29 22:24:52 +02:00
|
|
|
# entries in the vector (we use this for serializing addrv2 messages).
|
2021-05-26 14:14:27 +02:00
|
|
|
def ser_vector(l, ser_function_name=None):
|
2020-03-30 14:21:47 +02:00
|
|
|
r = ser_compact_size(len(l))
|
|
|
|
for i in l:
|
2021-05-26 14:14:27 +02:00
|
|
|
if ser_function_name:
|
|
|
|
r += getattr(i, ser_function_name)()
|
|
|
|
else:
|
|
|
|
r += i.serialize()
|
2020-03-30 14:21:47 +02:00
|
|
|
return r
|
|
|
|
|
|
|
|
|
|
|
|
def deser_uint256_vector(f):
    """Read a CompactSize-prefixed vector of 256-bit integers from f."""
    count = deser_compact_size(f)
    return [deser_uint256(f) for _ in range(count)]
|
|
|
|
|
|
|
|
|
|
|
|
def ser_uint256_vector(l):
    """Serialize a vector of 256-bit integers with a CompactSize prefix."""
    body = b"".join(ser_uint256(item) for item in l)
    return ser_compact_size(len(l)) + body
|
|
|
|
|
|
|
|
|
|
|
|
def deser_dyn_bitset(f, bytes_based):
    """Deserialize a dynamic bitset into a list of bools.

    If bytes_based, the CompactSize prefix encodes the number of bytes
    (so the bit count is 8 * nb); otherwise it encodes the bit count
    directly and the byte count is derived by rounding up.
    """
    if bytes_based:
        nb = deser_compact_size(f)
        n = nb * 8
    else:
        n = deser_compact_size(f)
        # Integer ceil-division; int((n + 7) / 8) goes through a float and
        # loses precision for very large n.
        nb = (n + 7) // 8
    b = f.read(nb)
    r = []
    for i in range(n):
        # Bits are packed LSB-first within each byte.
        r.append((b[i // 8] & (1 << (i % 8))) != 0)
    return r
|
|
|
|
|
|
|
|
|
|
|
|
def ser_dyn_bitset(l, bytes_based):
    """Serialize a list of bools as a dynamic bitset.

    If bytes_based, the CompactSize prefix is the byte count, otherwise
    it is the bit count. Bits are packed LSB-first within each byte.
    """
    n = len(l)
    # Integer ceil-division; int((n + 7) / 8) goes through a float and
    # loses precision for very large n.
    nb = (n + 7) // 8
    r = [0] * nb
    for i in range(n):
        r[i // 8] |= (1 if l[i] else 0) << (i % 8)
    if bytes_based:
        r = ser_compact_size(nb) + bytes(r)
    else:
        r = ser_compact_size(n) + bytes(r)
    return r
|
|
|
|
|
|
|
|
|
2021-06-24 12:47:04 +02:00
|
|
|
def from_hex(obj, hex_string):
    """Deserialize from a hex string representation (e.g. from RPC)

    Note that there is no complementary helper like e.g. `to_hex` for the
    inverse operation. To serialize a message object to a hex string, simply
    use obj.serialize().hex()"""
    raw = bytes.fromhex(hex_string)
    obj.deserialize(BytesIO(raw))
    return obj
|
|
|
|
|
2021-06-24 12:47:04 +02:00
|
|
|
|
|
|
|
def tx_from_hex(hex_string):
    """Deserialize from hex string to a transaction object"""
    # Thin convenience wrapper around from_hex for the common
    # CTransaction case.
    return from_hex(CTransaction(), hex_string)
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
# Objects that map to dashd objects, which can be serialized/deserialized
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CService:
    """An (IPv6 address, port) pair in on-wire form.

    The address is always stored/serialized as a 16-byte IPv6 address;
    the port is big-endian, per the P2P protocol.
    """
    __slots__ = ("ip", "port")

    def __init__(self):
        self.ip = ""
        self.port = 0

    def deserialize(self, f):
        # 16 address bytes followed by a 2-byte big-endian port.
        self.ip = socket.inet_ntop(socket.AF_INET6, f.read(16))
        self.port = struct.unpack(">H", f.read(2))[0]

    def serialize(self):
        payload = socket.inet_pton(socket.AF_INET6, self.ip)
        payload += struct.pack(">H", self.port)
        return payload

    def __repr__(self):
        return "CService(ip=%s port=%i)" % (self.ip, self.port)
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CAddress:
    """Network address entry: (services, network, ip, port) plus timestamp.

    Supports both the legacy addr format and the BIP155 addrv2 format.
    Only the IPv4 and I2P networks are handled by the v2 (de)serializers.
    """
    __slots__ = ("net", "ip", "nServices", "port", "time")

    # see https://github.com/bitcoin/bips/blob/master/bip-0155.mediawiki
    NET_IPV4 = 1
    NET_I2P = 5

    # Human-readable network names, used by __repr__.
    ADDRV2_NET_NAME = {
        NET_IPV4: "IPv4",
        NET_I2P: "I2P"
    }

    # Address payload length in bytes per network, per BIP155.
    ADDRV2_ADDRESS_LENGTH = {
        NET_IPV4: 4,
        NET_I2P: 32
    }

    # Base32 padding stripped from / re-added to .b32.i2p addresses.
    I2P_PAD = "===="

    def __init__(self):
        self.time = 0
        self.nServices = 1
        self.net = self.NET_IPV4
        self.ip = "0.0.0.0"
        self.port = 0

    def __eq__(self, other):
        return self.net == other.net and self.ip == other.ip and self.nServices == other.nServices and self.port == other.port and self.time == other.time

    def deserialize(self, f, *, with_time=True):
        """Deserialize from addrv1 format (pre-BIP155)"""
        if with_time:
            # VERSION messages serialize CAddress objects without time
            self.time = struct.unpack("<I", f.read(4))[0]
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        # We only support IPv4 which means skip 12 bytes and read the next 4 as IPv4 address.
        f.read(12)
        self.net = self.NET_IPV4
        self.ip = socket.inet_ntoa(f.read(4))
        self.port = struct.unpack(">H", f.read(2))[0]

    def serialize(self, *, with_time=True):
        """Serialize in addrv1 format (pre-BIP155)"""
        # addrv1 can only represent IPv4 addresses here.
        assert self.net == self.NET_IPV4
        r = b""
        if with_time:
            # VERSION messages serialize CAddress objects without time
            r += struct.pack("<I", self.time)
        r += struct.pack("<Q", self.nServices)
        # IPv4-mapped IPv6 prefix: 10 zero bytes followed by two 0xff bytes.
        r += b"\x00" * 10 + b"\xff" * 2
        r += socket.inet_aton(self.ip)
        r += struct.pack(">H", self.port)
        return r

    def deserialize_v2(self, f):
        """Deserialize from addrv2 format (BIP155)"""
        self.time = struct.unpack("<I", f.read(4))[0]

        # In addrv2 the service flags are CompactSize-encoded.
        self.nServices = deser_compact_size(f)

        self.net = struct.unpack("B", f.read(1))[0]
        assert self.net in (self.NET_IPV4, self.NET_I2P)

        address_length = deser_compact_size(f)
        assert address_length == self.ADDRV2_ADDRESS_LENGTH[self.net]

        addr_bytes = f.read(address_length)
        if self.net == self.NET_IPV4:
            self.ip = socket.inet_ntoa(addr_bytes)
        else:
            # I2P: 32-byte hash -> lower-case unpadded base32 + ".b32.i2p".
            self.ip = b32encode(addr_bytes)[0:-len(self.I2P_PAD)].decode("ascii").lower() + ".b32.i2p"

        self.port = struct.unpack(">H", f.read(2))[0]

    def serialize_v2(self):
        """Serialize in addrv2 format (BIP155)"""
        assert self.net in (self.NET_IPV4, self.NET_I2P)
        r = b""
        r += struct.pack("<I", self.time)
        r += ser_compact_size(self.nServices)
        r += struct.pack("B", self.net)
        r += ser_compact_size(self.ADDRV2_ADDRESS_LENGTH[self.net])
        if self.net == self.NET_IPV4:
            r += socket.inet_aton(self.ip)
        else:
            sfx = ".b32.i2p"
            assert self.ip.endswith(sfx)
            # Re-add the base32 padding stripped in deserialize_v2.
            r += b32decode(self.ip[0:-len(sfx)] + self.I2P_PAD, True)
        r += struct.pack(">H", self.port)
        return r

    def __repr__(self):
        return ("CAddress(nServices=%i net=%s addr=%s port=%i)"
                % (self.nServices, self.ADDRV2_NET_NAME[self.net], self.ip, self.port))
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CInv:
    """Inventory vector: identifies an object by (type, hash).

    Used in inv/getdata messages to announce or request objects.
    """
    __slots__ = ("hash", "type")

    # Human-readable names for the inventory type codes; used by __repr__.
    typemap = {
        0: "Error",
        MSG_TX: "TX",
        MSG_BLOCK: "Block",
        MSG_FILTERED_BLOCK: "filtered Block",
        MSG_GOVERNANCE_OBJECT: "Governance Object",
        MSG_GOVERNANCE_OBJECT_VOTE: "Governance Vote",
        MSG_CMPCT_BLOCK: "CompactBlock",
    }

    def __init__(self, t=0, h=0):
        self.type = t
        self.hash = h

    def deserialize(self, f):
        # Wire format: 4-byte little-endian type, then a 256-bit hash.
        self.type = struct.unpack("<I", f.read(4))[0]
        self.hash = deser_uint256(f)

    def serialize(self):
        r = b""
        r += struct.pack("<I", self.type)
        r += ser_uint256(self.hash)
        return r

    def __repr__(self):
        # Unknown type codes are shown numerically.
        return "CInv(type=%s hash=%064x)" \
            % (self.typemap.get(self.type, "%d" % self.type), self.hash)

    def __eq__(self, other):
        return isinstance(other, CInv) and self.hash == other.hash and self.type == other.type
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CBlockLocator:
    """Block locator: a list of block hashes used by getheaders/getblocks.

    The on-wire version field is ignored on read and written as 0.
    """
    __slots__ = ("nVersion", "vHave")

    def __init__(self):
        self.vHave = []

    def deserialize(self, f):
        struct.unpack("<i", f.read(4))[0]  # Ignore version field.
        self.vHave = deser_uint256_vector(f)

    def serialize(self):
        payload = struct.pack("<i", 0)  # Bitcoin Core ignores version field. Set it to 0.
        payload += ser_uint256_vector(self.vHave)
        return payload

    def __repr__(self):
        return "CBlockLocator(vHave=%s)" % (repr(self.vHave))
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class COutPoint:
    """Reference to a specific transaction output: (txid, output index)."""
    __slots__ = ("hash", "n")

    def __init__(self, hash=0, n=0xFFFFFFFF):
        self.hash = hash
        self.n = n

    def deserialize(self, f):
        self.hash = deser_uint256(f)
        self.n = struct.unpack("<I", f.read(4))[0]

    def serialize(self):
        payload = ser_uint256(self.hash)
        payload += struct.pack("<I", self.n)
        return payload

    def __repr__(self):
        return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CTxIn:
    """Transaction input: previous outpoint, scriptSig and sequence number."""
    __slots__ = ("nSequence", "prevout", "scriptSig")

    def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
        # Default to a null outpoint when none is supplied.
        self.prevout = COutPoint() if outpoint is None else outpoint
        self.scriptSig = scriptSig
        self.nSequence = nSequence

    def deserialize(self, f):
        outpoint = COutPoint()
        outpoint.deserialize(f)
        self.prevout = outpoint
        self.scriptSig = deser_string(f)
        self.nSequence = struct.unpack("<I", f.read(4))[0]

    def serialize(self):
        payload = self.prevout.serialize()
        payload += ser_string(self.scriptSig)
        payload += struct.pack("<I", self.nSequence)
        return payload

    def __repr__(self):
        return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
            % (repr(self.prevout), self.scriptSig.hex(),
               self.nSequence)
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CTxOut:
    """Transaction output: amount in satoshis plus the locking script."""
    __slots__ = ("nValue", "scriptPubKey")

    def __init__(self, nValue=0, scriptPubKey=b""):
        self.nValue = nValue
        self.scriptPubKey = scriptPubKey

    def deserialize(self, f):
        self.nValue = struct.unpack("<q", f.read(8))[0]
        self.scriptPubKey = deser_string(f)

    def serialize(self):
        payload = struct.pack("<q", self.nValue)
        payload += ser_string(self.scriptPubKey)
        return payload

    def __repr__(self):
        # Display the amount as whole coins with 8 decimal places.
        return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
            % (self.nValue // COIN, self.nValue % COIN,
               self.scriptPubKey.hex())
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CTransaction:
    """A Dash transaction.

    Dash packs a 16-bit version and a 16-bit special-transaction type into
    the 32-bit on-wire version field; transactions with nType != 0 carry an
    extra payload (vExtraPayload). sha256 (int) and hash (hex txid) are
    cached and invalidated on deserialize.
    """
    __slots__ = ("hash", "nLockTime", "nVersion", "sha256", "vin", "vout",
                 "nType", "vExtraPayload")

    def __init__(self, tx=None):
        if tx is None:
            self.nVersion = 1
            self.nType = 0
            self.vin = []
            self.vout = []
            self.nLockTime = 0
            self.vExtraPayload = None
            self.sha256 = None
            self.hash = None
        else:
            # Copy constructor: deep-copy the mutable in/out vectors so the
            # copy can be mutated independently of the original.
            self.nVersion = tx.nVersion
            self.nType = tx.nType
            self.vin = copy.deepcopy(tx.vin)
            self.vout = copy.deepcopy(tx.vout)
            self.nLockTime = tx.nLockTime
            self.vExtraPayload = tx.vExtraPayload
            self.sha256 = tx.sha256
            self.hash = tx.hash

    def deserialize(self, f):
        # Low 16 bits: version; high 16 bits: special-transaction type.
        ver32bit = struct.unpack("<i", f.read(4))[0]
        self.nVersion = ver32bit & 0xffff
        self.nType = (ver32bit >> 16) & 0xffff
        self.vin = deser_vector(f, CTxIn)
        self.vout = deser_vector(f, CTxOut)
        self.nLockTime = struct.unpack("<I", f.read(4))[0]
        if self.nType != 0:
            # Special transactions carry an extra length-prefixed payload.
            self.vExtraPayload = deser_string(f)
        # Invalidate cached hashes; they no longer match the new contents.
        self.sha256 = None
        self.hash = None

    def serialize(self):
        r = b""
        # Re-pack version and type into the single 32-bit field.
        ver32bit = int(self.nVersion | (self.nType << 16))
        r += struct.pack("<i", ver32bit)
        r += ser_vector(self.vin)
        r += ser_vector(self.vout)
        r += struct.pack("<I", self.nLockTime)
        if self.nType != 0:
            r += ser_string(self.vExtraPayload)
        return r

    def rehash(self):
        # Force recomputation of the cached txid and return the hex form.
        self.sha256 = None
        self.calc_sha256()
        return self.hash

    def calc_sha256(self):
        # sha256 (integer form) is cached; hash (hex string in reversed,
        # RPC-displayed byte order) is recomputed on every call.
        if self.sha256 is None:
            self.sha256 = uint256_from_str(hash256(self.serialize()))
        self.hash = hash256(self.serialize())[::-1].hex()

    def is_valid(self):
        # Basic sanity check: every output value must lie in [0, MAX_MONEY].
        self.calc_sha256()
        for tout in self.vout:
            if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
                return False
        return True

    # Calculate the virtual transaction size using
    # serialization size (does NOT use sigops).
    def get_vsize(self):
        return len(self.serialize())

    # it's just a helper that return vsize to reduce conflicts during backporting
    def get_weight(self):
        return self.get_vsize()

    def __repr__(self):
        return "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i)" \
            % (self.nVersion, repr(self.vin), repr(self.vout), self.nLockTime)
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CBlockHeader:
    """Block header.

    Note: the block hash uses the Dash X11 PoW hash (dashhash), not
    double-SHA256; sha256 holds the integer form, hash the hex string.
    """
    __slots__ = ("hash", "hashMerkleRoot", "hashPrevBlock", "nBits", "nNonce",
                 "nTime", "nVersion", "sha256")

    def __init__(self, header=None):
        if header is None:
            self.set_null()
        else:
            # Copy constructor; also primes the hash cache.
            self.nVersion = header.nVersion
            self.hashPrevBlock = header.hashPrevBlock
            self.hashMerkleRoot = header.hashMerkleRoot
            self.nTime = header.nTime
            self.nBits = header.nBits
            self.nNonce = header.nNonce
            self.sha256 = header.sha256
            self.hash = header.hash
            self.calc_sha256()

    def set_null(self):
        # New headers default to block version 4.
        self.nVersion = 4
        self.hashPrevBlock = 0
        self.hashMerkleRoot = 0
        self.nTime = 0
        self.nBits = 0
        self.nNonce = 0
        self.sha256 = None
        self.hash = None

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.hashPrevBlock = deser_uint256(f)
        self.hashMerkleRoot = deser_uint256(f)
        self.nTime = struct.unpack("<I", f.read(4))[0]
        self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        # Invalidate cached hashes after loading new contents.
        self.sha256 = None
        self.hash = None

    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256(self.hashPrevBlock)
        r += ser_uint256(self.hashMerkleRoot)
        r += struct.pack("<I", self.nTime)
        r += struct.pack("<I", self.nBits)
        r += struct.pack("<I", self.nNonce)
        return r

    def calc_sha256(self):
        # Compute and cache both hash forms from the 80-byte serialization,
        # using the X11 dashhash.
        if self.sha256 is None:
            r = b""
            r += struct.pack("<i", self.nVersion)
            r += ser_uint256(self.hashPrevBlock)
            r += ser_uint256(self.hashMerkleRoot)
            r += struct.pack("<I", self.nTime)
            r += struct.pack("<I", self.nBits)
            r += struct.pack("<I", self.nNonce)
            self.sha256 = uint256_from_str(dashhash(r))
            self.hash = dashhash(r)[::-1].hex()

    def rehash(self):
        # Drop the cache and recompute; returns the integer hash.
        self.sha256 = None
        self.calc_sha256()
        return self.sha256

    def __repr__(self):
        return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce)
|
|
|
|
|
2019-02-12 22:51:59 +01:00
|
|
|
# Serialized header size: 4 (version) + 32 (prev hash) + 32 (merkle root)
# + 4 (time) + 4 (bits) + 4 (nonce) = 80 bytes; sanity-checked at import.
BLOCK_HEADER_SIZE = len(CBlockHeader().serialize())
assert_equal(BLOCK_HEADER_SIZE, 80)
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
class CBlock(CBlockHeader):
    """Full block: the header plus its transaction list."""
    __slots__ = ("vtx",)

    def __init__(self, header=None):
        super().__init__(header)
        self.vtx = []

    def deserialize(self, f):
        super().deserialize(f)
        self.vtx = deser_vector(f, CTransaction)

    def serialize(self):
        r = b""
        r += super().serialize()
        r += ser_vector(self.vtx)
        return r

    # Calculate the merkle root given a vector of transaction hashes
    @staticmethod
    def get_merkle_root(hashes):
        # Bitcoin-style merkle tree: on a level with an odd count, the last
        # hash is paired with itself (i2 clamps to the last index).
        while len(hashes) > 1:
            newhashes = []
            for i in range(0, len(hashes), 2):
                i2 = min(i+1, len(hashes)-1)
                newhashes.append(hash256(hashes[i] + hashes[i2]))
            hashes = newhashes
        return uint256_from_str(hashes[0])

    def calc_merkle_root(self):
        hashes = []
        for tx in self.vtx:
            tx.calc_sha256()
            hashes.append(ser_uint256(tx.sha256))
        return self.get_merkle_root(hashes)

    def is_valid(self):
        # Check proof of work, every transaction, then the merkle root.
        self.calc_sha256()
        target = uint256_from_compact(self.nBits)
        if self.sha256 > target:
            return False
        for tx in self.vtx:
            if not tx.is_valid():
                return False
        if self.calc_merkle_root() != self.hashMerkleRoot:
            return False
        return True

    def solve(self):
        # Grind the nonce until the PoW hash meets the compact target.
        self.rehash()
        target = uint256_from_compact(self.nBits)
        while self.sha256 > target:
            self.nNonce += 1
            self.rehash()

    # it's just a helper that return vsize to reduce conflicts during backporting
    def get_weight(self):
        return len(self.serialize())

    def __repr__(self):
        return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
|
|
|
|
|
2022-03-11 20:39:12 +01:00
|
|
|
|
|
|
|
class CompressibleBlockHeader:
|
|
|
|
__slots__ = ("bitfield", "timeOffset", "nVersion", "hashPrevBlock", "hashMerkleRoot", "nTime", "nBits", "nNonce",
|
|
|
|
"hash", "sha256")
|
|
|
|
|
|
|
|
FLAG_VERSION_BIT_0 = 1 << 0
|
|
|
|
FLAG_VERSION_BIT_1 = 1 << 1
|
|
|
|
FLAG_VERSION_BIT_2 = 1 << 2
|
|
|
|
FLAG_PREV_BLOCK_HASH = 1 << 3
|
|
|
|
FLAG_TIMESTAMP = 1 << 4
|
|
|
|
FLAG_NBITS = 1 << 5
|
|
|
|
|
|
|
|
BITMASK_VERSION = FLAG_VERSION_BIT_0 | FLAG_VERSION_BIT_1 | FLAG_VERSION_BIT_2
|
|
|
|
|
|
|
|
def __init__(self, header=None):
|
|
|
|
if header is None:
|
|
|
|
self.set_null()
|
|
|
|
else:
|
|
|
|
self.bitfield = 0
|
|
|
|
self.timeOffset = 0
|
|
|
|
self.nVersion = header.nVersion
|
|
|
|
self.hashPrevBlock = header.hashPrevBlock
|
|
|
|
self.hashMerkleRoot = header.hashMerkleRoot
|
|
|
|
self.nTime = header.nTime
|
|
|
|
self.nBits = header.nBits
|
|
|
|
self.nNonce = header.nNonce
|
|
|
|
self.hash = None
|
|
|
|
self.sha256 = None
|
|
|
|
self.calc_sha256()
|
|
|
|
|
|
|
|
def set_null(self):
|
|
|
|
self.bitfield = 0
|
|
|
|
self.timeOffset = 0
|
|
|
|
self.nVersion = 0
|
|
|
|
self.hashPrevBlock = 0
|
|
|
|
self.hashMerkleRoot = 0
|
|
|
|
self.nTime = 0
|
|
|
|
self.nBits = 0
|
|
|
|
self.nNonce = 0
|
|
|
|
self.hash = None
|
|
|
|
self.sha256 = None
|
|
|
|
|
|
|
|
    def deserialize(self, f):
        """Deserialize a compressed header (headers2 format).

        A leading bitfield byte selects which fields are present explicitly
        versus derived from the preceding header in the stream.
        """
        self.bitfield = struct.unpack("<B", f.read(1))[0]
        if self.bitfield & self.BITMASK_VERSION == 0:
            # No version-reuse bits set: the full 4-byte version follows.
            self.nVersion = struct.unpack("<i", f.read(4))[0]
        if self.bitfield & self.FLAG_PREV_BLOCK_HASH:
            self.hashPrevBlock = deser_uint256(f)
        self.hashMerkleRoot = deser_uint256(f)
        if self.bitfield & self.FLAG_TIMESTAMP:
            # Full 4-byte timestamp present.
            self.nTime = struct.unpack("<I", f.read(4))[0]
        else:
            # Otherwise a 2-byte signed offset from the previous header's time.
            self.timeOffset = struct.unpack("<h", f.read(2))[0]
        if self.bitfield & self.FLAG_NBITS:
            self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        self.rehash()
|
|
|
|
|
|
|
|
def serialize(self):
|
|
|
|
r = b""
|
|
|
|
r += struct.pack("<B", self.bitfield)
|
|
|
|
if not self.bitfield & self.BITMASK_VERSION:
|
|
|
|
r += struct.pack("<i", self.nVersion)
|
|
|
|
if self.bitfield & self.FLAG_PREV_BLOCK_HASH:
|
|
|
|
r += ser_uint256(self.hashPrevBlock)
|
|
|
|
r += ser_uint256(self.hashMerkleRoot)
|
|
|
|
r += struct.pack("<I", self.nTime) if self.bitfield & self.FLAG_TIMESTAMP else struct.pack("<h", self.timeOffset)
|
|
|
|
if self.bitfield & self.FLAG_NBITS:
|
|
|
|
r += struct.pack("<I", self.nBits)
|
|
|
|
r += struct.pack("<I", self.nNonce)
|
|
|
|
return r
|
|
|
|
|
|
|
|
def calc_sha256(self):
|
|
|
|
if self.sha256 is None:
|
|
|
|
r = b""
|
|
|
|
r += struct.pack("<i", self.nVersion)
|
|
|
|
r += ser_uint256(self.hashPrevBlock)
|
|
|
|
r += ser_uint256(self.hashMerkleRoot)
|
|
|
|
r += struct.pack("<I", self.nTime)
|
|
|
|
r += struct.pack("<I", self.nBits)
|
|
|
|
r += struct.pack("<I", self.nNonce)
|
|
|
|
self.sha256 = uint256_from_str(dashhash(r))
|
2023-07-05 20:34:58 +02:00
|
|
|
self.hash = int(dashhash(r)[::-1].hex(), 16)
|
2022-03-11 20:39:12 +01:00
|
|
|
|
|
|
|
def rehash(self):
|
|
|
|
self.sha256 = None
|
|
|
|
self.calc_sha256()
|
|
|
|
return self.sha256
|
|
|
|
|
|
|
|
def __repr__(self):
|
|
|
|
return "BlockHeaderCompressed(bitfield=%064x, nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s " \
|
|
|
|
"nBits=%08x nNonce=%08x timeOffset=%i)" % \
|
|
|
|
(self.bitfield, self.nVersion, self.hashPrevBlock, self.hashMerkleRoot, time.ctime(self.nTime), self.nBits, self.nNonce, self.timeOffset)
|
|
|
|
|
|
|
|
def __save_version_as_most_recent(self, last_unique_versions):
|
|
|
|
last_unique_versions.insert(0, self.nVersion)
|
|
|
|
|
|
|
|
# Evict the oldest version
|
|
|
|
if len(last_unique_versions) > 7:
|
|
|
|
last_unique_versions.pop()
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def __mark_version_as_most_recent(last_unique_versions, version_idx):
|
|
|
|
# Move version to the front of the list
|
|
|
|
last_unique_versions.insert(0, last_unique_versions.pop(version_idx))
|
|
|
|
|
|
|
|
def compress(self, last_blocks, last_unique_versions):
|
|
|
|
if not last_blocks:
|
|
|
|
# First block, everything must be uncompressed
|
|
|
|
self.bitfield &= (~CompressibleBlockHeader.BITMASK_VERSION)
|
|
|
|
self.bitfield |= CompressibleBlockHeader.FLAG_PREV_BLOCK_HASH
|
|
|
|
self.bitfield |= CompressibleBlockHeader.FLAG_TIMESTAMP
|
|
|
|
self.bitfield |= CompressibleBlockHeader.FLAG_NBITS
|
|
|
|
self.__save_version_as_most_recent(last_unique_versions)
|
|
|
|
return
|
|
|
|
|
|
|
|
# Compress version
|
|
|
|
try:
|
|
|
|
version_idx = last_unique_versions.index(self.nVersion)
|
|
|
|
version_offset = len(last_unique_versions) - version_idx
|
|
|
|
self.bitfield &= (~CompressibleBlockHeader.BITMASK_VERSION)
|
|
|
|
self.bitfield |= (version_offset & CompressibleBlockHeader.BITMASK_VERSION)
|
|
|
|
self.__mark_version_as_most_recent(last_unique_versions, version_idx)
|
|
|
|
except ValueError:
|
|
|
|
self.__save_version_as_most_recent(last_unique_versions)
|
|
|
|
|
|
|
|
# We have the previous block
|
|
|
|
last_block = last_blocks[-1]
|
|
|
|
|
|
|
|
# Compress time
|
|
|
|
self.timeOffset = self.nTime - last_block.nTime
|
|
|
|
if self.timeOffset > 32767 or self.timeOffset < -32768:
|
|
|
|
# Time diff overflows, we have to send it as 4 bytes (uncompressed)
|
|
|
|
self.bitfield |= CompressibleBlockHeader.FLAG_TIMESTAMP
|
|
|
|
|
|
|
|
# If nBits doesn't match previous block, we have to send it
|
|
|
|
if self.nBits != last_block.nBits:
|
|
|
|
self.bitfield |= CompressibleBlockHeader.FLAG_NBITS
|
|
|
|
|
|
|
|
def uncompress(self, last_compressed_blocks, last_unique_versions):
|
|
|
|
if not last_compressed_blocks:
|
|
|
|
# First block header is always uncompressed
|
|
|
|
self.__save_version_as_most_recent(last_unique_versions)
|
|
|
|
return
|
|
|
|
|
|
|
|
previous_block = last_compressed_blocks[-1]
|
|
|
|
|
|
|
|
# Uncompress version
|
|
|
|
version_idx = self.bitfield & self.BITMASK_VERSION
|
|
|
|
if version_idx != 0:
|
|
|
|
if version_idx <= len(last_unique_versions):
|
|
|
|
self.nVersion = last_unique_versions[version_idx - 1]
|
|
|
|
self.__mark_version_as_most_recent(last_unique_versions, version_idx - 1)
|
|
|
|
else:
|
|
|
|
self.__save_version_as_most_recent(last_unique_versions)
|
|
|
|
|
|
|
|
# Uncompress prev block hash
|
|
|
|
if not self.bitfield & self.FLAG_PREV_BLOCK_HASH:
|
|
|
|
self.hashPrevBlock = previous_block.hash
|
|
|
|
|
|
|
|
# Uncompress time
|
|
|
|
if not self.bitfield & self.FLAG_TIMESTAMP:
|
|
|
|
self.nTime = previous_block.nTime + self.timeOffset
|
|
|
|
|
|
|
|
# Uncompress time bits
|
|
|
|
if not self.bitfield & self.FLAG_NBITS:
|
|
|
|
self.nBits = previous_block.nBits
|
|
|
|
|
|
|
|
self.rehash()
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class PrefilledTransaction:
    """A (position, transaction) pair carried verbatim inside a compact block (BIP 152)."""
    __slots__ = ("index", "tx")

    def __init__(self, index=0, tx = None):
        self.index = index
        self.tx = tx

    def deserialize(self, f):
        # Index is encoded on the wire as a compact size.
        self.index = deser_compact_size(f)
        parsed_tx = CTransaction()
        parsed_tx.deserialize(f)
        self.tx = parsed_tx

    def serialize(self):
        # compact-size index followed by the full transaction
        return ser_compact_size(self.index) + self.tx.serialize()

    def __repr__(self):
        return "PrefilledTransaction(index=%d, tx=%s)" % (self.index, repr(self.tx))
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
# This is what we send on the wire, in a cmpctblock message.
|
2018-09-27 17:06:40 +02:00
|
|
|
# This is what we send on the wire, in a cmpctblock message.
class P2PHeaderAndShortIDs:
    """Wire form of a compact block: header, nonce, 6-byte shortids, prefilled txs."""
    __slots__ = ("header", "nonce", "prefilled_txn", "prefilled_txn_length",
                 "shortids", "shortids_length")

    def __init__(self):
        self.header = CBlockHeader()
        self.nonce = 0
        self.shortids_length = 0
        self.shortids = []
        self.prefilled_txn_length = 0
        self.prefilled_txn = []

    def deserialize(self, f):
        self.header.deserialize(f)
        self.nonce = struct.unpack("<Q", f.read(8))[0]
        self.shortids_length = deser_compact_size(f)
        for _ in range(self.shortids_length):
            # shortids are defined to be 6 bytes in the spec; pad with two
            # zero bytes and decode as a little-endian 8-byte integer.
            padded = f.read(6) + b'\x00\x00'
            self.shortids.append(struct.unpack("<Q", padded)[0])
        self.prefilled_txn = deser_vector(f, PrefilledTransaction)
        self.prefilled_txn_length = len(self.prefilled_txn)

    def serialize(self):
        parts = [
            self.header.serialize(),
            struct.pack("<Q", self.nonce),
            ser_compact_size(self.shortids_length),
        ]
        for shortid in self.shortids:
            # Only the low 6 bytes of each shortid go on the wire.
            parts.append(struct.pack("<Q", shortid)[0:6])
        parts.append(ser_vector(self.prefilled_txn))
        return b"".join(parts)

    def __repr__(self):
        return "P2PHeaderAndShortIDs(header=%s, nonce=%d, shortids_length=%d, shortids=%s, prefilled_txn_length=%d, prefilledtxn=%s" % (repr(self.header), self.nonce, self.shortids_length, repr(self.shortids), self.prefilled_txn_length, repr(self.prefilled_txn))
|
|
|
|
|
|
|
|
|
|
|
|
# Calculate the BIP 152-compact blocks shortid for a given transaction hash
|
|
|
|
# Calculate the BIP 152-compact blocks shortid for a given transaction hash
def calculate_shortid(k0, k1, tx_hash):
    """Return SipHash-2-4(k0, k1, tx_hash) truncated to the low 48 bits."""
    return siphash256(k0, k1, tx_hash) & 0x0000ffffffffffff
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
# This version gets rid of the array lengths, and reinterprets the differential
|
|
|
|
# encoding into indices that can be used for lookup.
|
2018-09-27 17:06:40 +02:00
|
|
|
# This version gets rid of the array lengths, and reinterprets the differential
# encoding into indices that can be used for lookup.
class HeaderAndShortIDs:
    """Convenience view of a compact block using absolute prefilled-tx indices."""
    __slots__ = ("header", "nonce", "prefilled_txn", "shortids")

    def __init__(self, p2pheaders_and_shortids = None):
        self.header = CBlockHeader()
        self.nonce = 0
        self.shortids = []
        self.prefilled_txn = []

        if p2pheaders_and_shortids is not None:
            self.header = p2pheaders_and_shortids.header
            self.nonce = p2pheaders_and_shortids.nonce
            self.shortids = p2pheaders_and_shortids.shortids
            # Turn the differential wire indices back into absolute ones.
            prev_index = -1
            for entry in p2pheaders_and_shortids.prefilled_txn:
                prev_index = entry.index + prev_index + 1
                self.prefilled_txn.append(PrefilledTransaction(prev_index, entry.tx))

    def to_p2p(self):
        """Convert back to wire form, re-encoding prefilled indices differentially."""
        ret = P2PHeaderAndShortIDs()
        ret.header = self.header
        ret.nonce = self.nonce
        ret.shortids_length = len(self.shortids)
        ret.shortids = self.shortids
        ret.prefilled_txn_length = len(self.prefilled_txn)
        ret.prefilled_txn = []
        prev_index = -1
        for entry in self.prefilled_txn:
            ret.prefilled_txn.append(PrefilledTransaction(entry.index - prev_index - 1, entry.tx))
            prev_index = entry.index
        return ret

    def get_siphash_keys(self):
        """Derive the two SipHash keys from sha256(serialized header || nonce)."""
        digest = sha256(self.header.serialize() + struct.pack("<Q", self.nonce))
        key0 = struct.unpack("<Q", digest[0:8])[0]
        key1 = struct.unpack("<Q", digest[8:16])[0]
        return [key0, key1]

    def initialize_from_block(self, block, nonce=0, prefill_list=None):
        """Populate from a full block, prefilling the txs at `prefill_list` positions."""
        if prefill_list is None:
            prefill_list = [0]
        self.header = CBlockHeader(block)
        self.nonce = nonce
        self.prefilled_txn = [PrefilledTransaction(i, block.vtx[i]) for i in prefill_list]
        self.shortids = []
        [k0, k1] = self.get_siphash_keys()
        # Every non-prefilled transaction is represented by its shortid.
        for i, tx in enumerate(block.vtx):
            if i not in prefill_list:
                self.shortids.append(calculate_shortid(k0, k1, tx.sha256))

    def __repr__(self):
        return "HeaderAndShortIDs(header=%s, nonce=%d, shortids=%s, prefilledtxn=%s" % (repr(self.header), self.nonce, repr(self.shortids), repr(self.prefilled_txn))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class BlockTransactionsRequest:
    """getblocktxn payload: a block hash plus differentially encoded tx indexes."""
    __slots__ = ("blockhash", "indexes")

    def __init__(self, blockhash=0, indexes = None):
        self.blockhash = blockhash
        self.indexes = [] if indexes is None else indexes

    def deserialize(self, f):
        self.blockhash = deser_uint256(f)
        count = deser_compact_size(f)
        for _ in range(count):
            self.indexes.append(deser_compact_size(f))

    def serialize(self):
        parts = [ser_uint256(self.blockhash), ser_compact_size(len(self.indexes))]
        parts.extend(ser_compact_size(idx) for idx in self.indexes)
        return b"".join(parts)

    # helper to set the differentially encoded indexes from absolute ones
    def from_absolute(self, absolute_indexes):
        self.indexes = []
        prev = -1
        for idx in absolute_indexes:
            self.indexes.append(idx - prev - 1)
            prev = idx

    def to_absolute(self):
        """Return the absolute index list decoded from the differential one."""
        absolute_indexes = []
        prev = -1
        for delta in self.indexes:
            prev = delta + prev + 1
            absolute_indexes.append(prev)
        return absolute_indexes

    def __repr__(self):
        return "BlockTransactionsRequest(hash=%064x indexes=%s)" % (self.blockhash, repr(self.indexes))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class BlockTransactions:
    """blocktxn payload: a block hash plus the requested transactions."""
    __slots__ = ("blockhash", "transactions")

    def __init__(self, blockhash=0, transactions = None):
        self.blockhash = blockhash
        self.transactions = [] if transactions is None else transactions

    def deserialize(self, f):
        self.blockhash = deser_uint256(f)
        self.transactions = deser_vector(f, CTransaction)

    def serialize(self):
        return ser_uint256(self.blockhash) + ser_vector(self.transactions)

    def __repr__(self):
        return "BlockTransactions(hash=%064x transactions=%s)" % (self.blockhash, repr(self.transactions))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CPartialMerkleTree:
    """Partial merkle tree as used in merkleblock messages (BIP 37)."""
    __slots__ = ("nTransactions", "vBits", "vHash")

    def __init__(self):
        self.nTransactions = 0
        self.vBits = []
        self.vHash = []

    def deserialize(self, f):
        self.nTransactions = struct.unpack("<I", f.read(4))[0]
        self.vHash = deser_uint256_vector(f)
        self.vBits = deser_dyn_bitset(f, True)

    def serialize(self):
        parts = [
            struct.pack("<I", self.nTransactions),
            ser_uint256_vector(self.vHash),
            ser_dyn_bitset(self.vBits, True),
        ]
        return b"".join(parts)

    def __repr__(self):
        return "CPartialMerkleTree(nTransactions=%d vBits.size=%d vHash.size=%d)" % (self.nTransactions, len(self.vBits), len(self.vHash))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CMerkleBlock:
    """merkleblock message body: a header plus a partial merkle tree."""
    __slots__ = ("header", "txn")

    def __init__(self, header=None, txn=None):
        self.header = CBlockHeader() if header is None else header
        self.txn = CPartialMerkleTree() if txn is None else txn

    def deserialize(self, f):
        self.header.deserialize(f)
        self.txn.deserialize(f)

    def serialize(self):
        return self.header.serialize() + self.txn.serialize()

    def __repr__(self):
        return "CMerkleBlock(header=%s txn=%s)" % (repr(self.header), repr(self.txn))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CCbTx:
    """Coinbase special-transaction payload (DIP4-style CbTx).

    Wire layout is version-gated: v2 adds merkleRootQuorums, v3 adds the
    best chainlock (height diff + BLS signature) and assetLockedAmount.
    """
    __slots__ = ("nVersion", "nHeight", "merkleRootMNList", "merkleRootQuorums", "bestCLHeightDiff", "bestCLSignature", "assetLockedAmount")

    def __init__(self, version=None, height=None, merkleRootMNList=None, merkleRootQuorums=None, bestCLHeightDiff=None, bestCLSignature=None, assetLockedAmount=None):
        # Start from the null state, then overwrite only the fields provided.
        self.set_null()
        if version is not None:
            self.nVersion = version
        if height is not None:
            self.nHeight = height
        if merkleRootMNList is not None:
            self.merkleRootMNList = merkleRootMNList
        if merkleRootQuorums is not None:
            self.merkleRootQuorums = merkleRootQuorums
        if bestCLHeightDiff is not None:
            self.bestCLHeightDiff = bestCLHeightDiff
        if bestCLSignature is not None:
            self.bestCLSignature = bestCLSignature
        if assetLockedAmount is not None:
            self.assetLockedAmount = assetLockedAmount

    def set_null(self):
        """Reset all fields; the CL signature defaults to 96 zero bytes."""
        self.nVersion = 0
        self.nHeight = 0
        self.merkleRootMNList = None
        self.merkleRootQuorums = None
        self.bestCLHeightDiff = 0
        self.bestCLSignature = b'\x00' * 96
        self.assetLockedAmount = 0

    def deserialize(self, f):
        """Read the payload from stream f; later fields only exist for nVersion >= 2/3."""
        self.nVersion = struct.unpack("<H", f.read(2))[0]
        self.nHeight = struct.unpack("<i", f.read(4))[0]
        self.merkleRootMNList = deser_uint256(f)
        if self.nVersion >= 2:
            self.merkleRootQuorums = deser_uint256(f)
            if self.nVersion >= 3:
                self.bestCLHeightDiff = deser_compact_size(f)
                self.bestCLSignature = f.read(96)
                self.assetLockedAmount = struct.unpack("<q", f.read(8))[0]

    def serialize(self):
        """Serialize with the same version gating as deserialize()."""
        r = b""
        r += struct.pack("<H", self.nVersion)
        r += struct.pack("<i", self.nHeight)
        r += ser_uint256(self.merkleRootMNList)
        if self.nVersion >= 2:
            r += ser_uint256(self.merkleRootQuorums)
            if self.nVersion >= 3:
                r += ser_compact_size(self.bestCLHeightDiff)
                r += self.bestCLSignature
                r += struct.pack("<q", self.assetLockedAmount)
        return r

    def __repr__(self):
        # NOTE(review): deserialize() stores the merkle roots as ints (via
        # deser_uint256), but this calls .hex() on them, which only works if a
        # caller assigned bytes — confirm intended usage before relying on repr.
        return "CCbTx(nVersion=%i nHeight=%i merkleRootMNList=%s merkleRootQuorums=%s bestCLHeightDiff=%i bestCLSignature=%s assetLockedAmount=%i)" \
            % (self.nVersion, self.nHeight, self.merkleRootMNList.hex(), self.merkleRootQuorums.hex(), self.bestCLHeightDiff, self.bestCLSignature.hex(), self.assetLockedAmount)
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
|
2023-07-24 18:39:38 +02:00
|
|
|
class CAssetLockTx:
    """Asset-lock special-transaction payload: version plus locked credit outputs."""
    __slots__ = ("version", "creditOutputs")

    def __init__(self, version=None, creditOutputs=None):
        self.set_null()
        if version is not None:
            self.version = version
        self.creditOutputs = [] if creditOutputs is None else creditOutputs

    def set_null(self):
        self.version = 0
        self.creditOutputs = None

    def deserialize(self, f):
        self.version = struct.unpack("<B", f.read(1))[0]
        self.creditOutputs = deser_vector(f, CTxOut)

    def serialize(self):
        payload = struct.pack("<B", self.version)
        payload += ser_vector(self.creditOutputs)
        return payload

    def __repr__(self):
        return "CAssetLockTx(version={} creditOutputs={}" \
            .format(self.version, repr(self.creditOutputs))
|
|
|
|
|
|
|
|
|
|
|
|
class CAssetUnlockTx:
    """Asset-unlock special-transaction payload, quorum-signed."""
    __slots__ = ("version", "index", "fee", "requestedHeight", "quorumHash", "quorumSig")

    def __init__(self, version=None, index=None, fee=None, requestedHeight=None, quorumHash = 0, quorumSig = None):
        self.set_null()
        # Overwrite only the fields the caller actually supplied.
        if version is not None:
            self.version = version
        if index is not None:
            self.index = index
        if fee is not None:
            self.fee = fee
        if requestedHeight is not None:
            self.requestedHeight = requestedHeight
        if quorumHash is not None:
            self.quorumHash = quorumHash
        if quorumSig is not None:
            self.quorumSig = quorumSig

    def set_null(self):
        self.version = 0
        self.index = 0
        self.fee = None
        self.requestedHeight = 0
        self.quorumHash = 0
        # 96-byte null BLS signature
        self.quorumSig = b'\x00' * 96

    def deserialize(self, f):
        self.version = struct.unpack("<B", f.read(1))[0]
        self.index = struct.unpack("<Q", f.read(8))[0]
        self.fee = struct.unpack("<I", f.read(4))[0]
        self.requestedHeight = struct.unpack("<I", f.read(4))[0]
        self.quorumHash = deser_uint256(f)
        self.quorumSig = f.read(96)

    def serialize(self):
        parts = [
            struct.pack("<B", self.version),
            struct.pack("<Q", self.index),
            struct.pack("<I", self.fee),
            struct.pack("<I", self.requestedHeight),
            ser_uint256(self.quorumHash),
            self.quorumSig,
        ]
        return b"".join(parts)

    def __repr__(self):
        return "CAssetUnlockTx(version={} index={} fee={} requestedHeight={} quorumHash={:x} quorumSig={}" \
            .format(self.version, self.index, self.fee, self.requestedHeight, self.quorumHash, self.quorumSig.hex())
|
|
|
|
|
|
|
|
|
2023-07-25 20:46:55 +02:00
|
|
|
class CMnEhf:
    """EHF (masternode hard-fork signal) special-transaction payload."""
    __slots__ = ("version", "versionBit", "quorumHash", "quorumSig")

    def __init__(self, version=None, versionBit=None, quorumHash = 0, quorumSig = None):
        self.set_null()
        if version is not None:
            self.version = version
        if versionBit is not None:
            self.versionBit = versionBit
        if quorumHash is not None:
            self.quorumHash = quorumHash
        if quorumSig is not None:
            self.quorumSig = quorumSig

    def set_null(self):
        self.version = 0
        self.versionBit = 0
        self.quorumHash = 0
        # 96-byte null BLS signature
        self.quorumSig = b'\x00' * 96

    def deserialize(self, f):
        self.version = struct.unpack("<B", f.read(1))[0]
        self.versionBit = struct.unpack("<B", f.read(1))[0]
        self.quorumHash = deser_uint256(f)
        self.quorumSig = f.read(96)

    def serialize(self):
        parts = [
            struct.pack("<B", self.version),
            struct.pack("<B", self.versionBit),
            ser_uint256(self.quorumHash),
            self.quorumSig,
        ]
        return b"".join(parts)

    def __repr__(self):
        return "CMnEhf(version={} versionBit={} quorumHash={:x} quorumSig={}" \
            .format(self.version, self.versionBit, self.quorumHash, self.quorumSig.hex())
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class CSimplifiedMNListEntry:
    """Simplified masternode list entry (DIP7-style mnlistdiff element).

    Version 2 entries additionally carry a masternode type; type-1 (evo)
    entries also carry platform HTTP port and node id.
    """
    __slots__ = ("proRegTxHash", "confirmedHash", "service", "pubKeyOperator", "keyIDVoting", "isValid", "nVersion", "type", "platformHTTPPort", "platformNodeID")

    def __init__(self):
        self.set_null()

    def set_null(self):
        self.proRegTxHash = 0
        self.confirmedHash = 0
        self.service = CService()
        self.pubKeyOperator = b'\x00' * 48
        self.keyIDVoting = 0
        self.isValid = False
        self.nVersion = 0
        self.type = 0
        self.platformHTTPPort = 0
        self.platformNodeID = b'\x00' * 20

    def deserialize(self, f):
        self.nVersion = struct.unpack("<H", f.read(2))[0]
        self.proRegTxHash = deser_uint256(f)
        self.confirmedHash = deser_uint256(f)
        self.service.deserialize(f)
        self.pubKeyOperator = f.read(48)
        self.keyIDVoting = f.read(20)
        self.isValid = struct.unpack("<?", f.read(1))[0]
        if self.nVersion == 2:
            self.type = struct.unpack("<H", f.read(2))[0]
            if self.type == 1:
                # evo node: platform fields follow
                self.platformHTTPPort = struct.unpack("<H", f.read(2))[0]
                self.platformNodeID = f.read(20)

    def serialize(self, with_version = True):
        parts = []
        if with_version:
            parts.append(struct.pack("<H", self.nVersion))
        parts.append(ser_uint256(self.proRegTxHash))
        parts.append(ser_uint256(self.confirmedHash))
        parts.append(self.service.serialize())
        parts.append(self.pubKeyOperator)
        parts.append(self.keyIDVoting)
        parts.append(struct.pack("<?", self.isValid))
        if self.nVersion == 2:
            parts.append(struct.pack("<H", self.type))
            if self.type == 1:
                parts.append(struct.pack("<H", self.platformHTTPPort))
                parts.append(self.platformNodeID)
        return b"".join(parts)
|
|
|
|
|
|
|
|
|
|
|
|
class CFinalCommitment:
    """LLMQ final commitment (qfcommit); versions 2 and 4 carry a quorumIndex."""
    __slots__ = ("nVersion", "llmqType", "quorumHash", "quorumIndex", "signers", "validMembers", "quorumPublicKey",
                 "quorumVvecHash", "quorumSig", "membersSig")

    def __init__(self):
        self.set_null()

    def set_null(self):
        self.nVersion = 0
        self.llmqType = 0
        self.quorumHash = 0
        self.quorumIndex = 0
        self.signers = []
        self.validMembers = []
        # null BLS public key (48 bytes) and signatures (96 bytes each)
        self.quorumPublicKey = b'\x00' * 48
        self.quorumVvecHash = 0
        self.quorumSig = b'\x00' * 96
        self.membersSig = b'\x00' * 96

    def deserialize(self, f):
        self.nVersion = struct.unpack("<H", f.read(2))[0]
        self.llmqType = struct.unpack("<B", f.read(1))[0]
        self.quorumHash = deser_uint256(f)
        # Rotation versions (2 and 4) serialize an explicit quorum index.
        if self.nVersion in (2, 4):
            self.quorumIndex = struct.unpack("<H", f.read(2))[0]
        self.signers = deser_dyn_bitset(f, False)
        self.validMembers = deser_dyn_bitset(f, False)
        self.quorumPublicKey = f.read(48)
        self.quorumVvecHash = deser_uint256(f)
        self.quorumSig = f.read(96)
        self.membersSig = f.read(96)

    def serialize(self):
        parts = [
            struct.pack("<H", self.nVersion),
            struct.pack("<B", self.llmqType),
            ser_uint256(self.quorumHash),
        ]
        if self.nVersion in (2, 4):
            parts.append(struct.pack("<H", self.quorumIndex))
        parts.append(ser_dyn_bitset(self.signers, False))
        parts.append(ser_dyn_bitset(self.validMembers, False))
        parts.append(self.quorumPublicKey)
        parts.append(ser_uint256(self.quorumVvecHash))
        parts.append(self.quorumSig)
        parts.append(self.membersSig)
        return b"".join(parts)

    def __repr__(self):
        return "CFinalCommitment(nVersion={} llmqType={} quorumHash={:x} quorumIndex={} signers={}" \
            " validMembers={} quorumPublicKey={} quorumVvecHash={:x}) quorumSig={} membersSig={})" \
            .format(self.nVersion, self.llmqType, self.quorumHash, self.quorumIndex, repr(self.signers),
                    repr(self.validMembers), self.quorumPublicKey.hex(), self.quorumVvecHash, self.quorumSig.hex(), self.membersSig.hex())
|
2020-03-30 14:21:47 +02:00
|
|
|
|
2020-11-27 15:55:28 +01:00
|
|
|
class CGovernanceObject:
    """Governance object (proposal/trigger) as carried in govobj messages."""
    __slots__ = ("nHashParent", "nRevision", "nTime", "nCollateralHash", "vchData", "nObjectType",
                 "masternodeOutpoint", "vchSig")

    def __init__(self):
        self.nHashParent = 0
        self.nRevision = 0
        self.nTime = 0
        self.nCollateralHash = 0
        # bytes, not list: serialize() concatenates these onto a bytes buffer
        self.vchData = b""
        self.nObjectType = 0
        self.masternodeOutpoint = COutPoint()
        self.vchSig = b""

    def deserialize(self, f):
        self.nHashParent = deser_uint256(f)
        self.nRevision = struct.unpack("<i", f.read(4))[0]
        self.nTime = struct.unpack("<q", f.read(8))[0]
        self.nCollateralHash = deser_uint256(f)
        size = deser_compact_size(f)
        if size > 0:
            self.vchData = f.read(size)
        self.nObjectType = struct.unpack("<i", f.read(4))[0]
        self.masternodeOutpoint.deserialize(f)
        size = deser_compact_size(f)
        if size > 0:
            self.vchSig = f.read(size)

    def serialize(self):
        r = b""
        # Fix: was self.nParentHash, which doesn't exist under __slots__
        # (the attribute is nHashParent) and raised AttributeError.
        r += ser_uint256(self.nHashParent)
        r += struct.pack("<i", self.nRevision)
        r += struct.pack("<q", self.nTime)
        # Fix: serialization must use ser_* helpers; deser_uint256 /
        # deser_compact_size read from a stream and were wrong here.
        r += ser_uint256(self.nCollateralHash)
        r += ser_compact_size(len(self.vchData))
        r += self.vchData
        r += struct.pack("<i", self.nObjectType)
        r += self.masternodeOutpoint.serialize()
        r += ser_compact_size(len(self.vchSig))
        r += self.vchSig
        return r
|
|
|
|
|
|
|
|
|
|
|
|
class CGovernanceVote:
    """Masternode vote on a governance object (govobjvote)."""
    __slots__ = ("masternodeOutpoint", "nParentHash", "nVoteOutcome", "nVoteSignal", "nTime", "vchSig")

    def __init__(self):
        self.masternodeOutpoint = COutPoint()
        self.nParentHash = 0
        self.nVoteOutcome = 0
        self.nVoteSignal = 0
        self.nTime = 0
        self.vchSig = []

    def deserialize(self, f):
        self.masternodeOutpoint.deserialize(f)
        self.nParentHash = deser_uint256(f)
        self.nVoteOutcome = struct.unpack("<i", f.read(4))[0]
        self.nVoteSignal = struct.unpack("<i", f.read(4))[0]
        self.nTime = struct.unpack("<q", f.read(8))[0]
        sig_len = deser_compact_size(f)
        if sig_len > 0:
            self.vchSig = f.read(sig_len)

    def serialize(self):
        parts = [
            self.masternodeOutpoint.serialize(),
            ser_uint256(self.nParentHash),
            struct.pack("<i", self.nVoteOutcome),
            struct.pack("<i", self.nVoteSignal),
            struct.pack("<q", self.nTime),
            ser_compact_size(len(self.vchSig)),
            self.vchSig,
        ]
        return b"".join(parts)
|
|
|
|
|
|
|
|
|
|
|
|
class CRecoveredSig:
    """LLMQ recovered threshold signature (qsigrec)."""
    __slots__ = ("llmqType", "quorumHash", "id", "msgHash", "sig")

    def __init__(self):
        self.llmqType = 0
        self.quorumHash = 0
        self.id = 0
        self.msgHash = 0
        # 96-byte null BLS signature
        self.sig = b'\x00' * 96

    def deserialize(self, f):
        self.llmqType = struct.unpack("<B", f.read(1))[0]
        self.quorumHash = deser_uint256(f)
        self.id = deser_uint256(f)
        self.msgHash = deser_uint256(f)
        self.sig = f.read(96)

    def serialize(self):
        parts = [
            struct.pack("<B", self.llmqType),
            ser_uint256(self.quorumHash),
            ser_uint256(self.id),
            ser_uint256(self.msgHash),
            self.sig,
        ]
        return b"".join(parts)
|
|
|
|
|
2021-01-14 20:53:26 +01:00
|
|
|
|
|
|
|
class CSigShare:
    """A single quorum member's BLS signature share for an LLMQ signing session."""
    __slots__ = ("llmqType", "quorumHash", "quorumMember", "id", "msgHash", "sigShare")

    def __init__(self):
        self.llmqType = 0
        self.quorumHash = 0
        self.quorumMember = 0
        self.id = 0
        self.msgHash = 0
        # 96-byte BLS signature share placeholder
        self.sigShare = b'\x00' * 96

    def deserialize(self, f):
        (self.llmqType,) = struct.unpack("<B", f.read(1))
        self.quorumHash = deser_uint256(f)
        (self.quorumMember,) = struct.unpack("<H", f.read(2))
        self.id = deser_uint256(f)
        self.msgHash = deser_uint256(f)
        self.sigShare = f.read(96)

    def serialize(self):
        return b"".join((
            struct.pack("<B", self.llmqType),
            ser_uint256(self.quorumHash),
            struct.pack("<H", self.quorumMember),
            ser_uint256(self.id),
            ser_uint256(self.msgHash),
            self.sigShare,
        ))
|
|
|
|
|
2021-01-28 23:33:18 +01:00
|
|
|
|
|
|
|
class CBLSPublicKey:
    """A 48-byte BLS public key, serialized as raw bytes with no framing."""
    # Idiom fix: __slots__ was the bare string "data". A string happens to be
    # accepted as a single slot name, but the tuple form is the conventional,
    # less error-prone spelling used by the other classes in this file.
    __slots__ = ("data",)

    def __init__(self):
        # all-zero placeholder key
        self.data = b'\x00' * 48

    def deserialize(self, f):
        self.data = f.read(48)

    def serialize(self):
        # raw 48 bytes; bytes are immutable so returning directly is safe
        return self.data
|
|
|
|
|
|
|
|
|
|
|
|
class CBLSIESEncryptedSecretKey:
    """An IES-encrypted BLS secret key: ephemeral pubkey, IV, and ciphertext."""
    __slots__ = ("ephemeral_pubKey", "iv", "data")

    def __init__(self):
        self.ephemeral_pubKey = b'\x00' * 48
        self.iv = b'\x00' * 32
        self.data = b'\x00' * 32

    def deserialize(self, f):
        self.ephemeral_pubKey = f.read(48)
        self.iv = f.read(32)
        # ciphertext is length-prefixed with a compact size
        self.data = f.read(deser_compact_size(f))

    def serialize(self):
        return b"".join((
            self.ephemeral_pubKey,
            self.iv,
            ser_compact_size(len(self.data)),
            self.data,
        ))
|
|
|
|
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
# Objects that correspond to messages on the wire
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_version:
    """The ``version`` handshake message.

    Field order matches the P2P wire format; ``relay`` (BIP 37) is the
    optional trailing byte.
    """
    __slots__ = ("addrFrom", "addrTo", "nNonce", "relay", "nServices",
                 "nStartingHeight", "nTime", "nVersion", "strSubVer")
    msgtype = b"version"

    def __init__(self):
        self.nVersion = 0
        self.nServices = 0
        self.nTime = int(time.time())
        self.addrTo = CAddress()
        self.addrFrom = CAddress()
        # random nonce used by peers for self-connection detection
        self.nNonce = random.getrandbits(64)
        self.strSubVer = ''
        self.nStartingHeight = -1
        self.relay = 0

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.nTime = struct.unpack("<q", f.read(8))[0]
        self.addrTo = CAddress()
        # addresses inside version messages are serialized without a timestamp
        self.addrTo.deserialize(f, with_time=False)
        self.addrFrom = CAddress()
        self.addrFrom.deserialize(f, with_time=False)
        self.nNonce = struct.unpack("<Q", f.read(8))[0]
        # sub-version is a length-prefixed UTF-8 string on the wire
        self.strSubVer = deser_string(f).decode('utf-8')
        self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
        # Relay field is optional for version 70001 onwards
        # But, unconditionally check it to match behaviour in bitcoind
        try:
            self.relay = struct.unpack("<b", f.read(1))[0]
        except struct.error:
            # message ended before the relay byte: default to 0
            self.relay = 0

    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<Q", self.nServices)
        r += struct.pack("<q", self.nTime)
        r += self.addrTo.serialize(with_time=False)
        r += self.addrFrom.serialize(with_time=False)
        r += struct.pack("<Q", self.nNonce)
        r += ser_string(self.strSubVer.encode('utf-8'))
        r += struct.pack("<i", self.nStartingHeight)
        r += struct.pack("<b", self.relay)
        return r

    def __repr__(self):
        return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i relay=%i)' \
            % (self.nVersion, self.nServices, time.ctime(self.nTime),
               repr(self.addrTo), repr(self.addrFrom), self.nNonce,
               self.strSubVer, self.nStartingHeight, self.relay)
|
2020-03-30 14:21:47 +02:00
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_verack:
    """Empty ``verack`` message acknowledging a peer's version."""
    __slots__ = ()
    msgtype = b"verack"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        # carries no payload
        return b""

    def __repr__(self):
        return "msg_verack()"
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_addr:
    """``addr`` message: a vector of (timestamped) peer addresses."""
    __slots__ = ("addrs",)
    msgtype = b"addr"

    def __init__(self):
        self.addrs = []

    def deserialize(self, f):
        self.addrs = deser_vector(f, CAddress)

    def serialize(self):
        return ser_vector(self.addrs)

    def __repr__(self):
        return "msg_addr(addrs=%s)" % (repr(self.addrs))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_addrv2:
    """BIP 155 ``addrv2`` message: addresses in the v2 network format."""
    __slots__ = ("addrs",)
    msgtype = b"addrv2"

    def __init__(self):
        self.addrs = []

    def deserialize(self, f):
        # each element uses CAddress's BIP 155 codec
        self.addrs = deser_vector(f, CAddress, "deserialize_v2")

    def serialize(self):
        return ser_vector(self.addrs, "serialize_v2")

    def __repr__(self):
        return "msg_addrv2(addrs=%s)" % (repr(self.addrs))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_sendaddrv2:
    """Empty BIP 155 ``sendaddrv2`` message signalling addrv2 support."""
    __slots__ = ()
    msgtype = b"sendaddrv2"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_sendaddrv2()"
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_inv:
    """``inv`` message advertising a vector of CInv entries."""
    __slots__ = ("inv",)
    msgtype = b"inv"

    def __init__(self, inv=None):
        # Consistency: use the same one-line guard idiom as msg_getdata
        # instead of a four-line if/else; None means "start empty".
        self.inv = inv if inv is not None else []

    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.inv)

    def __repr__(self):
        return "msg_inv(inv=%s)" % (repr(self.inv))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_getdata:
    """``getdata`` message requesting objects announced via ``inv``."""
    __slots__ = ("inv",)
    msgtype = b"getdata"

    def __init__(self, inv=None):
        if inv is None:
            inv = []
        self.inv = inv

    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.inv)

    def __repr__(self):
        return "msg_getdata(inv=%s)" % (repr(self.inv))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_getblocks:
    """``getblocks`` message: block locator plus an optional stop hash."""
    __slots__ = ("locator", "hashstop")
    msgtype = b"getblocks"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0  # 0 = "as many blocks as possible"

    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)

    def serialize(self):
        return self.locator.serialize() + ser_uint256(self.hashstop)

    def __repr__(self):
        return "msg_getblocks(locator=%s hashstop=%064x)" \
            % (repr(self.locator), self.hashstop)
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_tx:
    """``tx`` message carrying a single transaction."""
    __slots__ = ("tx",)
    msgtype = b"tx"

    def __init__(self, tx=None):
        # None means "wrap a fresh empty transaction"
        self.tx = CTransaction() if tx is None else tx

    def deserialize(self, f):
        self.tx.deserialize(f)

    def serialize(self):
        return self.tx.serialize()

    def __repr__(self):
        return "msg_tx(tx=%s)" % (repr(self.tx))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_block:
    """``block`` message carrying a full block."""
    __slots__ = ("block",)
    msgtype = b"block"

    def __init__(self, block=None):
        # None means "wrap a fresh empty block"
        self.block = CBlock() if block is None else block

    def deserialize(self, f):
        self.block.deserialize(f)

    def serialize(self):
        return self.block.serialize()

    def __repr__(self):
        return "msg_block(block=%s)" % (repr(self.block))
|
|
|
|
|
|
|
|
# for cases where a user needs tighter control over what is sent over the wire
|
2023-08-17 19:32:05 +02:00
|
|
|
# note that the user must supply the name of the msgtype, and the data
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_generic:
    """Raw-payload escape hatch: the caller supplies both the wire msgtype
    and the already-serialized payload bytes."""
    # Bug fix: __slots__ was the bare string "data", which declares only a
    # "data" slot, so __init__'s assignment to self.msgtype raised
    # AttributeError on every instantiation. Both attributes must be listed.
    __slots__ = ("msgtype", "data")

    def __init__(self, msgtype, data=None):
        self.msgtype = msgtype
        self.data = data

    def serialize(self):
        # payload is sent verbatim; no framing added here
        return self.data

    def __repr__(self):
        return "msg_generic()"
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
|
|
|
|
class msg_getaddr:
    """Empty ``getaddr`` message requesting known peer addresses."""
    __slots__ = ()
    msgtype = b"getaddr"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_getaddr()"
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_ping:
    """``ping`` message carrying a 64-bit nonce to be echoed in ``pong``."""
    __slots__ = ("nonce",)
    msgtype = b"ping"

    def __init__(self, nonce=0):
        self.nonce = nonce

    def deserialize(self, f):
        (self.nonce,) = struct.unpack("<Q", f.read(8))

    def serialize(self):
        return struct.pack("<Q", self.nonce)

    def __repr__(self):
        return "msg_ping(nonce=%08x)" % self.nonce
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_pong:
    """``pong`` message echoing the nonce of a previously received ``ping``."""
    __slots__ = ("nonce",)
    msgtype = b"pong"

    def __init__(self, nonce=0):
        self.nonce = nonce

    def deserialize(self, f):
        (self.nonce,) = struct.unpack("<Q", f.read(8))

    def serialize(self):
        return struct.pack("<Q", self.nonce)

    def __repr__(self):
        return "msg_pong(nonce=%08x)" % self.nonce
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_mempool:
    """Empty ``mempool`` message requesting the peer's mempool contents."""
    __slots__ = ()
    msgtype = b"mempool"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_mempool()"
|
|
|
|
|
2018-10-27 13:17:59 +02:00
|
|
|
class msg_notfound:
    """``notfound`` message: inventory entries the peer could not supply."""
    __slots__ = ("vec", )
    msgtype = b"notfound"

    def __init__(self, vec=None):
        # any falsy argument (None or empty) starts a fresh list
        self.vec = vec if vec else []

    def deserialize(self, f):
        self.vec = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.vec)

    def __repr__(self):
        return "msg_notfound(vec=%s)" % (repr(self.vec))
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_sendheaders:
    """Empty BIP 130 ``sendheaders`` message requesting header announcements."""
    __slots__ = ()
    msgtype = b"sendheaders"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_sendheaders()"
|
|
|
|
|
|
|
|
|
2022-03-11 20:39:12 +01:00
|
|
|
class msg_sendheaders2:
    """Empty ``sendheaders2`` message requesting compressed-header announcements."""
    __slots__ = ()
    msgtype = b"sendheaders2"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_sendheaders2()"
|
|
|
|
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
# getheaders message has
|
|
|
|
# number of entries
|
|
|
|
# vector of hashes
|
|
|
|
# hash_stop (hash of last desired block header, 0 to get as many as possible)
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_getheaders:
    """``getheaders`` message: block locator plus optional stop hash."""
    __slots__ = ("hashstop", "locator",)
    msgtype = b"getheaders"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0  # 0 = "as many headers as possible"

    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)

    def serialize(self):
        return self.locator.serialize() + ser_uint256(self.hashstop)

    def __repr__(self):
        return "msg_getheaders(locator=%s, stop=%064x)" \
            % (repr(self.locator), self.hashstop)
|
|
|
|
|
|
|
|
|
2022-03-11 20:39:12 +01:00
|
|
|
# same as msg_getheaders, but to request the headers compressed
|
|
|
|
# same as msg_getheaders, but to request the headers compressed
class msg_getheaders2:
    """``getheaders2`` message: like getheaders but for compressed headers."""
    __slots__ = ("hashstop", "locator",)
    msgtype = b"getheaders2"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0  # 0 = "as many headers as possible"

    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)

    def serialize(self):
        return self.locator.serialize() + ser_uint256(self.hashstop)

    def __repr__(self):
        return "msg_getheaders2(locator=%s, stop=%064x)" \
            % (repr(self.locator), self.hashstop)
|
|
|
|
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
# headers message has
|
|
|
|
# <count> <vector of block headers>
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_headers:
    """``headers`` message: a vector of block headers."""
    __slots__ = ("headers",)
    msgtype = b"headers"

    def __init__(self, headers=None):
        self.headers = headers if headers is not None else []

    def deserialize(self, f):
        # comment in dashd indicates these should be deserialized as blocks
        # NOTE: parsed headers are appended to any existing list, not replacing it
        for blk in deser_vector(f, CBlock):
            self.headers.append(CBlockHeader(blk))

    def serialize(self):
        # headers are serialized through their CBlock form
        return ser_vector([CBlock(hdr) for hdr in self.headers])

    def __repr__(self):
        return "msg_headers(headers=%s)" % repr(self.headers)
|
2022-03-11 20:39:12 +01:00
|
|
|
|
|
|
|
|
|
|
|
# headers message has
|
|
|
|
# <count> <vector of compressed block headers>
|
|
|
|
# headers message has
# <count> <vector of compressed block headers>
class msg_headers2:
    """``headers2`` message: a vector of compressed block headers."""
    __slots__ = ("headers",)
    msgtype = b"headers2"

    def __init__(self, headers=None):
        self.headers = headers if headers is not None else []

    def deserialize(self, f):
        self.headers = deser_vector(f, CompressibleBlockHeader)
        # each header is uncompressed against the headers that precede it
        last_unique_versions = []
        for idx, hdr in enumerate(self.headers):
            hdr.uncompress(self.headers[:idx], last_unique_versions)

    def serialize(self):
        # each header is compressed against the headers that precede it
        last_unique_versions = []
        for idx, hdr in enumerate(self.headers):
            hdr.compress(self.headers[:idx], last_unique_versions)
        return ser_vector(self.headers)

    def __repr__(self):
        return "msg_headers2(headers=%s)" % repr(self.headers)
|
2020-03-30 14:21:47 +02:00
|
|
|
|
2020-03-30 21:27:54 +02:00
|
|
|
class msg_merkleblock:
    """BIP 37 ``merkleblock`` message: header plus partial merkle tree."""
    __slots__ = ("merkleblock",)
    msgtype = b"merkleblock"

    def __init__(self, merkleblock=None):
        # None means "wrap a fresh empty merkle block"
        self.merkleblock = CMerkleBlock() if merkleblock is None else merkleblock

    def deserialize(self, f):
        self.merkleblock.deserialize(f)

    def serialize(self):
        return self.merkleblock.serialize()

    def __repr__(self):
        return "msg_merkleblock(merkleblock=%s)" % (repr(self.merkleblock))
|
2020-03-30 21:27:54 +02:00
|
|
|
|
|
|
|
|
|
|
|
class msg_filterload:
    """BIP 37 ``filterload`` message installing a bloom filter on the peer."""
    __slots__ = ("data", "nHashFuncs", "nTweak", "nFlags")
    msgtype = b"filterload"

    def __init__(self, data=b'00', nHashFuncs=0, nTweak=0, nFlags=0):
        self.data = data
        self.nHashFuncs = nHashFuncs
        self.nTweak = nTweak
        self.nFlags = nFlags

    def deserialize(self, f):
        self.data = deser_string(f)
        (self.nHashFuncs,) = struct.unpack("<I", f.read(4))
        (self.nTweak,) = struct.unpack("<I", f.read(4))
        (self.nFlags,) = struct.unpack("<B", f.read(1))

    def serialize(self):
        return b"".join((
            ser_string(self.data),
            struct.pack("<I", self.nHashFuncs),
            struct.pack("<I", self.nTweak),
            struct.pack("<B", self.nFlags),
        ))

    def __repr__(self):
        return "msg_filterload(data={}, nHashFuncs={}, nTweak={}, nFlags={})".format(
            self.data, self.nHashFuncs, self.nTweak, self.nFlags)
|
|
|
|
|
2020-03-30 14:21:47 +02:00
|
|
|
|
Merge #18515: test: add BIP37 remote crash bug [CVE-2013-5700] test to p2p_filter.py
0ed2d8e07d3806d78d03a77d2153f22f9d733a07 test: add BIP37 remote crash bug [CVE-2013-5700] test to p2p_filter.py (Sebastian Falbesoner)
Pull request description:
Integrates the missing message type `filteradd` to the test framework and checks that the BIP37 implementation is not vulnerable to the "remote crash bug" [CVE-2013-5700](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2013-5700) anymore. Prior to v.0.8.4, it was possible to trigger a division-by-zero error on the following line in the function `CBloomFilter::Hash()`:
https://github.com/bitcoin/bitcoin/blob/f0d6487e290761a4fb03798240a351b5fddfdb38/src/bloom.cpp#L45
By setting a zero-length filter via `filterload`, `vData.size()` is 0, so the modulo operation above, called on any .insert() or .contains() operation then crashed the node. The test uses the approach of just sending an arbitrary `filteradd` message after, which calls `CBloomFilter::insert()` (and in turn `CBloomFilter::Hash()`) on the node. The vulnerability was fixed by commit https://github.com/bitcoin/bitcoin/commit/37c6389c5a0ca63ae3573440ecdfe95d28ad8f07 (an intentional covert fix, [according to gmaxwell](https://github.com/bitcoin/bitcoin/issues/18483#issuecomment-608224095)), which introduced flags `isEmpty`/`isFull` that wouldn't call the `Hash()` member function if `isFull` is true (set to true by default constructor).
To validate that the test fails if the implementation is vulnerable, one can simply set the flags to false in the member function `UpdateEmptyFull()` (that is called after a filter received via `filterload` is constructed), which activates the vulnerable code path calling `Hash` in any case on adding or testing for data in the filter:
```diff
diff --git a/src/bloom.cpp b/src/bloom.cpp
index bd6069b..ef294a3 100644
--- a/src/bloom.cpp
+++ b/src/bloom.cpp
@@ -199,8 +199,8 @@ void CBloomFilter::UpdateEmptyFull()
full &= vData[i] == 0xff;
empty &= vData[i] == 0;
}
- isFull = full;
- isEmpty = empty;
+ isFull = false;
+ isEmpty = false;
}
```
Resulting in:
```
$ ./p2p_filter.py
[...]
2020-04-03T14:38:59.593000Z TestFramework (INFO): Check that division-by-zero remote crash bug [CVE-2013-5700] is fixed
2020-04-03T14:38:59.695000Z TestFramework (ERROR): Assertion failed
[...]
[... some exceptions following ...]
```
ACKs for top commit:
naumenkogs:
utACK 0ed2d8e07d3806d78d03a77d2153f22f9d733a07
Tree-SHA512: 02d0253d13eab70c4bd007b0750c56a5a92d05d419d53033523eeb3ed80318bc95196ab90f7745ea3ac9ebae7caee3adbf2a055a40a4124e0915226e49018fe8
2020-04-05 15:17:50 +02:00
|
|
|
class msg_filteradd:
    """BIP 37 ``filteradd`` message inserting one element into the peer's
    bloom filter."""
    # Idiom fix: __slots__ was the bare string "data"; the tuple form matches
    # the rest of the file and avoids the classic one-element-tuple mistake.
    __slots__ = ("data",)
    msgtype = b"filteradd"

    def __init__(self, data):
        self.data = data

    def deserialize(self, f):
        self.data = deser_string(f)

    def serialize(self):
        # the element is a single length-prefixed byte string
        return ser_string(self.data)

    def __repr__(self):
        return "msg_filteradd(data={})".format(self.data)
|
|
|
|
|
|
|
|
|
2020-03-31 15:35:53 +02:00
|
|
|
class msg_filterclear:
    """Empty BIP 37 ``filterclear`` message removing the installed bloom filter."""
    __slots__ = ()
    msgtype = b"filterclear"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_filterclear()"
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_sendcmpct:
    """BIP 152 ``sendcmpct`` message negotiating compact-block relay."""
    __slots__ = ("announce", "version")
    msgtype = b"sendcmpct"

    def __init__(self, announce=False, version=1):
        self.announce = announce
        self.version = version

    def deserialize(self, f):
        (self.announce,) = struct.unpack("<?", f.read(1))
        (self.version,) = struct.unpack("<Q", f.read(8))

    def serialize(self):
        return struct.pack("<?", self.announce) + struct.pack("<Q", self.version)

    def __repr__(self):
        return "msg_sendcmpct(announce=%s, version=%lu)" % (self.announce, self.version)
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
|
|
|
|
class msg_cmpctblock:
    """BIP 152 ``cmpctblock`` message: header plus transaction short IDs."""
    __slots__ = ("header_and_shortids",)
    msgtype = b"cmpctblock"

    def __init__(self, header_and_shortids = None):
        self.header_and_shortids = header_and_shortids

    def deserialize(self, f):
        self.header_and_shortids = P2PHeaderAndShortIDs()
        self.header_and_shortids.deserialize(f)

    def serialize(self):
        return self.header_and_shortids.serialize()

    def __repr__(self):
        return "msg_cmpctblock(HeaderAndShortIDs=%s)" % repr(self.header_and_shortids)
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
|
|
|
|
class msg_getblocktxn:
    """BIP152 ``getblocktxn`` message: request missing transactions of a compact block."""
    __slots__ = ("block_txn_request",)
    msgtype = b"getblocktxn"

    def __init__(self):
        self.block_txn_request = None

    def deserialize(self, f):
        req = BlockTransactionsRequest()
        req.deserialize(f)
        self.block_txn_request = req

    def serialize(self):
        # delegate the whole payload to the contained request structure
        return self.block_txn_request.serialize()

    def __repr__(self):
        return "msg_getblocktxn(block_txn_request=%s)" % (repr(self.block_txn_request))
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
|
|
|
|
class msg_blocktxn:
    """BIP152 ``blocktxn`` message: transactions requested via getblocktxn."""
    __slots__ = ("block_transactions",)
    msgtype = b"blocktxn"

    def __init__(self):
        self.block_transactions = BlockTransactions()

    def deserialize(self, f):
        self.block_transactions.deserialize(f)

    def serialize(self):
        # delegate the whole payload to the contained structure
        return self.block_transactions.serialize()

    def __repr__(self):
        return "msg_blocktxn(block_transactions=%s)" % (repr(self.block_transactions))
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
|
|
|
|
class msg_getmnlistd:
    """Dash ``getmnlistd`` message: request a masternode-list diff between two blocks."""
    __slots__ = ("baseBlockHash", "blockHash",)
    msgtype = b"getmnlistd"

    def __init__(self, baseBlockHash=0, blockHash=0):
        self.baseBlockHash = baseBlockHash
        self.blockHash = blockHash

    def deserialize(self, f):
        self.baseBlockHash = deser_uint256(f)
        self.blockHash = deser_uint256(f)

    def serialize(self):
        # two 32-byte hashes: diff base first, then target block
        return b"".join((
            ser_uint256(self.baseBlockHash),
            ser_uint256(self.blockHash),
        ))

    def __repr__(self):
        return "msg_getmnlistd(baseBlockHash=%064x, blockHash=%064x)" % (self.baseBlockHash, self.blockHash)
|
|
|
|
|
|
|
|
# (llmqType, quorumHash) pair identifying a single LLMQ quorum
QuorumId = namedtuple('QuorumId', ['llmqType', 'quorumHash'])
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_mnlistdiff:
    """Dash ``mnlistdiff`` message: masternode-list diff between two blocks.

    Receive-only in the test framework (no serialize()). Carries the coinbase
    transaction with its merkle proof, added/removed masternodes, added/removed
    quorums, and per-signature quorum index sets for chain-locked sigs.
    """
    __slots__ = ("baseBlockHash", "blockHash", "merkleProof", "cbTx", "nVersion", "deletedMNs", "mnList", "deletedQuorums", "newQuorums", "quorumsCLSigs")
    msgtype = b"mnlistdiff"

    def __init__(self):
        self.baseBlockHash = 0
        self.blockHash = 0
        self.merkleProof = CPartialMerkleTree()
        self.cbTx = None
        self.nVersion = 0
        self.deletedMNs = []
        self.mnList = []
        self.deletedQuorums = []
        self.newQuorums = []
        # maps 96-byte BLS signature -> set of quorum indexes it covers
        self.quorumsCLSigs = {}

    def deserialize(self, f):
        self.nVersion = struct.unpack("<H", f.read(2))[0]
        self.baseBlockHash = deser_uint256(f)
        self.blockHash = deser_uint256(f)
        self.merkleProof.deserialize(f)
        self.cbTx = CTransaction()
        self.cbTx.deserialize(f)
        self.cbTx.rehash()
        self.deletedMNs = deser_uint256_vector(f)
        self.mnList = []
        for _ in range(deser_compact_size(f)):
            e = CSimplifiedMNListEntry()
            e.deserialize(f)
            self.mnList.append(e)

        self.deletedQuorums = []
        for _ in range(deser_compact_size(f)):
            llmqType = struct.unpack("<B", f.read(1))[0]
            quorumHash = deser_uint256(f)
            self.deletedQuorums.append(QuorumId(llmqType, quorumHash))
        self.newQuorums = []
        for _ in range(deser_compact_size(f)):
            qc = CFinalCommitment()
            qc.deserialize(f)
            self.newQuorums.append(qc)
        self.quorumsCLSigs = {}
        for _ in range(deser_compact_size(f)):
            signature = f.read(96)
            idx_set = set()
            for _ in range(deser_compact_size(f)):
                # explicit little-endian "<H" (was native-order "H"), matching
                # every other integer field in this file's wire format
                set_element = struct.unpack('<H', f.read(2))[0]
                idx_set.add(set_element)
            self.quorumsCLSigs[signature] = idx_set

    def __repr__(self):
        return "msg_mnlistdiff(baseBlockHash=%064x, blockHash=%064x)" % (self.baseBlockHash, self.blockHash)
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_clsig:
    """Dash ``clsig`` message: a ChainLock signature for a block at a given height."""
    __slots__ = ("height", "blockHash", "sig",)
    msgtype = b"clsig"

    def __init__(self, height=0, blockHash=0, sig=b'\x00' * 96):
        self.height = height
        self.blockHash = blockHash
        self.sig = sig

    def deserialize(self, f):
        self.height = struct.unpack('<i', f.read(4))[0]
        self.blockHash = deser_uint256(f)
        # 96-byte BLS signature, kept as raw bytes
        self.sig = f.read(96)

    def serialize(self):
        return b"".join((
            struct.pack('<i', self.height),
            ser_uint256(self.blockHash),
            self.sig,
        ))

    def __repr__(self):
        return "msg_clsig(height=%d, blockHash=%064x)" % (self.height, self.blockHash)
|
|
|
|
|
|
|
|
|
2021-10-05 19:42:34 +02:00
|
|
|
class msg_isdlock:
    """Dash ``isdlock`` message: deterministic InstantSend lock for a transaction.

    Covers the locked tx, the outpoints it spends, the quorum cycle hash and a
    96-byte BLS signature.
    """
    __slots__ = ("nVersion", "inputs", "txid", "cycleHash", "sig")
    msgtype = b"isdlock"

    def __init__(self, nVersion=1, inputs=None, txid=0, cycleHash=0, sig=b'\x00' * 96):
        self.nVersion = nVersion
        # default handled via None to avoid the mutable-default-argument pitfall
        self.inputs = inputs if inputs is not None else []
        self.txid = txid
        self.cycleHash = cycleHash
        self.sig = sig

    def deserialize(self, f):
        self.nVersion = struct.unpack("<B", f.read(1))[0]
        self.inputs = deser_vector(f, COutPoint)
        self.txid = deser_uint256(f)
        self.cycleHash = deser_uint256(f)
        self.sig = f.read(96)

    def serialize(self):
        return b"".join((
            struct.pack("<B", self.nVersion),
            ser_vector(self.inputs),
            ser_uint256(self.txid),
            ser_uint256(self.cycleHash),
            self.sig,
        ))

    def __repr__(self):
        return "msg_isdlock(nVersion=%d, inputs=%s, txid=%064x, cycleHash=%064x)" % \
            (self.nVersion, repr(self.inputs), self.txid, self.cycleHash)
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_qsigshare:
    """Dash ``qsigshare`` message: a batch of LLMQ signature shares."""
    __slots__ = ("sig_shares",)
    msgtype = b"qsigshare"

    def __init__(self, sig_shares=None):
        # default handled via None to avoid the mutable-default-argument pitfall
        self.sig_shares = sig_shares if sig_shares is not None else []

    def deserialize(self, f):
        self.sig_shares = deser_vector(f, CSigShare)

    def serialize(self):
        # single vector field, no accumulator buffer needed
        return ser_vector(self.sig_shares)

    def __repr__(self):
        return "msg_qsigshare(sigShares=%d)" % (len(self.sig_shares))
|
2021-01-28 23:33:18 +01:00
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_qwatch:
    """Dash ``qwatch`` message: subscribe to quorum-related traffic (empty payload)."""
    __slots__ = ()
    msgtype = b"qwatch"

    def __init__(self):
        pass

    def deserialize(self, f):
        # nothing to read: empty payload
        pass

    def serialize(self):
        # nothing to write: empty payload
        return b""

    def __repr__(self):
        return "msg_qwatch()"
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_qgetdata:
    """Dash ``qgetdata`` message: request quorum data (vvec and/or contributions).

    data_mask selects what to return: 0x01 = quorum verification vector,
    0x02 = encrypted contributions for protx_hash.
    """
    __slots__ = ("quorum_hash", "quorum_type", "data_mask", "protx_hash")
    msgtype = b"qgetdata"

    def __init__(self, quorum_hash=0, quorum_type=-1, data_mask=0, protx_hash=0):
        self.quorum_hash = quorum_hash
        self.quorum_type = quorum_type
        self.data_mask = data_mask
        self.protx_hash = protx_hash

    def deserialize(self, f):
        # wire order: type, hash, mask, protx hash
        self.quorum_type = struct.unpack("<B", f.read(1))[0]
        self.quorum_hash = deser_uint256(f)
        self.data_mask = struct.unpack("<H", f.read(2))[0]
        self.protx_hash = deser_uint256(f)

    def serialize(self):
        return b"".join((
            struct.pack("<B", self.quorum_type),
            ser_uint256(self.quorum_hash),
            struct.pack("<H", self.data_mask),
            ser_uint256(self.protx_hash),
        ))

    def __repr__(self):
        return "msg_qgetdata(quorum_hash=%064x, quorum_type=%d, data_mask=%d, protx_hash=%064x)" % (
            self.quorum_hash,
            self.quorum_type,
            self.data_mask,
            self.protx_hash)
|
|
|
|
|
|
|
|
|
2018-09-27 17:06:40 +02:00
|
|
|
class msg_qdata:
    """Dash ``qdata`` message: response to qgetdata.

    On error == 0 the payload carries, depending on data_mask bits:
    0x01 -> quorum verification vector, 0x02 -> encrypted contributions.
    """
    __slots__ = ("quorum_hash", "quorum_type", "data_mask", "protx_hash", "error", "quorum_vvec", "enc_contributions",)
    msgtype = b"qdata"

    def __init__(self):
        self.quorum_type = 0
        self.quorum_hash = 0
        self.data_mask = 0
        self.protx_hash = 0
        self.error = 0
        self.quorum_vvec = []
        self.enc_contributions = []

    def deserialize(self, f):
        self.quorum_type = struct.unpack("<B", f.read(1))[0]
        self.quorum_hash = deser_uint256(f)
        self.data_mask = struct.unpack("<H", f.read(2))[0]
        self.protx_hash = deser_uint256(f)
        self.error = struct.unpack("<B", f.read(1))[0]
        # optional sections are only present on success
        if self.error == 0:
            if self.data_mask & 0x01:
                self.quorum_vvec = deser_vector(f, CBLSPublicKey)
            if self.data_mask & 0x02:
                self.enc_contributions = deser_vector(f, CBLSIESEncryptedSecretKey)

    def serialize(self):
        parts = [
            struct.pack("<B", self.quorum_type),
            ser_uint256(self.quorum_hash),
            struct.pack("<H", self.data_mask),
            ser_uint256(self.protx_hash),
            struct.pack("<B", self.error),
        ]
        # optional sections are only written on success
        if self.error == 0:
            if self.data_mask & 0x01:
                parts.append(ser_vector(self.quorum_vvec))
            if self.data_mask & 0x02:
                parts.append(ser_vector(self.enc_contributions))
        return b"".join(parts)

    def __repr__(self):
        return "msg_qdata(error=%d, quorum_vvec=%d, enc_contributions=%d)" % (self.error, len(self.quorum_vvec),
                                                                              len(self.enc_contributions))
|
2021-09-16 16:01:04 +02:00
|
|
|
|
|
|
|
class msg_getcfilters:
    """BIP157 ``getcfilters`` message: request compact block filters for a range."""
    __slots__ = ("filter_type", "start_height", "stop_hash")
    msgtype = b"getcfilters"

    def __init__(self, filter_type=None, start_height=None, stop_hash=None):
        self.filter_type = filter_type
        self.start_height = start_height
        self.stop_hash = stop_hash

    def deserialize(self, f):
        self.filter_type = struct.unpack("<B", f.read(1))[0]
        self.start_height = struct.unpack("<I", f.read(4))[0]
        self.stop_hash = deser_uint256(f)

    def serialize(self):
        return b"".join((
            struct.pack("<B", self.filter_type),
            struct.pack("<I", self.start_height),
            ser_uint256(self.stop_hash),
        ))

    def __repr__(self):
        return "msg_getcfilters(filter_type={:#x}, start_height={}, stop_hash={:x})".format(
            self.filter_type, self.start_height, self.stop_hash)
|
|
|
|
|
|
|
|
class msg_cfilter:
    """BIP157 ``cfilter`` message: one compact block filter."""
    __slots__ = ("filter_type", "block_hash", "filter_data")
    msgtype = b"cfilter"

    def __init__(self, filter_type=None, block_hash=None, filter_data=None):
        self.filter_type = filter_type
        self.block_hash = block_hash
        self.filter_data = filter_data

    def deserialize(self, f):
        self.filter_type = struct.unpack("<B", f.read(1))[0]
        self.block_hash = deser_uint256(f)
        self.filter_data = deser_string(f)

    def serialize(self):
        return b"".join((
            struct.pack("<B", self.filter_type),
            ser_uint256(self.block_hash),
            ser_string(self.filter_data),
        ))

    def __repr__(self):
        return "msg_cfilter(filter_type={:#x}, block_hash={:x})".format(
            self.filter_type, self.block_hash)
|
|
|
|
|
2021-09-16 15:58:52 +02:00
|
|
|
class msg_getcfheaders:
    """BIP157 ``getcfheaders`` message: request compact filter headers for a range."""
    __slots__ = ("filter_type", "start_height", "stop_hash")
    msgtype = b"getcfheaders"

    def __init__(self, filter_type=None, start_height=None, stop_hash=None):
        self.filter_type = filter_type
        self.start_height = start_height
        self.stop_hash = stop_hash

    def deserialize(self, f):
        self.filter_type = struct.unpack("<B", f.read(1))[0]
        self.start_height = struct.unpack("<I", f.read(4))[0]
        self.stop_hash = deser_uint256(f)

    def serialize(self):
        return b"".join((
            struct.pack("<B", self.filter_type),
            struct.pack("<I", self.start_height),
            ser_uint256(self.stop_hash),
        ))

    def __repr__(self):
        return "msg_getcfheaders(filter_type={:#x}, start_height={}, stop_hash={:x})".format(
            self.filter_type, self.start_height, self.stop_hash)
|
|
|
|
|
|
|
|
class msg_cfheaders:
    """BIP157 ``cfheaders`` message: filter hashes plus the previous filter header."""
    __slots__ = ("filter_type", "stop_hash", "prev_header", "hashes")
    msgtype = b"cfheaders"

    def __init__(self, filter_type=None, stop_hash=None, prev_header=None, hashes=None):
        self.filter_type = filter_type
        self.stop_hash = stop_hash
        self.prev_header = prev_header
        self.hashes = hashes

    def deserialize(self, f):
        self.filter_type = struct.unpack("<B", f.read(1))[0]
        self.stop_hash = deser_uint256(f)
        self.prev_header = deser_uint256(f)
        self.hashes = deser_uint256_vector(f)

    def serialize(self):
        return b"".join((
            struct.pack("<B", self.filter_type),
            ser_uint256(self.stop_hash),
            ser_uint256(self.prev_header),
            ser_uint256_vector(self.hashes),
        ))

    def __repr__(self):
        return "msg_cfheaders(filter_type={:#x}, stop_hash={:x})".format(
            self.filter_type, self.stop_hash)
|
|
|
|
|
2021-09-19 06:31:43 +02:00
|
|
|
class msg_getcfcheckpt:
    """BIP157 ``getcfcheckpt`` message: request evenly spaced filter-header checkpoints."""
    __slots__ = ("filter_type", "stop_hash")
    msgtype = b"getcfcheckpt"

    def __init__(self, filter_type=None, stop_hash=None):
        self.filter_type = filter_type
        self.stop_hash = stop_hash

    def deserialize(self, f):
        self.filter_type = struct.unpack("<B", f.read(1))[0]
        self.stop_hash = deser_uint256(f)

    def serialize(self):
        return b"".join((
            struct.pack("<B", self.filter_type),
            ser_uint256(self.stop_hash),
        ))

    def __repr__(self):
        return "msg_getcfcheckpt(filter_type={:#x}, stop_hash={:x})".format(
            self.filter_type, self.stop_hash)
|
|
|
|
|
|
|
|
class msg_cfcheckpt:
    """BIP157 ``cfcheckpt`` message: checkpointed compact-filter headers."""
    __slots__ = ("filter_type", "stop_hash", "headers")
    msgtype = b"cfcheckpt"

    def __init__(self, filter_type=None, stop_hash=None, headers=None):
        self.filter_type = filter_type
        self.stop_hash = stop_hash
        self.headers = headers

    def deserialize(self, f):
        self.filter_type = struct.unpack("<B", f.read(1))[0]
        self.stop_hash = deser_uint256(f)
        self.headers = deser_uint256_vector(f)

    def serialize(self):
        return b"".join((
            struct.pack("<B", self.filter_type),
            ser_uint256(self.stop_hash),
            ser_uint256_vector(self.headers),
        ))

    def __repr__(self):
        return "msg_cfcheckpt(filter_type={:#x}, stop_hash={:x})".format(
            self.filter_type, self.stop_hash)
|
2024-10-20 11:22:25 +02:00
|
|
|
|
|
|
|
class msg_sendtxrcncl:
    """``sendtxrcncl`` message (Erlay, BIP330): announce tx-reconciliation support.

    version -- reconciliation protocol version
    salt    -- peer's half of the short-id salt
    """
    __slots__ = ("version", "salt")
    msgtype = b"sendtxrcncl"

    def __init__(self):
        self.version = 0
        self.salt = 0

    def deserialize(self, f):
        (self.version,) = struct.unpack("<I", f.read(4))
        (self.salt,) = struct.unpack("<Q", f.read(8))

    def serialize(self):
        # 4-byte version followed by 8-byte salt, both little-endian
        return b"".join((
            struct.pack("<I", self.version),
            struct.pack("<Q", self.salt),
        ))

    def __repr__(self):
        return "msg_sendtxrcncl(version=%lu, salt=%lu)" %\
            (self.version, self.salt)
|