Mirror of https://github.com/dashpay/dash.git (synced 2024-12-24 11:32:46 +01:00)
Merge #12987: tests/tools: Enable additional Python flake8 rules for automatic linting via Travis
643aad17fa Enable additional flake8 rules (practicalswift)
f020aca297 Minor Python cleanups to make flake8 pass with the new rules enabled (practicalswift)

Pull request description:

  Enabled rules:

  ```
  * E242: tab after ','
  * E266: too many leading '#' for block comment
  * E401: multiple imports on one line
  * E402: module level import not at top of file
  * E701: multiple statements on one line (colon)
  * E901: SyntaxError: invalid syntax
  * E902: TokenError: EOF in multi-line string
  * F821: undefined name 'Foo'
  * W293: blank line contains whitespace
  * W606: 'async' and 'await' are reserved keywords starting with Python 3.7
  ```

  Note to reviewers:

  * In general we don't allow whitespace cleanups to existing code, but in order to allow Travis to check these rules a few smaller whitespace cleanups had to be made as part of this PR.
  * Use [this `?w=1` link](https://github.com/bitcoin/bitcoin/pull/12987/files?w=1) to show a diff without whitespace changes.

  Before this commit:

  ```
  $ flake8 -qq --statistics --ignore=B,C,E,F,I,N,W --select=E112,E113,E115,E116,E125,E131,E133,E223,E224,E242,E266,E271,E272,E273,E274,E275,E304,E306,E401,E402,E502,E701,E702,E703,E714,E721,E741,E742,E743,F401,E901,E902,F402,F404,F406,F407,F601,F602,F621,F622,F631,F701,F702,F703,F704,F705,F706,F707,F811,F812,F821,F822,F823,F831,F841,W292,W293,W504,W601,W602,W603,W604,W605,W606 .
  5     E266 too many leading '#' for block comment
  4     E401 multiple imports on one line
  6     E402 module level import not at top of file
  5     E701 multiple statements on one line (colon)
  1     F812 list comprehension redefines 'n' from line 159
  4     F821 undefined name 'ConnectionRefusedError'
  28    W293 blank line contains whitespace
  ```

  After this commit:

  ```
  $ flake8 -qq --statistics --ignore=B,C,E,F,I,N,W --select=E112,E113,E115,E116,E125,E131,E133,E223,E224,E242,E266,E271,E272,E273,E274,E275,E304,E306,E401,E402,E502,E701,E702,E703,E714,E721,E741,E742,E743,F401,E901,E902,F402,F404,F406,F407,F601,F602,F621,F622,F631,F701,F702,F703,F704,F705,F706,F707,F811,F812,F821,F822,F823,F831,F841,W292,W293,W504,W601,W602,W603,W604,W605,W606 .
  $
  ```

Tree-SHA512: fc7d5e752298a50d4248afc620ee2c173135b4ca008e48e02913ac968e5a24a5fd5396926047ec62f1d580d537434ccae01f249bb2f3338fa59dc630bf97ca7a

Signed-off-by: pasta <pasta@dashboost.org>
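The bulk of the diff below applies these rules mechanically: combined imports are split, module-level imports are moved to the top of the file, one-line `if x: y` statements are expanded, and whitespace-only blank lines are cleaned. As a rough sketch of the kinds of edits involved (illustrative only, not code taken from this commit):

```python
# Illustrative sketch only (not from this commit): typical fixes for the
# newly enabled rules.
# E401/E402: one import per line, and all imports at the top of the module.
import json
import sys

SIZE_OF_INT = 4  # module-level constants come after the imports


# E701: no statement on the same line as a colon.
def count_leading_zero_bytes(data):
    """Count leading zero bytes, stopping at the first non-zero byte."""
    n_pad = 0
    for byte in data:
        if byte == 0:
            n_pad += 1
        else:
            break
    return n_pad


if __name__ == '__main__':
    raw = sys.argv[1] if len(sys.argv) > 1 else "00000102"
    print(json.dumps({"input": raw,
                      "leading_zero_bytes": count_leading_zero_bytes(bytes.fromhex(raw))}))
```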
This commit is contained in:
  parent 83726d286e
  commit 17acd6b472
@@ -3,9 +3,9 @@
 
 import sys
 import os
-sys.path.append(os.getcwd())
 import stl_containers
 import simple_class_obj
+sys.path.append(os.getcwd())
 
 SIZE_OF_INT = 4
 SIZE_OF_BOOL = 1
@@ -9,8 +9,8 @@ import traceback
 import datetime
 import sys
 import os
-sys.path.append(os.getcwd())
 import common_helpers
+sys.path.append(os.getcwd())
 
 
 class LogSizeCommand (gdb.Command):
@@ -3,8 +3,8 @@
 
 import sys
 import os
-sys.path.append(os.getcwd())
 import common_helpers
+sys.path.append(os.getcwd())
 
 
 simple_types = ["CMasternode", "CMasternodeVerification",
@@ -7,8 +7,8 @@ except ImportError as e:
     raise ImportError("This script must be run in GDB: ", str(e))
 import sys
 import os
-sys.path.append(os.getcwd())
 import common_helpers
+sys.path.append(os.getcwd())
 
 
 def find_type(orig, name):
@@ -8,8 +8,8 @@ except ImportError as e:
 import traceback
 import sys
 import os
-sys.path.append(os.getcwd())
 import common_helpers
+sys.path.append(os.getcwd())
 
 
 class UsedSizeCommand (gdb.Command):
@@ -288,7 +288,7 @@ Arguments:
 def report_cmd(argv):
     if len(argv) == 2:
         sys.exit(REPORT_USAGE)
-
+
     base_directory = argv[2]
     if not os.path.exists(base_directory):
         sys.exit("*** bad <base_directory>: %s" % base_directory)
@@ -446,7 +446,7 @@ def print_file_action_message(filename, action):
 def update_cmd(argv):
     if len(argv) != 3:
         sys.exit(UPDATE_USAGE)
-
+
     base_directory = argv[2]
     if not os.path.exists(base_directory):
         sys.exit("*** bad base_directory: %s" % base_directory)
@@ -572,13 +572,13 @@ def insert_cmd(argv):
     _, extension = os.path.splitext(filename)
     if extension not in ['.h', '.cpp', '.cc', '.c', '.py']:
         sys.exit("*** cannot insert for file extension %s" % extension)
-
+
     if extension == '.py':
         style = 'python'
     else:
         style = 'cpp'
     exec_insert_header(filename, style)
-
+
 ################################################################################
 # UI
 ################################################################################
@@ -21,7 +21,8 @@ import argparse
 import hashlib
 import subprocess
 import sys
-import json,codecs
+import json
+import codecs
 try:
     from urllib.request import Request,urlopen
 except:
@@ -15,6 +15,8 @@
 # E133 closing bracket is missing indentation
 # E223 tab before operator
 # E224 tab after operator
+# E242 tab after ','
+# E266 too many leading '#' for block comment
 # E271 multiple spaces after keyword
 # E272 multiple spaces before keyword
 # E273 tab after keyword
@@ -22,7 +24,10 @@
 # E275 missing whitespace after keyword
 # E304 blank lines found after function decorator
 # E306 expected 1 blank line before a nested definition
+# E401 multiple imports on one line
+# E402 module level import not at top of file
 # E502 the backslash is redundant between brackets
+# E701 multiple statements on one line (colon)
 # E702 multiple statements on one line (semicolon)
 # E703 statement ends with a semicolon
 # E714 test for object identity should be "is not"
@@ -30,6 +35,8 @@
 # E741 do not use variables named "l", "O", or "I" # disabled
 # E742 do not define classes named "l", "O", or "I"
 # E743 do not define functions named "l", "O", or "I"
+# E901 SyntaxError: invalid syntax
+# E902 TokenError: EOF in multi-line string
 # F401 module imported but unused
 # F402 import module from line N shadowed by loop variable
 # F404 future import(s) name after other statements
@@ -49,15 +56,18 @@
 # F707 an except: block as not the last exception handler
 # F811 redefinition of unused name from line N
 # F812 list comprehension redefines 'foo' from line N
+# F821 undefined name 'Foo'
 # F822 undefined name name in __all__
 # F823 local variable name … referenced before assignment
 # F831 duplicate argument name in function definition
 # W292 no newline at end of file
+# W293 blank line contains whitespace
 # W504 line break after binary operator # disabled
 # W601 .has_key() is deprecated, use "in"
 # W602 deprecated form of raising exception
 # W603 "<>" is deprecated, use "!="
 # W604 backticks are deprecated, use "repr()"
 # W605 invalid escape sequence "x" # disabled
+# W606 'async' and 'await' are reserved keywords starting with Python 3.7
 
-git ls-files "*.py" | xargs flake8 --ignore=B,C,E,F,I,N,W --select=E112,E113,E115,E116,E125,E131,E133,E223,E224,E271,E272,E273,E274,E275,E304,E306,E502,E702,E703,E714,E721,E742,E743,F401,F402,F404,F406,F407,F601,F602,F621,F622,F631,F701,F702,F703,F704,F705,F706,F707,F811,F812,F822,F823,F831,W292,W601,W602,W603,W604 #,E741,W504,W605
+git ls-files "*.py" | xargs flake8 --ignore=B,C,E,F,I,N,W --select=E112,E113,E115,E116,E125,E131,E133,E223,E224,E242,E266,E271,E272,E273,E274,E275,E304,E306,E401,E402,E502,E701,E702,E703,E714,E721,E742,E743,F401,E901,E902,F402,F404,F406,F407,F601,F602,F621,F622,F631,F701,F702,F703,F704,F705,F706,F707,F811,F812,F821,F822,F823,F831,W292,W293,W601,W602,W603,W604,W606 #,E741,W504,W605
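The hunk above is the check Travis runs. A hypothetical local runner (not part of the repository) that mirrors the same `git ls-files "*.py" | xargs flake8 --select=...` invocation could look like this; it assumes git and flake8 are installed and on PATH:

```python
#!/usr/bin/env python3
# Hypothetical helper mirroring the lint-python invocation above.
import subprocess
import sys

# Same error classes the shell one-liner above selects.
SELECT = ("E112,E113,E115,E116,E125,E131,E133,E223,E224,E242,E266,E271,E272,E273,"
          "E274,E275,E304,E306,E401,E402,E502,E701,E702,E703,E714,E721,E742,E743,"
          "F401,E901,E902,F402,F404,F406,F407,F601,F602,F621,F622,F631,F701,F702,"
          "F703,F704,F705,F706,F707,F811,F812,F821,F822,F823,F831,W292,W293,W601,"
          "W602,W603,W604,W606")

# Collect tracked Python files (assumes no filenames containing whitespace).
files = subprocess.run(["git", "ls-files", "*.py"],
                       capture_output=True, text=True, check=True).stdout.split()

# Run flake8 with the selected rules and propagate its exit status.
result = subprocess.run(["flake8", "--ignore=B,C,E,F,I,N,W", "--select=" + SELECT] + files)
sys.exit(result.returncode)
```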
@@ -46,19 +46,19 @@ for folder in folders:
             file_path = os.path.join(absFolder, file)
             fileMetaMap = {'file' : file, 'osize': os.path.getsize(file_path), 'sha256Old' : file_hash(file_path)}
             fileMetaMap['contentHashPre'] = content_hash(file_path)
-
+
             try:
                 subprocess.call([pngcrush, "-brute", "-ow", "-rem", "gAMA", "-rem", "cHRM", "-rem", "iCCP", "-rem", "sRGB", "-rem", "alla", "-rem", "text", file_path],
                                 stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
             except:
                 print("pngcrush is not installed, aborting...")
                 sys.exit(0)
-
+
             #verify
             if "Not a PNG file" in subprocess.check_output([pngcrush, "-n", "-v", file_path], stderr=subprocess.STDOUT, universal_newlines=True):
                 print("PNG file "+file+" is corrupted after crushing, check out pngcursh version")
                 sys.exit(1)
-
+
             fileMetaMap['sha256New'] = file_hash(file_path)
             fileMetaMap['contentHashPost'] = content_hash(file_path)
 
@@ -77,5 +77,5 @@ for fileDict in outputArray:
     totalSaveBytes += fileDict['osize'] - fileDict['psize']
     noHashChange = noHashChange and (oldHash == newHash)
     print(fileDict['file']+"\n size diff from: "+str(fileDict['osize'])+" to: "+str(fileDict['psize'])+"\n old sha256: "+oldHash+"\n new sha256: "+newHash+"\n")
-
+
 print("completed. Checksum stable: "+str(noHashChange)+". Total reduction: "+str(totalSaveBytes)+" bytes")
@@ -21,7 +21,6 @@ from binascii import hexlify, unhexlify
 
 settings = {}
 
-##### Switch endian-ness #####
 def hex_switchEndian(s):
     """ Switches the endianness of a hex string (in pairs of hex chars) """
     pairList = [s[i:i+2].encode() for i in range(0, len(s), 2)]
@@ -21,7 +21,6 @@ import os.path
 
 settings = {}
 
-##### Switch endian-ness #####
 def hex_switchEndian(s):
     """ Switches the endianness of a hex string (in pairs of hex chars) """
     pairList = [s[i:i+2].encode() for i in range(0, len(s), 2)]
@@ -34,7 +34,8 @@ These should be pasted into `src/chainparamsseeds.h`.
 
 from base64 import b32decode
 from binascii import a2b_hex
-import sys, os
+import sys
+import os
 import re
 
 # ipv4 in ipv6 prefix
@@ -46,7 +47,7 @@ def name_to_ipv6(addr):
     if len(addr)>6 and addr.endswith('.onion'):
         vchAddr = b32decode(addr[0:-6], True)
         if len(vchAddr) != 16-len(pchOnionCat):
-            raise ValueError('Invalid onion %s' % s)
+            raise ValueError('Invalid onion %s' % vchAddr)
         return pchOnionCat + vchAddr
     elif '.' in addr: # IPv4
         return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
@@ -6,6 +6,13 @@
 # Generate seeds.txt from "protx list valid 1"
 #
 
+import re
+import sys
+import dns.resolver
+import collections
+import json
+import multiprocessing
+
 NSEEDS=512
 
 MAX_SEEDS_PER_ASN=4
@@ -15,13 +22,6 @@ MAX_SEEDS_PER_ASN=4
 SUSPICIOUS_HOSTS = {
 }
 
-import re
-import sys
-import dns.resolver
-import collections
-import json
-import multiprocessing
-
 PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
 PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
 PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
@@ -43,8 +43,10 @@ def b58encode(v):
     # leading 0-bytes in the input become leading-1s
     nPad = 0
     for c in v:
-        if c == 0: nPad += 1
-        else: break
+        if c == 0:
+            nPad += 1
+        else:
+            break
 
     return (__b58chars[0]*nPad) + result
 
@@ -98,7 +100,8 @@ def b58decode_chk(v):
 def get_bcaddress_version(strAddress):
     """ Returns None if strAddress is invalid. Otherwise returns integer version of address. """
     addr = b58decode_chk(strAddress)
-    if addr is None or len(addr)!=21: return None
+    if addr is None or len(addr)!=21:
+        return None
     version = addr[0]
     return ord(version)
 
@@ -74,12 +74,12 @@ def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt
         prefix = os.urandom(1)
     else:
         prefix = bytearray(template[0])
-
+
     if randomize_payload_size:
         payload = os.urandom(max(int(random.expovariate(0.5)), 50))
     else:
         payload = os.urandom(template[1])
-
+
     if corrupt_suffix:
         suffix = os.urandom(len(template[2]))
     else:
@@ -114,7 +114,8 @@ def gen_invalid_vectors():
                 yield val,
 
 if __name__ == '__main__':
-    import sys, json
+    import sys
+    import json
     iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
     try:
         uiter = iters[sys.argv[1]]
@@ -124,7 +125,7 @@ if __name__ == '__main__':
         count = int(sys.argv[2])
     except IndexError:
         count = 0
-
+
     data = list(islice(uiter(), count))
     json.dump(data, sys.stdout, sort_keys=True, indent=4)
     sys.stdout.write('\n')
@@ -25,7 +25,7 @@ salt = "".join([x[2:] for x in hexseq])
 
 #Create 32 byte b64 password
 password = base64.urlsafe_b64encode(os.urandom(32)).decode("utf-8")
-
+
 m = hmac.new(bytearray(salt, 'utf-8'), bytearray(password, 'utf-8'), "SHA256")
 result = m.hexdigest()
 
@@ -129,7 +129,7 @@ class BIP68Test(BitcoinTestFramework):
 
             # Track whether any sequence locks used should fail
             should_pass = True
-
+
             # Track whether this transaction was built with sequence locks
             using_sequence_locks = False
 
@@ -343,7 +343,7 @@ class BIP68Test(BitcoinTestFramework):
         tx2.rehash()
 
         self.nodes[0].sendrawtransaction(ToHex(tx2))
-
+
         # Now make an invalid spend of tx2 according to BIP68
         sequence_value = 100 # 100 block relative locktime
 
@@ -29,7 +29,7 @@ class TestP2PConn(P2PInterface):
         self.block_receive_map[message.block.sha256] += 1
 
 class MaxUploadTest(BitcoinTestFramework):
-
+
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 1
@@ -182,7 +182,7 @@ class ProxyTest(BitcoinTestFramework):
         assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr))
         assert_equal(n1['onion']['proxy_randomize_credentials'], False)
         assert_equal(n1['onion']['reachable'], True)
-
+
         n2 = networks_dict(self.nodes[2].getnetworkinfo())
         for net in ['ipv4','ipv6','onion']:
             assert_equal(n2[net]['proxy'], '%s:%i' % (self.conf2.addr))
@@ -30,6 +30,6 @@ class P2PMempoolTests(BitcoinTestFramework):
 
         #mininode must be disconnected at this point
         assert_equal(len(self.nodes[0].getpeerinfo()), 0)
-
+
 if __name__ == '__main__':
     P2PMempoolTests().main()
@@ -54,7 +54,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
         resp = conn.getresponse()
         assert_equal(resp.status, 200)
         conn.close()
-
+
         #Use new authpair to confirm both work
         headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
 
@@ -4,12 +4,14 @@
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Dummy Socks5 server for testing."""
 
-import socket, threading, queue
+import socket
+import threading
+import queue
 import logging
 
 logger = logging.getLogger("TestFramework.socks5")
 
-### Protocol constants
+# Protocol constants
 class Command:
     CONNECT = 0x01
 
@@ -18,7 +20,7 @@ class AddressType:
     DOMAINNAME = 0x03
     IPV6 = 0x04
 
-### Utility functions
+# Utility functions
 def recvall(s, n):
     """Receive n bytes from a socket, or fail."""
     rv = bytearray()
@@ -30,7 +32,7 @@ def recvall(s, n):
         n -= len(d)
     return rv
 
-### Implementation classes
+# Implementation classes
 class Socks5Configuration():
     """Proxy configuration."""
     def __init__(self):
@@ -141,7 +143,7 @@ class Socks5Server():
                 thread = threading.Thread(None, conn.handle)
                 thread.daemon = True
                 thread.start()
-
+
     def start(self):
         assert(not self.running)
         self.running = True
@@ -114,7 +114,7 @@ class WalletAccountsTest(BitcoinTestFramework):
             expected_account_balances[account.name] = 0
         assert_equal(node.listaccounts(), expected_account_balances)
         assert_equal(node.getbalance(""), 52000)
-
+
         # Check that setaccount can assign an account to a new unused address.
         for account in accounts:
             address = node.getaccountaddress("")
@@ -122,7 +122,7 @@ class WalletAccountsTest(BitcoinTestFramework):
             account.add_address(address)
             account.verify(node)
             assert(address not in node.getaddressesbyaccount(""))
-
+
         # Check that addmultisigaddress can assign accounts.
         for account in accounts:
             addresses = []
@@ -16,7 +16,7 @@ class ImportPrunedFundsTest(BitcoinTestFramework):
         self.nodes[0].generate(101)
 
         self.sync_all()
-
+
         # address
         address1 = self.nodes[0].getnewaddress()
         # pubkey
@@ -85,7 +85,8 @@ class TxnMallTest(BitcoinTestFramework):
         # Node0's balance should be starting balance, plus 500DASH for another
         # matured block, minus tx1 and tx2 amounts, and minus transaction fees:
         expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
-        if self.options.mine_block: expected += 500
+        if self.options.mine_block:
+            expected += 500
         expected += tx1["amount"] + tx1["fee"]
         expected += tx2["amount"] + tx2["fee"]
         assert_equal(self.nodes[0].getbalance(), expected)
@@ -120,7 +121,7 @@ class TxnMallTest(BitcoinTestFramework):
         tx1 = self.nodes[0].gettransaction(txid1)
         tx1_clone = self.nodes[0].gettransaction(txid1_clone)
         tx2 = self.nodes[0].gettransaction(txid2)
-
+
         # Verify expected confirmations
         assert_equal(tx1["confirmations"], -2)
         assert_equal(tx1_clone["confirmations"], 2)
@@ -27,7 +27,7 @@ class TxnMallTest(BitcoinTestFramework):
         for i in range(4):
             assert_equal(self.nodes[i].getbalance(), starting_balance)
             self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
-
+
         # Assign coins to foo and bar accounts:
         node0_address_foo = self.nodes[0].getnewaddress("foo")
         fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 12190)
@@ -64,7 +64,7 @@ class TxnMallTest(BitcoinTestFramework):
         # Create two spends using 1 500 DASH coin each
         txid1 = self.nodes[0].sendfrom("foo", node1_address, 400, 0)
         txid2 = self.nodes[0].sendfrom("bar", node1_address, 200, 0)
-
+
         # Have node0 mine a block:
         if (self.options.mine_block):
             self.nodes[0].generate(1)
@@ -76,7 +76,8 @@ class TxnMallTest(BitcoinTestFramework):
         # Node0's balance should be starting balance, plus 500DASH for another
         # matured block, minus 400, minus 200, and minus transaction fees:
         expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
-        if self.options.mine_block: expected += 500
+        if self.options.mine_block:
+            expected += 500
         expected += tx1["amount"] + tx1["fee"]
         expected += tx2["amount"] + tx2["fee"]
         assert_equal(self.nodes[0].getbalance(), expected)
@@ -93,7 +94,7 @@ class TxnMallTest(BitcoinTestFramework):
         else:
             assert_equal(tx1["confirmations"], 0)
             assert_equal(tx2["confirmations"], 0)
-
+
         # Now give doublespend and its parents to miner:
         self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
         self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])