fix lint-python.sh after 11835 and 12295

Signed-off-by: Pasta <pasta@dashboost.org>
Pasta 2020-06-13 20:58:03 -05:00 committed by pasta
parent 89cf527e57
commit b40dc8f1ec
33 changed files with 34 additions and 62 deletions

View File

@@ -123,7 +123,6 @@ install:
- export HOST_SRC_DIR=$TRAVIS_BUILD_DIR
- export HOST_CACHE_DIR=$HOME/cache
- export TRAVIS_COMMIT_LOG=`git log --format=fuller -1`
- export PYTHON_DEBUG=1
- source ./ci/matrix.sh
- mkdir -p $HOST_CACHE_DIR/docker && mkdir -p $HOST_CACHE_DIR/ccache && mkdir -p $HOST_CACHE_DIR/depends && mkdir -p $HOST_CACHE_DIR/sdk-sources
# Keep this as it makes caching related debugging easier

View File

@@ -15,6 +15,7 @@ RUN apt-get update && apt-get install $APT_ARGS python3-pip python3-setuptools &
# Python stuff
RUN pip3 install pyzmq # really needed?
RUN pip3 install jinja2
RUN pip3 install flake8
# dash_hash
RUN git clone https://github.com/dashpay/dash_hash

View File

@@ -32,7 +32,7 @@ echo "Using socketevents mode: $SOCKETEVENTS"
EXTRA_ARGS="--dashd-arg=-socketevents=$SOCKETEVENTS"
set +e
./test/functional/test_runner.py --ci --coverage --failfast --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS $EXTRA_ARGS
./test/functional/test_runner.py --ci --combinedlogslen=4000 --coverage --failfast --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS $EXTRA_ARGS
RESULT=$?
set -e

View File

@@ -1,10 +1,6 @@
#!/usr/bin/python
#
try:
import gdb
except ImportError as e:
raise ImportError("This script must be run in GDB: ", str(e))
import sys
import os
sys.path.append(os.getcwd())

View File

@@ -1,10 +1,6 @@
#!/usr/bin/python
#
try:
import gdb
except ImportError as e:
raise ImportError("This script must be run in GDB: ", str(e))
import sys
import os
sys.path.append(os.getcwd())

View File

@@ -71,7 +71,6 @@ import argparse
import difflib
import io
import re
import string
import subprocess
import sys

View File

@@ -27,7 +27,7 @@
# E703 statement ends with a semicolon
# E714 test for object identity should be "is not"
# E721 do not compare types, use "isinstance()"
# E741 do not use variables named "l", "O", or "I"
# E741 do not use variables named "l", "O", or "I" # disabled
# E742 do not define classes named "l", "O", or "I"
# E743 do not define functions named "l", "O", or "I"
# F401 module imported but unused
@@ -53,11 +53,11 @@
# F823 local variable name … referenced before assignment
# F831 duplicate argument name in function definition
# W292 no newline at end of file
# W504 line break after binary operator
# W504 line break after binary operator # disabled
# W601 .has_key() is deprecated, use "in"
# W602 deprecated form of raising exception
# W603 "<>" is deprecated, use "!="
# W604 backticks are deprecated, use "repr()"
# W605 invalid escape sequence "x"
# W605 invalid escape sequence "x" # disabled
flake8 --ignore=B,C,E,F,I,N,W --select=E112,E113,E115,E116,E125,E131,E133,E223,E224,E271,E272,E273,E274,E275,E304,E306,E502,E702,E703,E714,E721,E741,E742,E743,F401,F402,F404,F406,F407,F601,F602,F621,F622,F631,F701,F702,F703,F704,F705,F706,F707,F811,F812,F822,F823,F831,W292,W504,W601,W602,W603,W604,W605 .
git ls-files "*.py" | xargs flake8 --ignore=B,C,E,F,I,N,W --select=E112,E113,E115,E116,E125,E131,E133,E223,E224,E271,E272,E273,E274,E275,E304,E306,E502,E702,E703,E714,E721,E742,E743,F401,F402,F404,F406,F407,F601,F602,F621,F622,F631,F701,F702,F703,F704,F705,F706,F707,F811,F812,F822,F823,F831,W292,W601,W602,W603,W604 #,E741,W504,W605
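
For reference, here is a minimal illustration (not part of this diff) of the three checks the new command leaves unselected; each statement below would trip the corresponding code if E741, W504, or W605 were still enabled:

l = [1, 2, 3]       # E741: ambiguous variable name "l"
total = (l[0] +
         l[1])      # W504: line break after binary operator
pattern = '\d+'     # W605: invalid escape sequence "\d" in a non-raw string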

View File

@@ -13,7 +13,6 @@ import re
import os
import os.path
import sys
import hashlib
import dash_hash
import datetime
import time

View File

@@ -20,7 +20,6 @@ import sys
import dns.resolver
import collections
import json
import time
import multiprocessing
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
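
For clarity, PATTERN_IPV4 captures the whole address in group 1, the four octets in groups 2-5, and the port in group 6; a small usage sketch (the sample address is made up):

m = PATTERN_IPV4.match("192.0.2.1:9999")
if m:
    ip, port = m.group(1), int(m.group(6))  # "192.0.2.1", 9999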

View File

@@ -283,8 +283,8 @@ class AddressIndexTest(BitcoinTestFramework):
assert_equal(mempool[2]["txid"], memtxid2)
assert_equal(mempool[2]["index"], 1)
self.nodes[2].generate(1);
self.sync_all();
self.nodes[2].generate(1)
self.sync_all()
mempool2 = self.nodes[2].getaddressmempool({"addresses": [address3]})
assert_equal(len(mempool2), 0)

View File

@@ -6,10 +6,8 @@
#
# Test deterministic masternodes
#
import sys
from test_framework.blocktools import create_block, create_coinbase, get_masternode_payment
from test_framework.mininode import CTransaction, ToHex, FromHex, CTxOut, COIN, CCbTx
from test_framework.mininode import CTransaction, ToHex, FromHex, COIN, CCbTx
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *

View File

@@ -2,11 +2,9 @@
# Copyright (c) 2015-2020 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from collections import namedtuple
from test_framework.mininode import *
from test_framework.test_framework import DashTestFramework
from test_framework.util import p2p_port, assert_equal, sync_blocks
from test_framework.util import assert_equal
'''
dip4-coinbasemerkleroots.py

View File

@@ -63,6 +63,7 @@ class GetBlockTemplateLPTest(BitcoinTestFramework):
# min_relay_fee is fee per 1000 bytes, which should be more than enough.
(txid, txhex, fee) = random_transaction(self.nodes, Decimal("1.1"), min_relay_fee, Decimal("0.001"), 20)
# after one minute, every 10 seconds the mempool is probed, so in 80 seconds it should have returned
def check():
self.bump_mocktime(1)
return not thr.is_alive()
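
The check() closure above is a polling predicate: per the comment, the mempool is probed every 10 seconds after the first minute, so the longpoll should return within roughly 80 seconds. A rough hand-rolled equivalent of the wait loop the framework performs around it (the timeout and sleep interval are assumptions, not values from this test):

import time

deadline = time.time() + 80
while not check():                  # bumps mocktime and asks whether the longpoll thread finished
    if time.time() > deadline:
        raise AssertionError("getblocktemplate longpoll did not return in time")
    time.sleep(0.5)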

View File

@@ -20,12 +20,11 @@ happened previously.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (assert_raises_rpc_error, connect_nodes, sync_blocks, assert_equal, set_node_times)
from test_framework.util import (assert_raises_rpc_error, connect_nodes, assert_equal, set_node_times)
import collections
import enum
import itertools
import sys
Call = enum.Enum("Call", "single multi")
Data = enum.Enum("Data", "address pub priv")

View File

@@ -17,7 +17,6 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes_bi,
sync_blocks,
)
class KeypoolRestoreTest(BitcoinTestFramework):

View File

@@ -6,7 +6,7 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, COIN
from test_framework.mininode import CTransaction
from io import BytesIO
def txFromHex(hexstring):

View File

@@ -4,7 +4,6 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import DashTestFramework
from test_framework.util import *
'''
llmq-dkgerrors.py

View File

@@ -5,11 +5,10 @@
import time
from decimal import Decimal
from test_framework import mininode
from test_framework.blocktools import get_masternode_payment, create_coinbase, create_block
from test_framework.mininode import *
from test_framework.test_framework import DashTestFramework
from test_framework.util import sync_blocks, sync_mempools, p2p_port, assert_raises_rpc_error
from test_framework.util import assert_raises_rpc_error
'''
llmq-is-cl-conflicts.py

View File

@@ -5,8 +5,7 @@
from test_framework.mininode import *
from test_framework.test_framework import DashTestFramework
from test_framework.util import sync_blocks, set_node_times, \
isolate_node, reconnect_isolated_node
from test_framework.util import set_node_times, isolate_node, reconnect_isolated_node
'''
llmq-is-retroactive.py

View File

@@ -11,7 +11,6 @@ if uploadtarget has been reached.
* Verify that the upload counters are reset after 24 hours.
"""
from collections import defaultdict
import time
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework

View File

@@ -9,7 +9,7 @@ and that it responds to getdata requests for blocks correctly:
- send a block within 288 + 2 of the tip
- disconnect peers who request blocks older than that."""
from test_framework.messages import CInv, msg_getdata, NODE_BLOOM, NODE_NETWORK_LIMITED, msg_verack
from test_framework.mininode import NetworkThread, P2PInterface, wait_until, mininode_lock, network_thread_start, network_thread_join
from test_framework.mininode import P2PInterface, wait_until, mininode_lock, network_thread_start, network_thread_join
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, disconnect_nodes, connect_nodes_bi, sync_blocks
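
The docstring above states the NODE_NETWORK_LIMITED contract: blocks within 288 (+2 of leeway) of the tip are served, and peers asking for anything deeper get disconnected. A hedged sketch of what such a request looks like from a test peer (peer and old_block_hash are placeholders, not names taken from this test):

getdata = msg_getdata()
getdata.inv.append(CInv(t=2, h=int(old_block_hash, 16)))  # t=2 is MSG_BLOCK
peer.send_message(getdata)  # a NODE_NETWORK_LIMITED node is expected to disconnect us here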

View File

@@ -54,7 +54,6 @@ Node1 is unused in tests 3-7:
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase, create_transaction
class AcceptBlockTest(BitcoinTestFramework):

View File

@@ -7,8 +7,6 @@
If a stale block more than a month old or its header are requested by a peer,
the node should pretend that it does not have it to avoid fingerprinting.
"""
import threading
import time
from test_framework.blocktools import (create_block, create_coinbase)

View File

@@ -5,7 +5,6 @@
"""Test block processing."""
import copy
import struct
import time
from test_framework.blocktools import create_block, create_coinbase, create_transaction, get_legacy_sigopcount_block
from test_framework.key import CECKey

View File

@@ -5,7 +5,7 @@
from test_framework.mininode import *
from test_framework.test_framework import DashTestFramework
from test_framework.util import isolate_node, sync_mempools, reconnect_isolated_node, assert_equal, \
from test_framework.util import isolate_node, reconnect_isolated_node, assert_equal, \
assert_raises_rpc_error
'''

View File

@@ -9,7 +9,6 @@ from test_framework.util import (
assert_equal,
connect_nodes_bi,
sync_chain,
sync_blocks,
)
def unidirectional_node_sync_via_rpc(node_src, node_dest):

View File

@@ -13,7 +13,6 @@ from test_framework.util import (
)
import json
import os
import time
TESTSDIR = os.path.dirname(os.path.realpath(__file__))

View File

@@ -103,7 +103,6 @@ from test_framework.mininode import (
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
sync_blocks,
wait_until,
)

View File

@@ -15,8 +15,6 @@ from test_framework.util import (
assert_greater_than_or_equal,
connect_nodes,
satoshi_round,
sync_blocks,
sync_mempools,
)
# Construct 2 trivial P2SH's and the ScriptSigs that spend them

View File

@@ -3,7 +3,6 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import connect_nodes, wait_until

View File

@@ -198,6 +198,7 @@ def main():
epilog='''
Help text and arguments for individual test script:''',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--combinedlogslen', '-c', type=int, default=0, help='print a combined log (of length n lines) from all test nodes and test framework to the console on failure.')
parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface')
parser.add_argument('--ci', action='store_true', help='Run checks and code that are usually only enabled in a continuous integration environment')
parser.add_argument('--exclude', '-x', help='specify a comma-separated-list of scripts to exclude.')
@@ -303,9 +304,10 @@ def main():
args=passon_args,
failfast=args.failfast,
runs_ci=args.ci,
combined_logs_len=args.combinedlogslen,
)
def run_tests(*, test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_coverage=False, args=None, failfast=False, runs_ci):
def run_tests(*, test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_coverage=False, args=None, failfast=False, runs_ci, combined_logs_len=0):
args = args or []
# Warn if dashd is already running (unix only)
@@ -373,14 +375,14 @@ def run_tests(*, test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_c
print("\n%s%s%s failed, Duration: %s s\n" % (BOLD[1], test_result.name, BOLD[0], test_result.time))
print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n')
print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n')
if os.getenv("PYTHON_DEBUG", "") and os.path.isdir(testdir):
# Print the logs on travis, so they are preserved when the vm is disposed
print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], 4000, BOLD[0]))
if combined_logs_len and os.path.isdir(testdir):
# Print the final `combinedlogslen` lines of the combined logs
print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], combined_logs_len, BOLD[0]))
print('\n============')
print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0]))
print('============\n')
combined_logs, _ = subprocess.Popen([sys.executable, os.path.join(tests_dir, 'combine_logs.py'), '-c', testdir], universal_newlines=True, stdout=subprocess.PIPE).communicate()
print("\n".join(deque(combined_logs.splitlines(), 4000)))
print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))
if failfast:
logging.debug("Early exiting after test failure")
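
The tail of the combined log is taken with collections.deque's bounded constructor, which keeps only the last combined_logs_len entries; a tiny standalone illustration of that idiom (sample data made up):

from collections import deque

lines = ["line %d" % i for i in range(10)]
print("\n".join(deque(lines, 3)))  # prints only the last three entries: line 7, line 8, line 9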

View File

@@ -75,8 +75,8 @@ class TxIndexTest(BitcoinTestFramework):
# Check verbose raw transaction results
verbose = self.nodes[3].getrawtransaction(txid, 1)
assert_equal(verbose["vout"][0]["valueSat"], 50000000000 - tx_fee_sat);
assert_equal(verbose["vout"][0]["value"] * 100000000, 50000000000 - tx_fee_sat);
assert_equal(verbose["vout"][0]["valueSat"], 50000000000 - tx_fee_sat)
assert_equal(verbose["vout"][0]["value"] * 100000000, 50000000000 - tx_fee_sat)
self.log.info("Passed")