#!/usr/bin/env python3
# Copyright (c) 2013-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from "protx list valid 1"
#
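# Typical usage (illustrative; "dash-cli" being on PATH and this script being
# saved as makeseeds.py are assumptions, not part of this file):
#
#   dash-cli protx list valid 1 | python3 makeseeds.py > nodes_main.txt
#
# A file containing the JSON output can also be passed as the first argument;
# without one, the JSON list is read from stdin.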

NSEEDS=512

MAX_SEEDS_PER_ASN=4

# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
}

import re
import sys
import dns.resolver
import collections
import json
import time
import multiprocessing

PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
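
# Illustrative examples (made-up values) of the three "service" string formats
# the patterns above accept:
#
#   PATTERN_IPV4:  "198.51.100.7:9999"
#   PATTERN_IPV6:  "[2001:db8::1]:9999"
#   PATTERN_ONION: "abcdefghij234567.onion:9999"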

def parseip(ip):
    '''Parse an "address:port" service string; return None if it is invalid.'''
    m = PATTERN_IPV4.match(ip)
    sortkey = None
    ipnum = None
    if m is None:
        m = PATTERN_IPV6.match(ip)
        if m is None:
            m = PATTERN_ONION.match(ip)
            if m is None:
                return None
            else:
                net = 'onion'
                ipstr = sortkey = m.group(1)
                port = int(m.group(2))
        else:
            net = 'ipv6'
            if m.group(1) in ['::']: # Not interested in localhost
                return None
            ipstr = m.group(1)
            sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
            port = int(m.group(2))
    else:
        # Do IPv4 sanity check
        ipnum = 0
        for i in range(0,4):
            if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
                return None
            ipnum = ipnum + (int(m.group(i+2)) << (8*(3-i)))
        if ipnum == 0:
            return None
        net = 'ipv4'
        sortkey = ipnum
        ipstr = m.group(1)
        port = int(m.group(6))

    return {
        "net": net,
        "ip": ipstr,
        "port": port,
        "ipnum": ipnum,
        "sortkey": sortkey
    }
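
# For example (illustrative input), parseip("198.51.100.7:9999") returns:
#   {'net': 'ipv4', 'ip': '198.51.100.7', 'port': 9999,
#    'ipnum': 3325256711, 'sortkey': 3325256711}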

def filtermulticollateralhash(mns):
    '''Filter out MNs sharing the same collateral hash'''
    hist = collections.defaultdict(list)
    for mn in mns:
        hist[mn['collateralHash']].append(mn)
    return [mn for mn in mns if len(hist[mn['collateralHash']]) == 1]

def filtermulticollateraladdress(mns):
    '''Filter out MNs sharing the same collateral address'''
    hist = collections.defaultdict(list)
    for mn in mns:
        hist[mn['collateralAddress']].append(mn)
    return [mn for mn in mns if len(hist[mn['collateralAddress']]) == 1]

def filtermultipayoutaddress(mns):
    '''Filter out MNs sharing the same payout address'''
    hist = collections.defaultdict(list)
    for mn in mns:
        hist[mn['state']['payoutAddress']].append(mn)
    return [mn for mn in mns if len(hist[mn['state']['payoutAddress']]) == 1]
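
# Note: these filters drop every MN involved in a collision -- if two
# masternodes share e.g. the same payout address, neither of them is kept,
# rather than keeping one of the two.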

def resolveasn(resolver, ip):
    answer = resolver.query('.'.join(reversed(ip.split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer
    asn = int([x.to_text() for x in answer][0].split('\"')[1].split(' ')[0])
    return asn
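
# resolveasn() uses the Team Cymru IP-to-ASN DNS zone: for 198.51.100.7 it
# queries the TXT record of "7.100.51.198.origin.asn.cymru.com" and takes the
# leading AS number from a reply of (roughly) the form
# "<ASN> | <prefix> | <country> | <registry> | <date>".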

# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
    # Sift out ips by type
    ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
    ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
    ips_onion = [ip for ip in ips if ip['net'] == 'onion']

    my_resolver = dns.resolver.Resolver()

    pool = multiprocessing.Pool(processes=16)

    # OpenDNS servers
    my_resolver.nameservers = ['208.67.222.222', '208.67.220.220']

    # Resolve ASNs in parallel
    asns = [pool.apply_async(resolveasn, args=(my_resolver, ip['ip'])) for ip in ips_ipv4]

    # Filter IPv4 by ASN: greedily keep at most max_per_asn addresses per ASN,
    # and stop once max_total addresses have been collected.
    result = []
    asn_count = {}
    for i in range(len(ips_ipv4)):
        ip = ips_ipv4[i]
        if len(result) == max_total:
            break
        try:
            asn = asns[i].get()
            if asn not in asn_count:
                asn_count[asn] = 0
            if asn_count[asn] == max_per_asn:
                continue
            asn_count[asn] += 1
            result.append(ip)
        except Exception:
            sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')

    # TODO: filter IPv6 by ASN

    # Add back non-IPv4
    result.extend(ips_ipv6)
    result.extend(ips_onion)
    return result
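
# Illustrative shape of the "protx list valid 1" JSON this script consumes
# (values are made up; only the fields accessed below are shown):
#
# [
#   {
#     "collateralHash": "64a6...",
#     "collateralAddress": "Xcol...",
#     "confirmations": 12345,
#     "state": {
#       "service": "198.51.100.7:9999",
#       "PoSeBanHeight": -1,
#       "payoutAddress": "Xpay..."
#     }
#   },
#   ...
# ]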
def main():
    # This expects the JSON as output by "protx list valid 1"
    if len(sys.argv) > 1:
        with open(sys.argv[1], 'r') as f:
            mns = json.load(f)
    else:
        mns = json.load(sys.stdin)

    # Skip PoSe banned MNs
    mns = [mn for mn in mns if mn['state']['PoSeBanHeight'] == -1]
    # Skip MNs with < 10000 confirmations
    mns = [mn for mn in mns if mn['confirmations'] >= 10000]
    # Filter out MNs which are definitely from the same person/operator
    mns = filtermulticollateralhash(mns)
    mns = filtermulticollateraladdress(mns)
    mns = filtermultipayoutaddress(mns)
    # Extract IPs, dropping any service string that could not be parsed
    ips = [parseip(mn['state']['service']) for mn in mns]
    ips = [ip for ip in ips if ip is not None]
    # Look up ASNs and limit results, both per ASN and globally.
    ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
    # Sort the results by IP address (for deterministic output).
    ips.sort(key=lambda x: (x['net'], x['sortkey']))

    for ip in ips:
        if ip['net'] == 'ipv6':
            print('[%s]:%i' % (ip['ip'], ip['port']))
        else:
            print('%s:%i' % (ip['ip'], ip['port']))

if __name__ == '__main__':
    main()