fix linearization tool to make it work with Dash

UdjinM6 authored 2015-09-09 05:09:12 +03:00, committed by Holger Schinzel
parent 9d7fccf6a7
commit a31df6e4ae
4 changed files with 48 additions and 20 deletions

View File

@@ -1,6 +1,10 @@
 # Linearize
 Construct a linear, no-fork, best version of the blockchain.
 
+## Step 0: Install dash_hash
+https://github.com/UdjinM6/dash_hash
+
 ## Step 1: Download hash list
 
 	$ ./linearize-hashes.py linearize.cfg > hashlist.txt
@@ -20,13 +24,12 @@ Required configuration file settings:
 * "input": bitcoind blocks/ directory containing blkNNNNN.dat
 * "hashlist": text file containing list of block hashes, linearized-hashes.py
 output.
-* "output_file": bootstrap.dat
-or
-* "output": output directory for linearized blocks/blkNNNNN.dat output
+* "output_file" for bootstrap.dat or "output" for output directory for linearized blocks/blkNNNNN.dat output
 
 Optional config file setting for linearize-data:
-* "netmagic": network magic number
-* "max_out_sz": maximum output file size (default 1000*1000*1000)
+* "netmagic": network magic number (default is 'cee2caff', testnet)
+* "genesis": genesis block hash (default is '00000bafbc94add76cb75e2ec92894837288a481e5c005f6563d91623bf8bc2c', testnet)
+* "max_out_sz": maximum output file size (default 100 \* 1000 \* 1000)
 * "split_timestamp": Split files when a new month is first seen, in addition to
 reaching a maximum file size.
 * "file_timestamp": Set each file's last-modified time to that of the

View File

@@ -0,0 +1,20 @@
+# bitcoind RPC settings (linearize-hashes)
+rpcuser=someuser
+rpcpassword=somepassword
+host=127.0.0.1
+port=19998
+
+# bootstrap.dat hashlist settings (linearize-hashes)
+max_height=3130000
+
+# bootstrap.dat input/output settings (linearize-data)
+netmagic=cee2caff
+input=/home/example/.dash/testnet3/blocks
+output_file=/home/example/Downloads/bootstrap.dat
+hashlist=hashlist.txt
+split_year=1
+genesis=00000bafbc94add76cb75e2ec92894837288a481e5c005f6563d91623bf8bc2c
+
+# Maxmimum size in bytes of out-of-order blocks cache in memory
+out_of_order_cache_sz = 10000000
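
The RPC and max_height settings above are consumed by linearize-hashes.py, which asks the node for the hash of every block height and writes them one per line. A rough sketch of that round trip, not taken from the script itself (rpc_getblockhash is an illustrative helper; Python 2, matching the scripts' httplib usage, and it assumes a reachable dashd with these credentials):

    import base64
    import httplib
    import json

    def rpc_getblockhash(host, port, rpcuser, rpcpassword, height):
        # One JSON-RPC 'getblockhash' call against the dashd RPC port.
        conn = httplib.HTTPConnection(host, port, timeout=30)
        auth = base64.b64encode('%s:%s' % (rpcuser, rpcpassword))
        body = json.dumps({'version': '1.1', 'method': 'getblockhash',
                           'params': [height], 'id': height})
        conn.request('POST', '/', body,
                     {'Authorization': 'Basic ' + auth,
                      'Content-type': 'application/json'})
        return json.loads(conn.getresponse().read())['result']

    # hashlist.txt is just these hashes, one per line, from height 0 up to max_height.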

View File

@@ -6,14 +6,15 @@ host=127.0.0.1
 port=9998
 
 # bootstrap.dat hashlist settings (linearize-hashes)
-max_height=313000
+max_height=3130000
 
 # bootstrap.dat input/output settings (linearize-data)
-netmagic=f9beb4d9
-input=/home/example/.bitcoin/blocks
+netmagic=bf0c6bbd
+input=/home/example/.dash/blocks
 output_file=/home/example/Downloads/bootstrap.dat
 hashlist=hashlist.txt
 split_year=1
+genesis=00000ffd590b1485b3caadc19b22e6379c733355108f107a430458cdf3407ab6
 
 # Maxmimum size in bytes of out-of-order blocks cache in memory
-out_of_order_cache_sz = 100000000
+out_of_order_cache_sz = 10000000
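
The netmagic change above is what lets linearize-data recognize Dash block records: every record in blocks/blk*.dat starts with the 4 network magic bytes, then a little-endian 4-byte length, then the serialized block, so the Bitcoin magic f9beb4d9 never matches Dash data (bf0c6bbd on mainnet, cee2caff on testnet). The new genesis setting is used later as a sanity check that the Dash genesis hash appears in the hash list before copying begins. A minimal sketch of reading one such record (read_one_block is an illustrative helper, not from the script; the file must be opened in binary mode):

    import binascii
    import struct

    def read_one_block(f, netmagic_hex='bf0c6bbd'):
        # Read one magic-prefixed block record from an open blk*.dat file.
        magic = binascii.unhexlify(netmagic_hex)
        rec_hdr = f.read(8)
        if len(rec_hdr) < 8 or rec_hdr[:4] != magic:
            return None                       # wrong network, padding, or end of file
        (length,) = struct.unpack('<I', rec_hdr[4:8])
        return f.read(length)                 # raw serialized block, header first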

View File

@@ -16,6 +16,7 @@ import base64
 import httplib
 import sys
 import hashlib
+import dash_hash
 import datetime
 import time
 from collections import namedtuple
@@ -44,15 +45,17 @@ def wordreverse(in_buf):
 	return ''.join(out_words)
 
 def calc_hdr_hash(blk_hdr):
-	hash1 = hashlib.sha256()
-	hash1.update(blk_hdr)
-	hash1_o = hash1.digest()
-	hash2 = hashlib.sha256()
-	hash2.update(hash1_o)
-	hash2_o = hash2.digest()
-	return hash2_o
+	#hash1 = hashlib.sha256()
+	#hash1.update(blk_hdr)
+	#hash1_o = hash1.digest()
+	#hash2 = hashlib.sha256()
+	#hash2.update(hash1_o)
+	#hash2_o = hash2.digest()
+	#return hash2_o
+	pow_hash = dash_hash.getPoWHash(blk_hdr)
+	return pow_hash
 
 def calc_hash_str(blk_hdr):
 	hash = calc_hdr_hash(blk_hdr)
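
In Dash the block hash is the X11 proof-of-work hash of the 80-byte header rather than double-SHA256, which is why calc_hdr_hash() above now delegates to dash_hash.getPoWHash(). A small sketch of how its raw 32-byte result maps to the big-endian hex strings stored in hashlist.txt (the plain byte reversal stands in for the script's bufreverse/wordreverse pair; header_to_hash_str is an illustrative name):

    import binascii
    import dash_hash  # https://github.com/UdjinM6/dash_hash, installed in Step 0

    def header_to_hash_str(blk_hdr):
        # 80-byte raw header -> hex block hash as it appears in hashlist.txt
        pow_hash = dash_hash.getPoWHash(blk_hdr)   # 32 bytes, little-endian
        return binascii.hexlify(pow_hash[::-1])    # reverse bytes, hex-encode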
@@ -264,7 +267,9 @@ if __name__ == '__main__':
 	f.close()
 
 	if 'netmagic' not in settings:
-		settings['netmagic'] = 'f9beb4d9'
+		settings['netmagic'] = 'cee2caff'
+	if 'genesis' not in settings:
+		settings['genesis'] = '00000bafbc94add76cb75e2ec92894837288a481e5c005f6563d91623bf8bc2c'
 	if 'input' not in settings:
 		settings['input'] = 'input'
 	if 'hashlist' not in settings:
@@ -291,9 +296,8 @@ if __name__ == '__main__':
 	blkindex = get_block_hashes(settings)
 	blkmap = mkblockmap(blkindex)
 
-	if not "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f" in blkmap:
-		print("not found")
+	if not settings['genesis'] in blkmap:
+		print("genesis not found")
 	else:
 		BlockDataCopier(settings, blkindex, blkmap).run()