Merge #9580: Fix various minor linearization script issues

b9d95bd Fix various minor linearization script issues (Douglas Roark)
Wladimir J. van der Laan 2017-02-02 10:19:06 +01:00 committed by Alexander Block
parent dcc251e2b7
commit 3dcbfc744a
3 changed files with 42 additions and 18 deletions

contrib/linearize/README.md

@@ -36,8 +36,11 @@ Required configuration file settings:
* `output`: Output directory for linearized `blocks/blkNNNNN.dat` output.
Optional config file setting for linearize-data:
* `file_timestamp`: Set each file's last-modified time to that of the most
recent block in that file.
* `debug_output`: If true, print optional diagnostic output (such as notices about
skipped blocks) that would otherwise be suppressed.
* `file_timestamp`: Set each file's last-accessed and last-modified times,
respectively, to the current time and to the timestamp of the most recent block
written to the script's blockchain.
* `genesis`: The hash of the genesis block in the blockchain. (default is '00000ffd590b1485b3caadc19b22e6379c733355108f107a430458cdf3407ab6', mainnet)
* `input`: dashd blocks/ directory containing blkNNNNN.dat
* `hashlist`: text file containing list of block hashes created by
@@ -45,6 +48,9 @@ linearize-hashes.py.
* `max_out_sz`: Maximum size for files created by the `output_file` option.
(Default: `1000*1000*1000 bytes`)
* `netmagic`: Network magic number. (default is 'bf0c6bbd', mainnet)
* `out_of_order_cache_sz`: If out-of-order blocks are read, they can be written to
a cache so that the input blockchain files don't have to be re-read later. This
option specifies the cache size. (Default: `100*1000*1000 bytes`)
* `rev_hash_bytes`: If true, the block hash list written by linearize-hashes.py
will be byte-reversed when read by linearize-data.py. See the linearize-hashes
entry for more information.
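For reference, the `file_timestamp` behaviour described above corresponds to the single `os.utime` call the script makes when it closes an output file; a minimal sketch of those semantics (helper name illustrative, not part of the script):

    import os
    import time

    def stamp_output_file(path, latest_block_time):
        # Per the `file_timestamp` option: access time = now,
        # modification time = timestamp of the newest block written to the file.
        os.utime(path, (int(time.time()), latest_block_time))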

contrib/linearize/example-linearize.cfg

@@ -1,4 +1,3 @@
# bitcoind RPC settings (linearize-hashes)
rpcuser=someuser
rpcpassword=somepassword
@@ -11,6 +10,10 @@ max_height=3130000
# bootstrap.dat input/output settings (linearize-data)
netmagic=bf0c6bbd
input=/home/example/.dashcore/blocks
# "output" option causes blockchain files to be written to the given location,
# with "output_file" ignored. If not used, "output_file" is used instead.
# output=/home/example/blockchain_directory
output_file=/home/example/Downloads/bootstrap.dat
hashlist=hashlist.txt
genesis=00000ffd590b1485b3caadc19b22e6379c733355108f107a430458cdf3407ab6
@@ -20,3 +23,12 @@ out_of_order_cache_sz = 10000000
# Do we want to reverse the hash bytes coming from getblockhash?
rev_hash_bytes = False
# On a new month, do we want to set the access and modify times of the new
# blockchain file?
file_timestamp = 0
# Do we want to split the blockchain files given a new month or specific height?
split_timestamp = 0
# Do we want debug printouts?
debug_output = False
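As a side note, both linearize scripts read this file with a simple key=value parser, so "out_of_order_cache_sz = 10000000" and "netmagic=bf0c6bbd" are handled the same way; a rough sketch of that behaviour (hypothetical helper, not the exact upstream code):

    def read_settings(path):
        settings = {}
        with open(path, encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                # skip blank lines, comments and anything that is not key=value
                if not line or line.startswith('#') or '=' not in line:
                    continue
                key, _, value = line.partition('=')
                settings[key.strip()] = value.strip()
        return settings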

contrib/linearize/linearize-data.py

@@ -137,7 +137,7 @@ class BlockDataCopier:
        if not self.fileOutput and ((self.outsz + blockSizeOnDisk) > self.maxOutSz):
            self.outF.close()
            if self.setFileTime:
                os.utime(outFname, (int(time.time()), highTS))
                os.utime(self.outFname, (int(time.time()), self.highTS))
            self.outF = None
            self.outFname = None
            self.outFn = self.outFn + 1
@@ -145,12 +145,12 @@ class BlockDataCopier:
        (blkDate, blkTS) = get_blk_dt(blk_hdr)
        if self.timestampSplit and (blkDate > self.lastDate):
            print("New month " + blkDate.strftime("%Y-%m") + " @ " + hash_str)
            lastDate = blkDate
            if outF:
                outF.close()
                if setFileTime:
                    os.utime(outFname, (int(time.time()), highTS))
            print("New month " + blkDate.strftime("%Y-%m") + " @ " + self.hash_str)
            self.lastDate = blkDate
            if self.outF:
                self.outF.close()
                if self.setFileTime:
                    os.utime(self.outFname, (int(time.time()), self.highTS))
                self.outF = None
                self.outFname = None
                self.outFn = self.outFn + 1
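For context on the month split above: the date comes from the nTime field of the 80-byte block header that `get_blk_dt` parses. A minimal sketch of that conversion, assuming the standard header layout (illustrative, not the upstream function):

    import datetime
    import struct

    def block_month_and_time(blk_hdr):
        # nTime is the little-endian uint32 at bytes 68..72 of the 80-byte header.
        (timestamp,) = struct.unpack("<I", blk_hdr[68:72])
        dt = datetime.datetime.fromtimestamp(timestamp)
        # Collapse to the first of the month so a simple ">" comparison
        # detects a month rollover, as in the timestampSplit check above.
        return (datetime.datetime(dt.year, dt.month, 1), timestamp)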
@@ -158,11 +158,11 @@ class BlockDataCopier:
        if not self.outF:
            if self.fileOutput:
                outFname = self.settings['output_file']
                self.outFname = self.settings['output_file']
            else:
                outFname = os.path.join(self.settings['output'], "blk%05d.dat" % self.outFn)
            print("Output file " + outFname)
            self.outF = open(outFname, "wb")
                self.outFname = os.path.join(self.settings['output'], "blk%05d.dat" % self.outFn)
            print("Output file " + self.outFname)
            self.outF = open(self.outFname, "wb")
        self.outF.write(inhdr)
        self.outF.write(blk_hdr)
@@ -226,13 +226,16 @@ class BlockDataCopier:
            blk_hdr = self.inF.read(80)
            inExtent = BlockExtent(self.inFn, self.inF.tell(), inhdr, blk_hdr, inLen)
            hash_str = calc_hash_str(blk_hdr)
            if not hash_str in blkmap:
                print("Skipping unknown block " + hash_str)
            self.hash_str = calc_hash_str(blk_hdr)
            if not self.hash_str in blkmap:
                # Because blocks can be written to files out-of-order as of 0.10, the script
                # may encounter blocks it doesn't know about. Treat as debug output.
                if settings['debug_output'] == 'true':
                    print("Skipping unknown block " + self.hash_str)
                self.inF.seek(inLen, os.SEEK_CUR)
                continue
            blkHeight = self.blkmap[hash_str]
            blkHeight = self.blkmap[self.hash_str]
            self.blkCountIn += 1
            if self.blkCountOut == blkHeight:
@@ -298,12 +301,15 @@ if __name__ == '__main__':
        settings['max_out_sz'] = 1000 * 1000 * 1000
    if 'out_of_order_cache_sz' not in settings:
        settings['out_of_order_cache_sz'] = 100 * 1000 * 1000
    if 'debug_output' not in settings:
        settings['debug_output'] = 'false'
    settings['max_out_sz'] = int(settings['max_out_sz'])
    settings['split_timestamp'] = int(settings['split_timestamp'])
    settings['file_timestamp'] = int(settings['file_timestamp'])
    settings['netmagic'] = unhexlify(settings['netmagic'].encode('utf-8'))
    settings['out_of_order_cache_sz'] = int(settings['out_of_order_cache_sz'])
    settings['debug_output'] = settings['debug_output'].lower()
    if 'output_file' not in settings and 'output' not in settings:
        print("Missing output file / directory")