#!/usr/bin/env python3
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script every time you change one of the png files. Using pngcrush, it will optimize the png files, remove various color profiles, remove ancillary chunks (alla) and text chunks (text).
#pngcrush -brute -ow -rem gAMA -rem cHRM -rem iCCP -rem sRGB -rem alla -rem text
'''
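# Requires the pngcrush binary on PATH and the Pillow package for verification.
# The script can be run from anywhere inside the repository checkout; it locates
# the repository root itself via git (see basePath below).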
import os
import sys
import subprocess
import hashlib
from PIL import Image # pip3 install Pillow

def file_hash(filename):
    '''Return hash of raw file contents'''
    with open(filename, 'rb') as f:
        return hashlib.sha256(f.read()).hexdigest()

def content_hash(filename):
    '''Return hash of RGBA contents of image'''
    i = Image.open(filename)
    i = i.convert('RGBA')
    data = i.tobytes()
    return hashlib.sha256(data).hexdigest()

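# file_hash() tracks the raw bytes on disk (expected to change after crushing),
# while content_hash() checks that the decoded RGBA pixels stay identical.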
pngcrush = 'pngcrush'
git = 'git'
folders = [
    "src/qt/res/movies",
    "src/qt/res/icons",
    "src/qt/res/images",
    "share/pixmaps"
]
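# Resolve the repository root so the relative folder paths above become absolute.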
basePath = subprocess.check_output([git, 'rev-parse', '--show-toplevel'], universal_newlines=True).rstrip('\n')
totalSaveBytes = 0
noHashChange = True

outputArray = []
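# Crush every .png under the listed folders and record before/after stats.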
for folder in folders:
    absFolder = os.path.join(basePath, folder)
    for file in os.listdir(absFolder):
        extension = os.path.splitext(file)[1]
        if extension.lower() == '.png':
            print("optimizing {}...".format(file), end=' ')
            file_path = os.path.join(absFolder, file)
            fileMetaMap = {'file': file, 'osize': os.path.getsize(file_path), 'sha256Old': file_hash(file_path)}
            fileMetaMap['contentHashPre'] = content_hash(file_path)

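            # Crush in place; abort cleanly if the pngcrush binary is missing.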
            try:
                subprocess.call([pngcrush, "-brute", "-ow", "-rem", "gAMA", "-rem", "cHRM", "-rem", "iCCP", "-rem", "sRGB", "-rem", "alla", "-rem", "text", file_path],
                                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
            except OSError:
                print("pngcrush is not installed, aborting...")
                sys.exit(0)

            # Verify that the crushed file is still a valid PNG.
            if "Not a PNG file" in subprocess.check_output([pngcrush, "-n", "-v", file_path], stderr=subprocess.STDOUT, universal_newlines=True):
                print("PNG file "+file+" is corrupted after crushing, check your pngcrush version")
                sys.exit(1)

            fileMetaMap['sha256New'] = file_hash(file_path)
            fileMetaMap['contentHashPost'] = content_hash(file_path)

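            # The raw bytes may differ after crushing, but the decoded image content must not.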
            if fileMetaMap['contentHashPre'] != fileMetaMap['contentHashPost']:
                print("Image contents of PNG file {} before and after crushing don't match".format(file))
                sys.exit(1)

            fileMetaMap['psize'] = os.path.getsize(file_path)
            outputArray.append(fileMetaMap)
            print("done")

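# Print a per-file summary plus the total number of bytes saved.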
print("summary:\n+++++++++++++++++")
|
|
for fileDict in outputArray:
|
|
oldHash = fileDict['sha256Old']
|
|
newHash = fileDict['sha256New']
|
|
totalSaveBytes += fileDict['osize'] - fileDict['psize']
|
|
noHashChange = noHashChange and (oldHash == newHash)
|
|
print(fileDict['file']+"\n size diff from: "+str(fileDict['osize'])+" to: "+str(fileDict['psize'])+"\n old sha256: "+oldHash+"\n new sha256: "+newHash+"\n")
|
|
|
|
print("completed. Checksum stable: "+str(noHashChange)+". Total reduction: "+str(totalSaveBytes)+" bytes")
|