#!/usr/bin/env python3
# Copyright (c) 2016-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# This script will locally construct a merge commit for a pull request on a
# github repository, inspect it, sign it and optionally push it.

# The following temporary branches are created/overwritten and deleted:
# * pull/$PULL/base (the current master we're merging onto)
# * pull/$PULL/head (the current state of the remote pull request)
# * pull/$PULL/merge (github's merge)
# * pull/$PULL/local-merge (our merge)

# In case of a clean merge that is accepted by the user, the local branch with
# name $BRANCH is overwritten with the merged result, and optionally pushed.
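
# Example session (hypothetical PR number and branch; assumes this file is
# saved as github-merge.py and is executable):
#   git config githubmerge.repository <owner>/<repo>
#   git config --global user.signingkey <key>
#   ./github-merge.py 1234 master
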
import os
from sys import stdin,stdout,stderr
import argparse
import hashlib
import subprocess
import sys
import json
import codecs
from urllib.request import Request, urlopen
from urllib.error import HTTPError

# External tools (can be overridden using environment)
GIT = os.getenv('GIT','git')
BASH = os.getenv('BASH','bash')
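# e.g. to use a specific git binary (hypothetical path):
#   GIT=/opt/git/bin/git ./github-merge.py 1234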

# OS specific configuration for terminal attributes
ATTR_RESET = ''
ATTR_PR = ''
ATTR_NAME = ''
ATTR_WARN = ''
COMMIT_FORMAT = '%H %s (%an)%d'
if os.name == 'posix': # if posix, assume we can use basic terminal escapes
    ATTR_RESET = '\033[0m'
    ATTR_PR = '\033[1;36m'
    ATTR_NAME = '\033[0;36m'
    ATTR_WARN = '\033[1;31m'
    COMMIT_FORMAT = '%C(bold blue)%H%Creset %s %C(cyan)(%an)%Creset%C(green)%d%Creset'

def git_config_get(option, default=None):
    '''
    Get named configuration option from git repository.
    '''
    try:
        return subprocess.check_output([GIT,'config','--get',option]).rstrip().decode('utf-8')
    except subprocess.CalledProcessError:
        return default

def get_response(req_url, ghtoken):
    req = Request(req_url)
    if ghtoken is not None:
        req.add_header('Authorization', 'token ' + ghtoken)
    return urlopen(req)

def retrieve_json(req_url, ghtoken, use_pagination=False):
    '''
    Retrieve json from github.
    Return None if an error happens.
    '''
    try:
        reader = codecs.getreader('utf-8')
        if not use_pagination:
            return json.load(reader(get_response(req_url, ghtoken)))

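        # GitHub paginates list endpoints via the Link response header, which
        # looks roughly like (illustrative):
        #   <https://api.github.com/...?page=2>; rel="next", <...?page=7>; rel="last"
        # Keep requesting pages and following rel="next" until it disappears.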
        obj = []
        page_num = 1
        while True:
            req_url_page = '{}?page={}'.format(req_url, page_num)
            result = get_response(req_url_page, ghtoken)
            obj.extend(json.load(reader(result)))

            link = result.headers.get('link', None)
            if link is not None:
                link_next = [l for l in link.split(',') if 'rel="next"' in l]
                if len(link_next) > 0:
                    page_num = int(link_next[0][link_next[0].find("page=")+5:link_next[0].find(">")])
                    continue
            break
        return obj
    except HTTPError as e:
        error_message = e.read()
        print('Warning: unable to retrieve pull information from github: %s' % e)
        print('Detailed error: %s' % error_message)
        return None
    except Exception as e:
        print('Warning: unable to retrieve pull information from github: %s' % e)
        return None

def retrieve_pr_info(repo,pull,ghtoken):
    req_url = "https://api.github.com/repos/"+repo+"/pulls/"+pull
    return retrieve_json(req_url,ghtoken)

def retrieve_pr_comments(repo,pull,ghtoken):
    req_url = "https://api.github.com/repos/"+repo+"/issues/"+pull+"/comments"
    return retrieve_json(req_url,ghtoken,use_pagination=True)

def retrieve_pr_reviews(repo,pull,ghtoken):
    req_url = "https://api.github.com/repos/"+repo+"/pulls/"+pull+"/reviews"
    return retrieve_json(req_url,ghtoken,use_pagination=True)

def ask_prompt(text):
    print(text,end=" ",file=stderr)
    stderr.flush()
    reply = stdin.readline().rstrip()
    print("",file=stderr)
    return reply

def get_symlink_files():
    files = sorted(subprocess.check_output([GIT, 'ls-tree', '--full-tree', '-r', 'HEAD']).splitlines())
    ret = []
    for f in files:
        if (int(f.decode('utf-8').split(" ")[0], 8) & 0o170000) == 0o120000:
            ret.append(f.decode('utf-8').split("\t")[1])
    return ret

def tree_sha512sum(commit='HEAD'):
    # request metadata for entire tree, recursively
    files = []
    blob_by_name = {}
    for line in subprocess.check_output([GIT, 'ls-tree', '--full-tree', '-r', commit]).splitlines():
        name_sep = line.index(b'\t')
        metadata = line[:name_sep].split() # perms, 'blob', blobid
        assert(metadata[1] == b'blob')
        name = line[name_sep+1:]
        files.append(name)
        blob_by_name[name] = metadata[2]

    files.sort()
    # open connection to git-cat-file in batch mode to request data for all blobs
    # this is much faster than launching it per file
    p = subprocess.Popen([GIT, 'cat-file', '--batch'], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
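    # Batch protocol sketch: each request is one object id per line on stdin;
    # the reply on stdout is a header "<sha> blob <size>\n" followed by <size>
    # bytes of content and a trailing newline, which the loop below consumes.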
    overall = hashlib.sha512()
    for f in files:
        blob = blob_by_name[f]
        # request blob
        p.stdin.write(blob + b'\n')
        p.stdin.flush()
        # read header: blob, "blob", size
        reply = p.stdout.readline().split()
        assert(reply[0] == blob and reply[1] == b'blob')
        size = int(reply[2])
        # hash the blob data
        intern = hashlib.sha512()
        ptr = 0
        while ptr < size:
            bs = min(65536, size - ptr)
            piece = p.stdout.read(bs)
            if len(piece) == bs:
                intern.update(piece)
            else:
                raise IOError('Premature EOF reading git cat-file output')
            ptr += bs
        dig = intern.hexdigest()
        assert(p.stdout.read(1) == b'\n') # ignore LF that follows blob data
        # update overall hash with file hash
        overall.update(dig.encode("utf-8"))
        overall.update(" ".encode("utf-8"))
        overall.update(f)
        overall.update("\n".encode("utf-8"))
    p.stdin.close()
    if p.wait():
        raise IOError('Non-zero return value executing git cat-file')
    return overall.hexdigest()

def get_acks_from_comments(head_commit, comments):
    # Look for abbreviated commit id, because not everyone wants to type/paste
    # the whole thing and the chance of collisions within a PR is small enough
    head_abbrev = head_commit[0:6]
    acks = []
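    # A matching comment line looks like, e.g., "ACK 3f2d1a" or "utACK 3f2d1abc..."
    # (hypothetical abbreviated commit ids): any line mentioning both "ACK" and
    # the abbreviated head commit is collected.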
    for c in comments:
        review = [l for l in c['body'].split('\r\n') if 'ACK' in l and head_abbrev in l]
        if review:
            acks.append((c['user']['login'], review[0]))
    return acks

def make_acks_message(head_commit, acks):
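    # Produces the ACKs section of the merge commit message, e.g. (illustrative):
    #
    #   ACKs for top commit:
    #     alice:
    #       ACK 3f2d1a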
    if acks:
        ack_str = '\n\nACKs for top commit:\n'
        for name, msg in acks:
            ack_str += '  {}:\n'.format(name)
            ack_str += '    {}\n'.format(msg)
    else:
        ack_str = '\n\nTop commit has no ACKs.\n'
    return ack_str

def print_merge_details(pull, title, branch, base_branch, head_branch, acks):
    print('%s#%s%s %s %sinto %s%s' % (ATTR_RESET+ATTR_PR,pull,ATTR_RESET,title,ATTR_RESET+ATTR_PR,branch,ATTR_RESET))
    subprocess.check_call([GIT,'log','--graph','--topo-order','--pretty=format:'+COMMIT_FORMAT,base_branch+'..'+head_branch])
    if acks is not None:
        if acks:
            print('{}ACKs:{}'.format(ATTR_PR, ATTR_RESET))
            for (name, message) in acks:
                print('* {} {}({}){}'.format(message, ATTR_NAME, name, ATTR_RESET))
        else:
            print('{}Top commit has no ACKs!{}'.format(ATTR_WARN, ATTR_RESET))

def parse_arguments():
    epilog = '''
        In addition, you can set the following git configuration variables:
        githubmerge.repository (mandatory),
        user.signingkey (mandatory),
        user.ghtoken (default: none),
        githubmerge.host (default: git@github.com),
        githubmerge.branch (no default),
        githubmerge.testcmd (default: none).
    '''
    parser = argparse.ArgumentParser(description='Utility to merge, sign and push github pull requests',
        epilog=epilog)
    parser.add_argument('pull', metavar='PULL', type=int, nargs=1,
        help='Pull request ID to merge')
    parser.add_argument('branch', metavar='BRANCH', type=str, nargs='?',
        default=None, help='Branch to merge against (default: githubmerge.branch setting, or base branch for pull, or \'master\')')
    return parser.parse_args()

def main():
    # Extract settings from git repo
    repo = git_config_get('githubmerge.repository')
    host = git_config_get('githubmerge.host','git@github.com')
    opt_branch = git_config_get('githubmerge.branch',None)
    testcmd = git_config_get('githubmerge.testcmd')
    ghtoken = git_config_get('user.ghtoken')
    signingkey = git_config_get('user.signingkey')
    if repo is None:
        print("ERROR: No repository configured. Use this command to set:", file=stderr)
        print("git config githubmerge.repository <owner>/<repo>", file=stderr)
        sys.exit(1)
    if signingkey is None:
        print("ERROR: No GPG signing key set. Set one using:",file=stderr)
        print("git config --global user.signingkey <key>",file=stderr)
        sys.exit(1)

    if host.startswith(('https:','http:')):
        host_repo = host+"/"+repo+".git"
    else:
        host_repo = host+":"+repo

    # Extract settings from command line
    args = parse_arguments()
    pull = str(args.pull[0])

    # Receive pull information from github
    info = retrieve_pr_info(repo,pull,ghtoken)
    if info is None:
        sys.exit(1)
    title = info['title'].strip()
    body = info['body'].strip()
    # Extract forker's repo SSH URL and branch name
    forker_repo_ssh_url = info['head']['repo']['ssh_url']
    target_branch = info['head']['ref']

    # precedence order for destination branch argument:
    # - command line argument
    # - githubmerge.branch setting
    # - base branch for pull (as retrieved from github)
    # - 'master'
    branch = args.branch or opt_branch or info['base']['ref'] or 'master'

    # Initialize source branches
    head_branch = 'pull/'+pull+'/head'
    base_branch = 'pull/'+pull+'/base'
    merge_branch = 'pull/'+pull+'/merge'
    local_merge_branch = 'pull/'+pull+'/local-merge'

    devnull = open(os.devnull, 'w', encoding="utf8")
    try:
        subprocess.check_call([GIT,'checkout','-q',branch])
    except subprocess.CalledProcessError:
        print("ERROR: Cannot check out branch %s." % (branch), file=stderr)
        sys.exit(3)
    try:
        subprocess.check_call([GIT,'fetch','-q',host_repo,'+refs/pull/'+pull+'/*:refs/heads/pull/'+pull+'/*',
                               '+refs/heads/'+branch+':refs/heads/'+base_branch])
    except subprocess.CalledProcessError:
        print("ERROR: Cannot find pull request #%s or branch %s on %s." % (pull,branch,host_repo), file=stderr)
        sys.exit(3)

    # Ask the user if they want to rebase the branch
    rebase_reply = ask_prompt("Would you like to rebase the branch? Type 'yes' to rebase or anything else to continue without rebasing.").lower()
    if rebase_reply == 'yes':
        try:
            subprocess.check_call([GIT, 'checkout', head_branch])
            # Capture the commit hash of head_branch before the rebase
            head_commit_before_rebase = subprocess.check_output([GIT, 'rev-parse', 'HEAD']).strip().decode('utf-8')
            # Identify the base commit before the rebase
            base_commit_before_rebase = subprocess.check_output([GIT, 'merge-base', base_branch, head_branch]).strip().decode('utf-8')

            # Perform the rebase
            subprocess.check_call([GIT, 'rebase', base_branch])

            # Identify the new head commit after rebase
            new_head_commit = subprocess.check_output([GIT, 'rev-parse', 'HEAD']).strip().decode('utf-8')

            # Using git range-diff to compare changes before and after the rebase
            range_diff_output = subprocess.check_output([GIT, 'range-diff', base_commit_before_rebase + '..' + head_commit_before_rebase, base_commit_before_rebase + '..' + new_head_commit], stderr=subprocess.STDOUT)

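            # Note: range-diff pairs each commit in the old series with its
            # counterpart in the rebased series and shows how the patches
            # changed; the script treats empty output as nothing to review.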
            # Check the range-diff output for significant changes
            if not range_diff_output:
                print("No significant changes detected by git range-diff.")
            else:
                print("Significant changes detected by git range-diff. Please review the output below:")
                print(range_diff_output.decode('utf-8'))
                try:
                    # Showing the log is best-effort; ignore a failing git log
                    subprocess.check_call([GIT,'log','--graph','--topo-order','--pretty=format:'+COMMIT_FORMAT])
                except subprocess.CalledProcessError:
                    pass
                review_reply = ask_prompt("Do you want to continue with force push? Type 'yes' to continue or anything else to abort.").lower()
                if review_reply != 'yes':
                    sys.exit(5)

            # If no significant changes or user accepts changes, force push the rebased branch to the PR branch
            # subprocess.check_call([GIT, 'push', host_repo, head_branch + ':' + 'pull/' + pull + '/head'])
            try:
                subprocess.check_call([GIT, 'push', '--force', forker_repo_ssh_url, f'HEAD:refs/heads/{target_branch}'])
                print(f"Force pushed to {target_branch} on {forker_repo_ssh_url}.")
            except subprocess.CalledProcessError as e:
                print(f"Error while pushing: {str(e)}", file=stderr)
                sys.exit(1)

        except subprocess.CalledProcessError as e:
            # e.output is only populated for check_output calls; fall back to str(e)
            print("ERROR:", e.output.decode('utf-8') if e.output else str(e), file=stderr)
            sys.exit(4)

        subprocess.check_call([GIT,'checkout','-q',base_branch])
        subprocess.call([GIT,'branch','-q','-D',local_merge_branch], stderr=devnull)
        subprocess.check_call([GIT,'checkout','-q','-b',local_merge_branch])

    try:
        subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+head_branch], stdout=devnull, stderr=stdout)
        head_commit = subprocess.check_output([GIT,'log','-1','--pretty=format:%H',head_branch]).decode('utf-8')
        assert len(head_commit) == 40
    except subprocess.CalledProcessError:
        print("ERROR: Cannot find head of pull request #%s on %s." % (pull,host_repo), file=stderr)
        sys.exit(3)
    try:
        subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+merge_branch], stdout=devnull, stderr=stdout)
    except subprocess.CalledProcessError:
        print("ERROR: Cannot find merge of pull request #%s on %s." % (pull,host_repo), file=stderr)
        sys.exit(3)
    subprocess.check_call([GIT,'checkout','-q',base_branch])
    subprocess.call([GIT,'branch','-q','-D',local_merge_branch], stderr=devnull)
    subprocess.check_call([GIT,'checkout','-q','-b',local_merge_branch])

    try:
        # Go up to the repository's root.
        toplevel = subprocess.check_output([GIT,'rev-parse','--show-toplevel']).strip()
        os.chdir(toplevel)
        # Create unsigned merge commit.
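        # The final commit message has roughly this shape (illustrative; the
        # ACKs section and the Tree-SHA512 trailer are appended further below,
        # before signing):
        #
        #   Merge #1234: Example pull request title
        #
        #   <commit id> <subject> (<author>)
        #
        #   Pull request description:
        #
        #     <body of the pull request>
        #
        #   ACKs for top commit:
        #     <reviewer>:
        #       ACK <abbreviated commit id>
        #
        #   Tree-SHA512: <hash of the merged tree>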
        if title:
            firstline = 'Merge #%s: %s' % (pull,title)
        else:
            firstline = 'Merge #%s' % (pull,)
        message = firstline + '\n\n'
        message += subprocess.check_output([GIT,'log','--no-merges','--topo-order','--pretty=format:%H %s (%an)',base_branch+'..'+head_branch]).decode('utf-8')
        message += '\n\nPull request description:\n\n  ' + body.replace('\n', '\n  ') + '\n'
        try:
            subprocess.check_call([GIT,'merge','-q','--commit','--no-edit','--no-ff','--no-gpg-sign','-m',message.encode('utf-8'),head_branch])
        except subprocess.CalledProcessError:
            print("ERROR: Cannot be merged cleanly.",file=stderr)
            subprocess.check_call([GIT,'merge','--abort'])
            sys.exit(4)
        logmsg = subprocess.check_output([GIT,'log','--pretty=format:%s','-n','1']).decode('utf-8')
        if logmsg.rstrip() != firstline.rstrip():
            print("ERROR: Creating merge failed (already merged?).",file=stderr)
            sys.exit(4)

        symlink_files = get_symlink_files()
        for f in symlink_files:
            print("ERROR: File %s was a symlink" % f)
        if len(symlink_files) > 0:
            sys.exit(4)

        # Compute SHA512 of git tree (to be able to detect changes before sign-off)
        try:
            first_sha512 = tree_sha512sum()
        except subprocess.CalledProcessError:
            print("ERROR: Unable to compute tree hash")
            sys.exit(4)

        print_merge_details(pull, title, branch, base_branch, head_branch, None)
        print()

        # Run test command if configured.
        if testcmd:
            if subprocess.call(testcmd,shell=True):
                print("ERROR: Running %s failed." % testcmd,file=stderr)
                sys.exit(5)

            # Show the created merge.
            diff = subprocess.check_output([GIT,'diff',merge_branch+'..'+local_merge_branch])
            subprocess.check_call([GIT,'diff',base_branch+'..'+local_merge_branch])
            if diff:
                print("WARNING: merge differs from github!",file=stderr)
                reply = ask_prompt("Type 'ignore' to continue.")
                if reply.lower() == 'ignore':
                    print("Difference with github ignored.",file=stderr)
                else:
                    sys.exit(6)
        else:
            # Verify the result manually.
            print("Dropping you on a shell so you can try building/testing the merged source.",file=stderr)
            print("Run 'git diff HEAD~' to show the changes being merged.",file=stderr)
            print("Type 'exit' when done.",file=stderr)
            if os.path.isfile('/etc/debian_version'): # Show pull number on Debian default prompt
                os.putenv('debian_chroot',pull)
            subprocess.call([BASH,'-i'])

        second_sha512 = tree_sha512sum()
        if first_sha512 != second_sha512:
            print("ERROR: Tree hash changed unexpectedly",file=stderr)
            sys.exit(8)

        # Retrieve PR comments and ACKs and add to commit message, store ACKs to print them with commit
        # description
        comments = retrieve_pr_comments(repo,pull,ghtoken)
        reviews = retrieve_pr_reviews(repo,pull,ghtoken)
        if comments is None or reviews is None:
            print("ERROR: Could not fetch PR comments and reviews",file=stderr)
            sys.exit(1)
        comments += reviews
        acks = get_acks_from_comments(head_commit=head_commit, comments=comments)
        message += make_acks_message(head_commit=head_commit, acks=acks)
        # end message with SHA512 tree hash, then update message
        message += '\n\nTree-SHA512: ' + first_sha512
        try:
            subprocess.check_call([GIT,'commit','--amend','--no-gpg-sign','-m',message.encode('utf-8')])
        except subprocess.CalledProcessError:
            print("ERROR: Cannot update message.", file=stderr)
            sys.exit(4)

        # Sign the merge commit.
        print_merge_details(pull, title, branch, base_branch, head_branch, acks)
        while True:
            reply = ask_prompt("Type 's' to sign off on the above merge, or 'x' to reject and exit.").lower()
            if reply == 's':
                try:
                    subprocess.check_call([GIT,'commit','-q','--gpg-sign','--amend','--no-edit'])
                    break
                except subprocess.CalledProcessError:
                    print("Error while signing, asking again.",file=stderr)
            elif reply == 'x':
                print("Not signing off on merge, exiting.",file=stderr)
                sys.exit(1)

        # Put the result in branch.
        subprocess.check_call([GIT,'checkout','-q',branch])
        subprocess.check_call([GIT,'reset','-q','--hard',local_merge_branch])
    finally:
        # Clean up temporary branches.
        subprocess.call([GIT,'checkout','-q',branch])
        subprocess.call([GIT,'branch','-q','-D',head_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',base_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',merge_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',local_merge_branch],stderr=devnull)

    # Push the result.
    while True:
        reply = ask_prompt("Type 'push' to push the result to %s, branch %s, or 'x' to exit without pushing." % (host_repo,branch)).lower()
        if reply == 'push':
            subprocess.check_call([GIT,'push',host_repo,'refs/heads/'+branch])
            break
        elif reply == 'x':
            sys.exit(1)

if __name__ == '__main__':
    main()