Commit c9ce46a603053f352c4bb9967f0d1f3c7d23cabf
Merge pull request #40 into master
TheCharlatan committed on 12/8/2019, 7:31:11 PM
Parent: fc3806f5caaadcb60b6b02854ac165cdd4ebc01c
Parent: 6eadfcbb362c72d4100b0a5e3000807c1bdd24b6
Files changed
verify-merge.py | changed |
verify-merge.py | ||
---|---|---|
@@ -4,29 +4,100 @@ | ||
4 | 4 | import subprocess |
5 | 5 | import glob |
6 | 6 | import sys |
7 | 7 | |
8 | -GIT = os.getenv('GIT','git') | |
9 | -GPG = os.getenv('GPG','gpg') | |
8 | +GIT = os.getenv('GIT', 'git') | |
9 | +GPG = os.getenv('GPG', 'gpg') | |
10 | +GITIAN_PUBKEYS_DIR = os.getenv('GITIAN_PUBKEYS_DIR', 'gitian-pubkeys') | |
10 | 11 | |
11 | 12 | def verify(): |
12 | 13 | global args, workdir |
13 | 14 | if args.import_keys: |
14 | - os.chdir('gitian-pubkeys') | |
15 | - print('Importing gpg pubkeys...') | |
16 | - keys = [f for f in glob.glob('*.asc', recursive=False)] | |
17 | - for key in keys: | |
18 | - subprocess.check_call([GPG, '--import', key]) | |
19 | - os.chdir('../') | |
15 | + import_gpg_keys() | |
20 | 16 | if args.refresh_keys: |
21 | - print('Refreshing pubkeys...') | |
22 | - subprocess.check_call([GPG, '--refresh']) | |
17 | + refresh_gpg_keys() | |
18 | + assert_files = get_assert_file_list() | |
19 | + verify_gpg_sigs(assert_files) | |
20 | + verify_checksums(assert_files) | |
21 | + print('All checks passed.') | |
22 | + os.chdir(workdir) | |
23 | + | |
24 | +def main(): | |
25 | + global args, workdir | |
26 | + args = get_parsed_args() | |
27 | + workdir = os.getcwd() | |
28 | + if args.pull_id != None: | |
29 | + pull_request() | |
30 | + else: | |
31 | + verify() | |
32 | + | |
33 | +def get_parsed_args(): | |
34 | + parser = argparse.ArgumentParser(usage='%(prog)s [options]', description='Use this script to verify the signatures of existing gitian assert files and / or assert files in a specific pull request.') | |
35 | + parser.add_argument('-p', '--pull_id', dest='pull_id', help='GitHub Pull request id to check') | |
36 | + parser.add_argument('-r', '--remote', dest='remote', default='upstream', help='The git remote repository') | |
37 | + parser.add_argument('-t', '--target-branch', dest='target_branch', default='master', help='Remote repository merge into branch') | |
38 | + parser.add_argument('-m', '--merge', action='store_true', dest='merge', help='Merge the given pull request id') | |
39 | + parser.add_argument('-k', '--refresh-keys', action='store_true', dest='refresh_keys', help='Refresh all public keys that are currently in the gpg keyring.') | |
40 | + parser.add_argument('-i', '--import-keys', action='store_true', dest='import_keys', help='Import all public keys in the gitian-pubkeys directory to the gpg keyring.') | |
41 | + parser.add_argument('-o', '--no-verify', action='store_true', dest='no_verify', help='Do not run any signature verification') | |
42 | + parser.add_argument('-v', '--version', dest='version', help='Version number of sigs to be verified (defaults to all versions if not specified).') | |
43 | + return parser.parse_args() | |
44 | + | |
45 | +def pull_request(): | |
46 | + global args | |
47 | + # Get branch from remote pull request and compare | |
48 | + head_branch = args.pull_id + '_head' | |
49 | + subprocess.check_call([GIT, 'fetch', args.remote]) | |
50 | + subprocess.check_call([GIT, 'checkout', args.remote + '/' + args.target_branch]) | |
51 | + subprocess.check_call([GIT, 'fetch', '-q', args.remote, 'pull/' + args.pull_id + '/head:' + head_branch]) | |
52 | + subprocess.check_call([GIT, 'checkout', '-f', head_branch]) | |
53 | + if args.merge: | |
54 | + # Hard reset the target branch to the remote's state and merge the pull request's head branch into it | |
55 | + subprocess.check_call([GIT, 'checkout', args.target_branch]) | |
56 | + subprocess.check_call([GIT, 'reset', '--hard', args.remote + '/' + args.target_branch]) | |
57 | + print('Merging and signing pull request #' + args.pull_id + ' , if you are using a smartcard, confirm the signature now.') | |
58 | + subprocess.check_call([GIT, 'merge', '-q', '--commit', '--no-edit', '-m', 'Merge pull request #' + args.pull_id + ' into ' + args.target_branch, '--no-ff', '--gpg-sign', head_branch]) | |
59 | + if not args.no_verify: | |
60 | + verify() | |
61 | + subprocess.check_call([GIT, 'checkout', 'master']) | |
62 | + subprocess.check_call([GIT, 'branch', '-D', head_branch]) | |
63 | + | |
64 | +def refresh_gpg_keys(): | |
65 | + print('Refreshing pubkeys...') | |
66 | + subprocess.check_call([GPG, '--refresh']) | |
67 | + | |
68 | +def import_gpg_keys(): | |
69 | + os.chdir(GITIAN_PUBKEYS_DIR) | |
70 | + print('Importing gpg pubkeys...') | |
71 | + keys = [f for f in glob.glob('*.asc', recursive=False)] | |
72 | + for key in keys: | |
73 | + subprocess.check_call([GPG, '--import', key]) | |
74 | + os.chdir('../') | |
75 | + | |
76 | +def get_assert_file_list(): | |
77 | + global args | |
78 | + # Shell glob pattern for specific version or all builds: | |
79 | + ver_pattern = args.version if args.version else 'v0*' | |
80 | + assert_files = [] | |
81 | + for assert_file in sorted(glob.glob(ver_pattern + '-*/*/*.assert')): | |
82 | + pieces = assert_file.split('/') | |
83 | + release_full = pieces[0] # eg v0.15.0.1-linux | |
84 | + release_num, platform = release_full.split('-') | |
85 | + assert_files.append({ | |
86 | + 'release_full': release_full, | |
87 | + 'release_num': release_num, | |
88 | + 'platform': platform, | |
89 | + 'path': assert_file, | |
90 | + 'user': pieces[1]}) | |
91 | + return assert_files | |
92 | + | |
93 | +def verify_gpg_sigs(assert_files): | |
23 | 94 | print('Verifying signatures:') |
24 | 95 | is_verification_error = False |
25 | - ver_pattern = args.version if args.version else 'v0*' | |
26 | - for sig_file in sorted(glob.glob(ver_pattern + '-*/*/*.sig', recursive=False)): | |
96 | + for assert_file in assert_files: | |
97 | + sig_file = assert_file['path'] + '.sig' | |
27 | 98 | print(' - ' + '{message: <{fill}}'.format(message=sig_file, fill='72'), end='') |
28 | - result = subprocess.run([GPG, '--verify', sig_file], capture_output=True, encoding='utf-8') | |
99 | + result = verify_gpg_sig(sig_file) | |
29 | 100 | if result.returncode != 0: |
30 | 101 | is_verification_error = True |
31 | 102 | print('\n') |
32 | 103 | sys.stderr.write('ERROR:\n' + result.stderr + '-' * 80 + '\n') |
@@ -34,76 +105,55 @@ | ||
34 | 105 | print(' [OK]') |
35 | 106 | if is_verification_error: |
36 | 107 | sys.stderr.write('ERROR: One or more signatures failed verification.\n') |
37 | 108 | exit(1) |
109 | + print('All signatures verified correctly.\n') | |
38 | 110 | |
39 | - print('All signatures verified correctly.\n') | |
40 | - print('Beginning checksum comparison...\n') | |
111 | +def verify_gpg_sig(sig_file): | |
112 | + return subprocess.run([GPG, '--verify', sig_file], capture_output=True, encoding='utf-8') | |
113 | + | |
114 | +def verify_checksums(assert_files): | |
115 | + print('Beginning binary checksum comparison...\n') | |
41 | 116 | # Check that the contents between the assertion signers match. |
42 | 117 | # This is meant for quick verification, not for validation of their contents. |
43 | 118 | # TODO: prevent false positives related to filenames / whitespace / formatting. |
44 | - builds = glob.glob(ver_pattern + '*') | |
45 | - for build in builds: | |
46 | - first_file = glob.glob(build + '/*/*.assert', recursive=False)[0] | |
47 | - f = open(first_file, 'r') | |
48 | - first_file_contents = f.readlines() | |
49 | - f.close() | |
50 | - for assert_file in glob.glob(build + '/*/*.assert', recursive=False): | |
51 | - f = open(assert_file, 'r') | |
52 | - assert_file_contents = f.readlines() | |
119 | + prev_release_num = '' | |
120 | + prev_release_full = '' | |
121 | + prev_platform = '' | |
122 | + for assert_file in assert_files: | |
123 | + release_full = assert_file['release_full'] | |
124 | + if release_full != prev_release_full: | |
125 | + first_user = assert_file['user'] | |
126 | + first_file = assert_file['path'] | |
127 | + prev_release_full = release_full | |
128 | + if prev_release_num != assert_file['release_num']: | |
129 | + print(' ' + assert_file['release_num']) | |
130 | + prev_release_num = assert_file['release_num'] | |
131 | + f = open(first_file, 'r') | |
132 | + first_file_contents = f.readlines() | |
53 | 133 | f.close() |
54 | - for i in range(len(assert_file_contents)): | |
55 | - # Compare each line in the assertion file until base_manifests: | |
56 | - if assert_file_contents[i] == '- base_manifests: !!omap\n': | |
57 | - break | |
58 | - # The OSX SDK may change from time to time: | |
59 | - if 'sdk' in assert_file_contents[i]: | |
60 | - continue | |
61 | - if assert_file_contents[i] != first_file_contents[i]: | |
62 | - sys.stderr.write('ERROR: Found conflicting contents on line: ' + str(i) + ' of file ') | |
63 | - sys.stderr.write(assert_file + ':\n' + assert_file_contents[i]) | |
64 | - sys.stderr.write(first_file + ':\n' + first_file_contents[i]) | |
65 | - exit(1) | |
66 | - | |
134 | + continue | |
135 | + platform = assert_file['platform'] | |
136 | + if platform != prev_platform: | |
137 | + prev_platform = platform | |
138 | + print(' ' + platform) | |
139 | + print(' ' + first_user) | |
140 | + print(' ' + assert_file['user']) | |
141 | + assert_file_handle = open(assert_file['path'], 'r') | |
142 | + assert_file_contents = assert_file_handle.readlines() | |
143 | + assert_file_handle.close() | |
144 | + for i in range(len(assert_file_contents)): | |
145 | + # Compare each line in the assertion file until base_manifests: | |
146 | + if assert_file_contents[i] == '- base_manifests: !!omap\n': | |
147 | + break | |
148 | + # The OSX SDK may change from time to time: | |
149 | + if 'sdk' in assert_file_contents[i]: | |
150 | + continue | |
151 | + if assert_file_contents[i] != first_file_contents[i]: | |
152 | + sys.stderr.write('ERROR: Found conflicting contents on line: ' + str(i) + ' of file ') | |
153 | + sys.stderr.write(assert_file['path'] + ':\n' + assert_file_contents[i]) | |
154 | + sys.stderr.write(first_file + ':\n' + first_file_contents[i]) | |
155 | + exit(1) | |
67 | 156 | print('No discrepancies found in assertion files.') |
68 | - print('All checks passed.') | |
69 | - os.chdir(workdir) | |
70 | 157 | |
71 | -def main(): | |
72 | - host_repo = 'git@github.com/monero-project/gitian.sigs' | |
73 | - global args, workdir | |
74 | - parser = argparse.ArgumentParser(usage='%(prog)s [options]', description='Use this script to verify the signatures of existing gitian assert files and / or assert files in a specific pull request.') | |
75 | - parser.add_argument('-p', '--pull_id', dest='pull_id', help='GitHub Pull request id to check') | |
76 | - parser.add_argument('-r', '--remote', dest='remote', default='upstream', help='The git remote repository') | |
77 | - parser.add_argument('-t', '--target-branch', dest='target_branch', default='master', help='Remote repository merge into branch') | |
78 | - parser.add_argument('-m', '--merge', action='store_true', dest='merge', help='Merge the given pull request id') | |
79 | - parser.add_argument('-k', '--refresh-keys', action='store_true', dest='refresh_keys', help='Refresh all public keys that are currently in the gpg keyring.') | |
80 | - parser.add_argument('-i', '--import-keys', action='store_true', dest='import_keys', help='Import all public keys in the gitian-pubkeys directory to the gpg keyring.') | |
81 | - parser.add_argument('-o', '--no-verify', action='store_true', dest='no_verify', help='Do not run any signature verification') | |
82 | - parser.add_argument('-v', '--version', dest='version', help='Version number of sigs to be verified (defaults to all versions if not specified).') | |
83 | - | |
84 | - args = parser.parse_args() | |
85 | - | |
86 | - workdir = os.getcwd() | |
87 | - if args.pull_id != None: | |
88 | - # Get branch from remote pull request and compare | |
89 | - head_branch = args.pull_id + '_head' | |
90 | - subprocess.check_call([GIT, 'fetch', args.remote]) | |
91 | - subprocess.check_call([GIT, 'checkout', args.remote + '/' + args.target_branch]) | |
92 | - subprocess.check_call([GIT, 'fetch', '-q', args.remote, 'pull/' + args.pull_id + '/head:' + head_branch]) | |
93 | - subprocess.check_call([GIT, 'checkout', '-f', head_branch]) | |
94 | - if args.merge: | |
95 | - # Hard reset the target branch to the remote's state and merge the pull request's head branch into it | |
96 | - subprocess.check_call([GIT, 'checkout', args.target_branch]) | |
97 | - subprocess.check_call([GIT, 'reset', '--hard', args.remote + '/' + args.target_branch]) | |
98 | - print('Merging and signing pull request #' + args.pull_id + ' , if you are using a smartcard, confirm the signature now.') | |
99 | - subprocess.check_call([GIT, 'merge', '-q', '--commit', '--no-edit', '-m', 'Merge pull request #' + args.pull_id + ' into ' + args.target_branch, '--no-ff', '--gpg-sign', head_branch]) | |
100 | - if not args.no_verify: | |
101 | - verify() | |
102 | - subprocess.check_call([GIT, 'checkout', 'master']) | |
103 | - subprocess.check_call([GIT, 'branch', '-D', head_branch]) | |
104 | - else: | |
105 | - verify() | |
106 | - | |
107 | - | |
108 | 158 | if __name__ == '__main__': |
109 | 159 | main() |
Built with git-ssb-web