#!/usr/bin/env python3
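#
# Generate support files for QPDFJob. With --check, verify that the
# checksums recorded in job.sums still match the inputs (this script and
# job.yml) and fail if they don't; with --generate, regenerate the output
# files and refresh job.sums.
#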
import os
import sys
import argparse
import hashlib
import re

whoami = os.path.basename(sys.argv[0])
BANNER = f'''//
// This file is automatically generated by {whoami}.
// Edits will be automatically overwritten if the build is
// run in maintainer mode.
//'''


def warn(*args, **kwargs):
    print(*args, file=sys.stderr, **kwargs)


class Main:
    SOURCES = [whoami, 'job.yml']
    SUMS = 'job.sums'

    def main(self, args=sys.argv[1:], prog=whoami):
        options = self.parse_args(args, prog)
        self.top(options)

    def parse_args(self, args, prog):
        parser = argparse.ArgumentParser(
            prog=prog,
            description='Generate files for QPDFJob',
        )
        mxg = parser.add_mutually_exclusive_group(required=True)
        mxg.add_argument('--check',
                         help='check whether generated files are up to date',
                         action='store_true', default=False)
        mxg.add_argument('--generate',
                         help='generate files from sources',
                         action='store_true', default=False)
        return parser.parse_args(args)

    def top(self, options):
        if options.check:
            self.check()
        elif options.generate:
            self.generate()
        else:
            exit(f'{whoami} unknown mode')

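    # Compute a SHA-256 digest of each input file so that staleness can be
    # detected by comparing against the sums recorded in job.sums.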
    def get_hashes(self):
        hashes = {}
        for i in sorted(self.SOURCES):
            m = hashlib.sha256()
            with open(i, 'rb') as f:
                m.update(f.read())
            hashes[i] = m.hexdigest()
        return hashes

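    # Compare the current input hashes with the ones recorded in job.sums.
    # Any problem reading or parsing job.sums is treated as a mismatch,
    # which forces regeneration.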
    def check(self):
        hashes = self.get_hashes()
        match = False
        try:
            old_hashes = {}
            with open(self.SUMS, 'r') as f:
                for line in f.readlines():
                    m = re.match(r'^(\S+) (\S+)\s*$', line)
                    if m:
                        old_hashes[m.group(1)] = m.group(2)
            match = old_hashes == hashes
        except Exception:
            pass
        if not match:
            exit(f'{whoami}: auto job inputs have changed')

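    # Rewrite job.sums: a leading comment line followed by one
    # "<filename> <sha256 hex digest>" line per input file, matching the
    # format that check() parses.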
    def update_hashes(self):
        hashes = self.get_hashes()
        with open(self.SUMS, 'w') as f:
            print(f'# Generated by {whoami}', file=f)
            for k, v in hashes.items():
                print(f'{k} {v}', file=f)

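    # Regenerate the auto job files, writing BANNER at the top, and update
    # job.sums last so that a failure forces regeneration on the next run.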
    def generate(self):
        warn(f'{whoami}: regenerating auto job files')

        with open('libqpdf/qpdf/auto_job_decl.hh', 'w') as f:
            print(BANNER, file=f)

        # Update hashes last to ensure that this will be rerun in the
        # event of a failure.
        self.update_hashes()


if __name__ == '__main__':
    try:
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        Main().main()
    except KeyboardInterrupt:
        exit(130)