Mirror of https://github.com/qpdf/qpdf.git (synced 2024-12-22 10:58:58 +00:00)
Add basic framework for QPDFJob code generation

parent ad096b462c
commit c216854607
generate_auto_job (new executable file, 98 lines)
@@ -0,0 +1,98 @@
#!/usr/bin/env python3
import os
import sys
import argparse
import hashlib
import re

whoami = os.path.basename(sys.argv[0])
BANNER = f'''//
// This file is automatically generated by {whoami}.
// Edits will be automatically overwritten if the build is
// run in maintainer mode.
//'''


def warn(*args, **kwargs):
    print(*args, file=sys.stderr, **kwargs)


class Main:
    SOURCES = [whoami, 'job.yml']
    SUMS = 'job.sums'

    def main(self, args=sys.argv[1:], prog=whoami):
        options = self.parse_args(args, prog)
        self.top(options)

    def parse_args(self, args, prog):
        parser = argparse.ArgumentParser(
            prog=prog,
            description='Generate files for QPDFJob',
        )
        mxg = parser.add_mutually_exclusive_group(required=True)
        mxg.add_argument('--check',
                         help='update checksums if files are not up to date',
                         action='store_true', default=False)
        mxg.add_argument('--generate',
                         help='generate files from sources',
                         action='store_true', default=False)
        return parser.parse_args(args)

    def top(self, options):
        if options.check:
            self.check()
        elif options.generate:
            self.generate()
        else:
            exit(f'{whoami} unknown mode')

    def get_hashes(self):
        hashes = {}
        for i in sorted(self.SOURCES):
            m = hashlib.sha256()
            with open(i, 'rb') as f:
                m.update(f.read())
            hashes[i] = m.hexdigest()
        return hashes

    def check(self):
        hashes = self.get_hashes()
        match = False
        try:
            old_hashes = {}
            with open(self.SUMS, 'r') as f:
                for line in f.readlines():
                    m = re.match(r'^(\S+) (\S+)\s*$', line)
                    if m:
                        old_hashes[m.group(1)] = m.group(2)
            match = old_hashes == hashes
        except Exception:
            pass
        if not match:
            exit(f'{whoami}: auto job inputs have changed')

    def update_hashes(self):
        hashes = self.get_hashes()
        with open(self.SUMS, 'w') as f:
            print(f'# Generated by {whoami}', file=f)
            for k, v in hashes.items():
                print(f'{k} {v}', file=f)

    def generate(self):
        warn(f'{whoami}: regenerating auto job files')

        with open('libqpdf/qpdf/auto_job_decl.hh', 'w') as f:
            print(BANNER, file=f)

        # Update hashes last to ensure that this will be rerun in the
        # event of a failure.
        self.update_hashes()


if __name__ == '__main__':
    try:
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        Main().main()
    except KeyboardInterrupt:
        exit(130)
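As a stand-alone illustration, not part of this commit, of the checksum contract the script relies on: update_hashes() writes one "name sha256-hexdigest" line per source to job.sums, and check() fails as soon as any recorded digest no longer matches the corresponding file. The helper below is a hypothetical restatement of that comparison; the function names and the example call are assumptions for illustration only.

import hashlib

def digest(path):
    # Same hashing as get_hashes(): SHA-256 over the file's raw bytes.
    with open(path, 'rb') as f:
        return hashlib.sha256(f.read()).hexdigest()

def sums_match(sums_path, sources):
    # Parse "name digest" lines, skipping the "# Generated by ..." comment,
    # and require an exact match for every source file.
    recorded = {}
    with open(sums_path) as f:
        for line in f:
            if line.strip() and not line.startswith('#'):
                name, value = line.split()
                recorded[name] = value
    return recorded == {s: digest(s) for s in sources}

# Example: sums_match('job.sums', ['generate_auto_job', 'job.yml']) becomes False
# as soon as job.yml is edited, which is what makes --check exit with an error.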
job.sums (new file, 3 lines)
@@ -0,0 +1,3 @@
# Generated by generate_auto_job
generate_auto_job 3905985c383d33f9e8629414d1481724ea67d836749f6dff53859ca558325743
job.yml f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090
@@ -1,5 +1,11 @@
TARGETS_libqpdf = libqpdf/$(OUTPUT_DIR)/$(call libname,qpdf)

ifeq ($(MAINTAINER_MODE), 1)
ifeq ($(shell if ./generate_auto_job --check; then echo 0; else echo 1; fi), 1)
_ := $(shell ./generate_auto_job --generate)
endif
endif

INCLUDES_libqpdf = include libqpdf
LDFLAGS_libqpdf = -Llibqpdf/$(OUTPUT_DIR)
LIBS_libqpdf = -lqpdf
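The maintainer-mode block above runs ./generate_auto_job --check while make parses this fragment and, when the check fails, immediately invokes --generate so the generated sources are refreshed before anything is compiled. A rough Python equivalent of that check-then-regenerate step, shown only as a sketch; it assumes the script is invoked from the repository root, as the make rule does.

import subprocess

def refresh_auto_job_files():
    # --check exits nonzero when generate_auto_job or job.yml has changed
    # since job.sums was last written.
    if subprocess.run(['./generate_auto_job', '--check']).returncode != 0:
        # --generate rewrites libqpdf/qpdf/auto_job_decl.hh and then updates job.sums.
        subprocess.run(['./generate_auto_job', '--generate'], check=True)

if __name__ == '__main__':
    refresh_auto_job_files()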
libqpdf/qpdf/auto_job_decl.hh (new file, 5 lines)
@@ -0,0 +1,5 @@
//
// This file is automatically generated by generate_auto_job.
// Edits will be automatically overwritten if the build is
// run in maintainer mode.
//