Add json to large file test

Jay Berkenbilt 2022-05-21 07:09:11 -04:00
parent c56a9ca7f6
commit 905f47a55f
5 changed files with 8530 additions and 6 deletions

TODO

@@ -87,8 +87,6 @@ General things to remember:
* When we get to full serialization, add json serialization
performance test.
* Add json to the large file tests.
* Document that keys other than "qpdf-v2" are ignored so people can
stash their own stuff.


@@ -10,8 +10,6 @@ chdir("qpdf") or die "chdir testdir failed: $!\n";
require TestDriver;
cleanup();
my $td = new TestDriver('large-file');
my $large_file_test_path = $ENV{'QPDF_LARGE_FILE_TEST_PATH'} || undef;
@@ -21,6 +19,7 @@ if (defined($large_file_test_path))
$large_file_test_path =~ s!\\!/!g;
}
large_cleanup();
my $nlarge = 1;
if (defined $large_file_test_path)
@@ -32,12 +31,17 @@ else
$td->notify("--- Skipping tests on actual large files ---");
}
my $n_tests = $nlarge * 13;
my $n_tests = $nlarge * 21;
for (my $large = 0; $large < $nlarge; ++$large)
{
my $now = time();
my $show_time = sub {};
if ($large)
{
$td->notify("--- Running tests on actual large files ---");
$show_time = sub {
$td->notify("--- time: " . (time() - $now));
};
}
else
{
@@ -45,14 +49,21 @@ for (my $large = 0; $large < $nlarge; ++$large)
}
my $size = ($large ? "large" : "small");
my $file = $large ? "$large_file_test_path/a.pdf" : "a.pdf";
my $json = $large ? "$large_file_test_path/a.json" : "a.json";
# Pick a stream near the end of the file to test.
my $stream = $large ? "$large_file_test_path/a.json-603" : "a.json-603";
$now = time();
$td->runtest("write test file",
{$td->COMMAND => "test_large_file write $size '$file'"},
{$td->FILE => "large_file.out", $td->EXIT_STATUS => 0},
$td->NORMALIZE_NEWLINES);
&$show_time();
$now = time();
$td->runtest("read test file",
{$td->COMMAND => "test_large_file read $size '$file'"},
{$td->FILE => "large_file.out", $td->EXIT_STATUS => 0},
$td->NORMALIZE_NEWLINES);
&$show_time();
$td->runtest("check",
{$td->COMMAND => "qpdf --suppress-recovery --check '$file'",
$td->FILTER => "grep -v checking"},
@@ -60,6 +71,49 @@ for (my $large = 0; $large < $nlarge; ++$large)
$td->EXIT_STATUS => 0},
$td->NORMALIZE_NEWLINES);
$now = time();
$td->runtest("large to json inline",
{$td->COMMAND => "qpdf --json-output '$file' '$json'"},
{$td->STRING => "", $td->EXIT_STATUS => 0});
&$show_time();
$now = time();
$td->runtest("json inline to large",
{$td->COMMAND =>
"qpdf --json-input --compress-streams=n" .
" --static-id '$json' '$file'"},
{$td->STRING => "", $td->EXIT_STATUS => 0});
&$show_time();
$td->runtest("read test file",
{$td->COMMAND => "test_large_file read $size '$file'"},
{$td->FILE => "large_file.out", $td->EXIT_STATUS => 0},
$td->NORMALIZE_NEWLINES);
$now = time();
$td->runtest("large to json with file",
{$td->COMMAND =>
"qpdf --json-output --json-stream-data=file" .
" '$file' '$json'"},
{$td->STRING => "", $td->EXIT_STATUS => 0});
&$show_time();
$td->runtest("inspect json",
{$td->FILE => $json, $td->FILTER => "perl filter-json.pl"},
{$td->FILE => "exp-large-json.json"},
$td->NORMALIZE_NEWLINES);
$td->runtest("spot check stream",
{$td->FILE => $stream},
{$td->FILE => "exp-large-stream"},
$td->NORMALIZE_NEWLINES);
$now = time();
$td->runtest("json with file to large",
{$td->COMMAND =>
"qpdf --json-input" .
" --compress-streams=n '$json' '$file'"},
{$td->STRING => "", $td->EXIT_STATUS => 0});
&$show_time();
$td->runtest("read test file",
{$td->COMMAND => "test_large_file read $size '$file'"},
{$td->FILE => "large_file.out", $td->EXIT_STATUS => 0},
$td->NORMALIZE_NEWLINES);
for my $ostream (0, 1)
{
for my $linearize (0, 1)
@@ -126,7 +180,20 @@
$td->EXIT_STATUS => 0},
$td->NORMALIZE_NEWLINES);
unlink $file;
large_cleanup();
}
large_cleanup();
sub large_cleanup
{
cleanup();
system("rm -f a.json* a.pdf*");
if (defined $large_file_test_path)
{
system("rm -f $large_file_test_path/a.pdf*" .
" $large_file_test_path/a.json*");
}
}
cleanup();
$td->report($n_tests);
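
For orientation, here is a minimal sketch (not part of the commit) of the JSON round trip the new runtest calls above exercise. It uses only qpdf options that already appear in the test; the names in.pdf, out.json, and roundtrip.pdf are placeholders.

use warnings;
use strict;

# Sketch only: assumes a qpdf build with --json-output/--json-input
# support is on PATH; all file names here are placeholders.
my $pdf  = 'in.pdf';
my $json = 'out.json';

# PDF -> JSON, writing stream data to side files named after the JSON
# output file (the test spot-checks one such file, a.json-603).
system('qpdf', '--json-output', '--json-stream-data=file', $pdf, $json) == 0
    or die "qpdf --json-output failed\n";

# JSON -> PDF again; the test then re-reads the result with
# test_large_file to confirm the content survived the round trip.
system('qpdf', '--json-input', '--compress-streams=n', $json, 'roundtrip.pdf') == 0
    or die "qpdf --json-input failed\n";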

File diff suppressed because it is too large


@@ -0,0 +1,2 @@
BT /F1 24 Tf 72 720 Td (page 200) Tj ET
q 468 0 0 468 72 72 cm /Im1 Do Q


@@ -0,0 +1,9 @@
use warnings;
use strict;

# Normalize the generated qpdf JSON so that the output for the small
# and the large test file can both be compared with exp-large-json.json.
while (<>)
{
    # Strip any directory prefix from the stream data file name.
    s/("datafile": ").*?(a.json-.*",)/$1$2/;
    # Pin image /Width and /Height values to a fixed 50.
    s%("/(?:Width|Height)": )\d+(.*)%${1}50${2}%;
    print;
}
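
As a usage note (not from the commit): the test driver pipes the generated JSON through this script via the FILTER key before comparing it with exp-large-json.json. The tiny sketch below, run on made-up input lines, shows what the two substitutions do.

use warnings;
use strict;

# Made-up sample lines, not taken from real a.json output.
my $datafile = qq{  "datafile": "/some/large/path/a.json-603",\n};
$datafile =~ s/("datafile": ").*?(a.json-.*",)/$1$2/;
print $datafile;    # prints:   "datafile": "a.json-603",

my $dim = qq{  "/Width": 12000,\n};
$dim =~ s%("/(?:Width|Height)": )\d+(.*)%${1}50${2}%;
print $dim;         # prints:   "/Width": 50,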