summaryrefslogtreecommitdiff
path: root/src/test
diff options
context:
space:
mode:
authorAndrew Dunstan2024-11-27 17:05:44 +0000
committerAndrew Dunstan2024-11-27 17:07:14 +0000
commit5c32c21afe6449a19b6dfafa17f29b71c9595e03 (patch)
treee04a23c34956fcd4eab964e35c9a9b0f12277076 /src/test
parent262283d5eec3d582a8645692fede2e8f5e6029bd (diff)
jsonapi: add lexer option to keep token ownership
Commit 0785d1b8b adds support for libpq as a JSON client, but allocations for string tokens can still be leaked during parsing failures. This is tricky to fix for the object_field semantic callbacks: the field name must remain valid until the end of the object, but if a parsing error is encountered partway through, object_field_end() won't be invoked and the client won't get a chance to free the field name. This patch adds a flag to switch the ownership of parsed tokens to the lexer. When this is enabled, the client must make a copy of any tokens it wants to persist past the callback lifetime, but the lexer will handle necessary cleanup on failure. Backend uses of the JSON parser don't need to use this flag, since the parser's allocations will occur in a short-lived memory context. A -o option has been added to test_json_parser_incremental to exercise the new setJsonLexContextOwnsTokens() API, and the test_json_parser TAP tests make use of it. (The test program now cleans up allocated memory, so that tests can be usefully run under leak sanitizers.) Author: Jacob Champion Discussion: https://postgr.es/m/CAOYmi+kb38EciwyBQOf9peApKGwraHqA7pgzBkvoUnw5BRfS1g@mail.gmail.com
Diffstat (limited to 'src/test')
-rw-r--r--src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl13
-rw-r--r--src/test/modules/test_json_parser/t/002_inline.pl15
-rw-r--r--src/test/modules/test_json_parser/t/003_test_semantic.pl11
-rw-r--r--src/test/modules/test_json_parser/test_json_parser_incremental.c37
4 files changed, 54 insertions, 22 deletions
diff --git a/src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl b/src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl
index 8cc42e8e292..0c663b8e689 100644
--- a/src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl
+++ b/src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl
@@ -13,21 +13,24 @@ use FindBin;
my $test_file = "$FindBin::RealBin/../tiny.json";
-my @exes =
- ("test_json_parser_incremental", "test_json_parser_incremental_shlib");
+my @exes = (
+ [ "test_json_parser_incremental", ],
+ [ "test_json_parser_incremental", "-o", ],
+ [ "test_json_parser_incremental_shlib", ],
+ [ "test_json_parser_incremental_shlib", "-o", ]);
foreach my $exe (@exes)
{
- note "testing executable $exe";
+ note "testing executable @$exe";
# Test the usage error
- my ($stdout, $stderr) = run_command([ $exe, "-c", 10 ]);
+ my ($stdout, $stderr) = run_command([ @$exe, "-c", 10 ]);
like($stderr, qr/Usage:/, 'error message if not enough arguments');
# Test that we get success for small chunk sizes from 64 down to 1.
for (my $size = 64; $size > 0; $size--)
{
- ($stdout, $stderr) = run_command([ $exe, "-c", $size, $test_file ]);
+ ($stdout, $stderr) = run_command([ @$exe, "-c", $size, $test_file ]);
like($stdout, qr/SUCCESS/, "chunk size $size: test succeeds");
is($stderr, "", "chunk size $size: no error output");
diff --git a/src/test/modules/test_json_parser/t/002_inline.pl b/src/test/modules/test_json_parser/t/002_inline.pl
index 5b6c6dc4ae7..71c462b3191 100644
--- a/src/test/modules/test_json_parser/t/002_inline.pl
+++ b/src/test/modules/test_json_parser/t/002_inline.pl
@@ -13,7 +13,7 @@ use Test::More;
use File::Temp qw(tempfile);
my $dir = PostgreSQL::Test::Utils::tempdir;
-my $exe;
+my @exe;
sub test
{
@@ -35,7 +35,7 @@ sub test
foreach my $size (reverse(1 .. $chunk))
{
- my ($stdout, $stderr) = run_command([ $exe, "-c", $size, $fname ]);
+ my ($stdout, $stderr) = run_command([ @exe, "-c", $size, $fname ]);
if (defined($params{error}))
{
@@ -53,13 +53,16 @@ sub test
}
}
-my @exes =
- ("test_json_parser_incremental", "test_json_parser_incremental_shlib");
+my @exes = (
+ [ "test_json_parser_incremental", ],
+ [ "test_json_parser_incremental", "-o", ],
+ [ "test_json_parser_incremental_shlib", ],
+ [ "test_json_parser_incremental_shlib", "-o", ]);
foreach (@exes)
{
- $exe = $_;
- note "testing executable $exe";
+ @exe = @$_;
+ note "testing executable @exe";
test("number", "12345");
test("string", '"hello"');
diff --git a/src/test/modules/test_json_parser/t/003_test_semantic.pl b/src/test/modules/test_json_parser/t/003_test_semantic.pl
index c11480172d3..c57ccdb6602 100644
--- a/src/test/modules/test_json_parser/t/003_test_semantic.pl
+++ b/src/test/modules/test_json_parser/t/003_test_semantic.pl
@@ -16,14 +16,17 @@ use File::Temp qw(tempfile);
my $test_file = "$FindBin::RealBin/../tiny.json";
my $test_out = "$FindBin::RealBin/../tiny.out";
-my @exes =
- ("test_json_parser_incremental", "test_json_parser_incremental_shlib");
+my @exes = (
+ [ "test_json_parser_incremental", ],
+ [ "test_json_parser_incremental", "-o", ],
+ [ "test_json_parser_incremental_shlib", ],
+ [ "test_json_parser_incremental_shlib", "-o", ]);
foreach my $exe (@exes)
{
- note "testing executable $exe";
+ note "testing executable @$exe";
- my ($stdout, $stderr) = run_command([ $exe, "-s", $test_file ]);
+ my ($stdout, $stderr) = run_command([ @$exe, "-s", $test_file ]);
is($stderr, "", "no error output");
diff --git a/src/test/modules/test_json_parser/test_json_parser_incremental.c b/src/test/modules/test_json_parser/test_json_parser_incremental.c
index 294e5f74eac..0b02b5203bf 100644
--- a/src/test/modules/test_json_parser/test_json_parser_incremental.c
+++ b/src/test/modules/test_json_parser/test_json_parser_incremental.c
@@ -18,6 +18,10 @@
* If the -s flag is given, the program does semantic processing. This should
* just mirror back the json, albeit with white space changes.
*
+ * If the -o flag is given, the JSONLEX_CTX_OWNS_TOKENS flag is set. (This can
+ * be used in combination with a leak sanitizer; without the option, the parser
+ * may leak memory with invalid JSON.)
+ *
* The argument specifies the file containing the JSON input.
*
*-------------------------------------------------------------------------
@@ -72,6 +76,8 @@ static JsonSemAction sem = {
.scalar = do_scalar
};
+static bool lex_owns_tokens = false;
+
int
main(int argc, char **argv)
{
@@ -88,10 +94,11 @@ main(int argc, char **argv)
char *testfile;
int c;
bool need_strings = false;
+ int ret = 0;
pg_logging_init(argv[0]);
- while ((c = getopt(argc, argv, "c:s")) != -1)
+ while ((c = getopt(argc, argv, "c:os")) != -1)
{
switch (c)
{
@@ -100,6 +107,9 @@ main(int argc, char **argv)
if (chunk_size > BUFSIZE)
pg_fatal("chunk size cannot exceed %d", BUFSIZE);
break;
+ case 'o': /* switch token ownership */
+ lex_owns_tokens = true;
+ break;
case 's': /* do semantic processing */
testsem = &sem;
sem.semstate = palloc(sizeof(struct DoState));
@@ -112,7 +122,7 @@ main(int argc, char **argv)
if (optind < argc)
{
- testfile = pg_strdup(argv[optind]);
+ testfile = argv[optind];
optind++;
}
else
@@ -122,6 +132,7 @@ main(int argc, char **argv)
}
makeJsonLexContextIncremental(&lex, PG_UTF8, need_strings);
+ setJsonLexContextOwnsTokens(&lex, lex_owns_tokens);
initStringInfo(&json);
if ((json_file = fopen(testfile, PG_BINARY_R)) == NULL)
@@ -160,7 +171,8 @@ main(int argc, char **argv)
if (result != JSON_INCOMPLETE)
{
fprintf(stderr, "%s\n", json_errdetail(result, &lex));
- exit(1);
+ ret = 1;
+ goto cleanup;
}
resetStringInfo(&json);
}
@@ -172,15 +184,21 @@ main(int argc, char **argv)
if (result != JSON_SUCCESS)
{
fprintf(stderr, "%s\n", json_errdetail(result, &lex));
- exit(1);
+ ret = 1;
+ goto cleanup;
}
if (!need_strings)
printf("SUCCESS!\n");
break;
}
}
+
+cleanup:
fclose(json_file);
- exit(0);
+ freeJsonLexContext(&lex);
+ free(json.data);
+
+ return ret;
}
/*
@@ -230,7 +248,8 @@ do_object_field_start(void *state, char *fname, bool isnull)
static JsonParseErrorType
do_object_field_end(void *state, char *fname, bool isnull)
{
- /* nothing to do really */
+ if (!lex_owns_tokens)
+ free(fname);
return JSON_SUCCESS;
}
@@ -291,6 +310,9 @@ do_scalar(void *state, char *token, JsonTokenType tokentype)
else
printf("%s", token);
+ if (!lex_owns_tokens)
+ free(token);
+
return JSON_SUCCESS;
}
@@ -343,7 +365,8 @@ usage(const char *progname)
{
fprintf(stderr, "Usage: %s [OPTION ...] testfile\n", progname);
fprintf(stderr, "Options:\n");
- fprintf(stderr, " -c chunksize size of piece fed to parser (default 64)n");
+ fprintf(stderr, " -c chunksize size of piece fed to parser (default 64)\n");
+ fprintf(stderr, " -o set JSONLEX_CTX_OWNS_TOKENS for leak checking\n");
fprintf(stderr, " -s do semantic processing\n");
}