Skip to content

Commit 994a100

Browse files
Allocate JsonLexContexts on the heap to avoid warnings
The stack-allocated JsonLexContexts, in combination with codepaths using goto, were causing warnings when compiling with LTO enabled, as the optimizer is unable to figure out that it is safe. Rather than contort the code with workarounds for this, simply heap-allocate the structs instead, as these are not in any performance-critical paths. Author: Daniel Gustafsson <daniel@yesql.se> Reported-by: Tom Lane <tgl@sss.pgh.pa.us> Reviewed-by: Jacob Champion <jacob.champion@enterprisedb.com> Reviewed-by: Tom Lane <tgl@sss.pgh.pa.us> Discussion: https://postgr.es/m/2074634.1744839761@sss.pgh.pa.us
1 parent 0ff95e0 commit 994a100

File tree

2 files changed

+20
-15
lines changed

2 files changed

+20
-15
lines changed

src/interfaces/libpq/fe-auth-oauth.c

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -476,7 +476,7 @@ issuer_from_well_known_uri(PGconn *conn, const char *wkuri)
476476
static bool
477477
handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
478478
{
479-
JsonLexContext lex = {0};
479+
JsonLexContext *lex;
480480
JsonSemAction sem = {0};
481481
JsonParseErrorType err;
482482
struct json_ctx ctx = {0};
@@ -504,8 +504,8 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
504504
return false;
505505
}
506506

507-
makeJsonLexContextCstringLen(&lex, msg, msglen, PG_UTF8, true);
508-
setJsonLexContextOwnsTokens(&lex, true); /* must not leak on error */
507+
lex = makeJsonLexContextCstringLen(NULL, msg, msglen, PG_UTF8, true);
508+
setJsonLexContextOwnsTokens(lex, true); /* must not leak on error */
509509

510510
initPQExpBuffer(&ctx.errbuf);
511511
sem.semstate = &ctx;
@@ -516,7 +516,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
516516
sem.array_start = oauth_json_array_start;
517517
sem.scalar = oauth_json_scalar;
518518

519-
err = pg_parse_json(&lex, &sem);
519+
err = pg_parse_json(lex, &sem);
520520

521521
if (err == JSON_SEM_ACTION_FAILED)
522522
{
@@ -535,7 +535,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
535535
}
536536
}
537537
else if (err != JSON_SUCCESS)
538-
errmsg = json_errdetail(err, &lex);
538+
errmsg = json_errdetail(err, lex);
539539

540540
if (errmsg)
541541
libpq_append_conn_error(conn,
@@ -544,7 +544,7 @@ handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
544544

545545
/* Don't need the error buffer or the JSON lexer anymore. */
546546
termPQExpBuffer(&ctx.errbuf);
547-
freeJsonLexContext(&lex);
547+
freeJsonLexContext(lex);
548548

549549
if (errmsg)
550550
goto cleanup;

src/test/modules/test_json_parser/test_json_parser_incremental.c

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ main(int argc, char **argv)
8484
char buff[BUFSIZE];
8585
FILE *json_file;
8686
JsonParseErrorType result;
87-
JsonLexContext lex;
87+
JsonLexContext *lex;
8888
StringInfoData json;
8989
int n_read;
9090
size_t chunk_size = DEFAULT_CHUNK_SIZE;
@@ -98,6 +98,10 @@ main(int argc, char **argv)
9898

9999
pg_logging_init(argv[0]);
100100

101+
lex = calloc(1, sizeof(JsonLexContext));
102+
if (!lex)
103+
pg_fatal("out of memory");
104+
101105
while ((c = getopt(argc, argv, "c:os")) != -1)
102106
{
103107
switch (c)
@@ -113,7 +117,7 @@ main(int argc, char **argv)
113117
case 's': /* do semantic processing */
114118
testsem = &sem;
115119
sem.semstate = palloc(sizeof(struct DoState));
116-
((struct DoState *) sem.semstate)->lex = &lex;
120+
((struct DoState *) sem.semstate)->lex = lex;
117121
((struct DoState *) sem.semstate)->buf = makeStringInfo();
118122
need_strings = true;
119123
break;
@@ -131,8 +135,8 @@ main(int argc, char **argv)
131135
exit(1);
132136
}
133137

134-
makeJsonLexContextIncremental(&lex, PG_UTF8, need_strings);
135-
setJsonLexContextOwnsTokens(&lex, lex_owns_tokens);
138+
makeJsonLexContextIncremental(lex, PG_UTF8, need_strings);
139+
setJsonLexContextOwnsTokens(lex, lex_owns_tokens);
136140
initStringInfo(&json);
137141

138142
if ((json_file = fopen(testfile, PG_BINARY_R)) == NULL)
@@ -165,25 +169,25 @@ main(int argc, char **argv)
165169
bytes_left -= n_read;
166170
if (bytes_left > 0)
167171
{
168-
result = pg_parse_json_incremental(&lex, testsem,
172+
result = pg_parse_json_incremental(lex, testsem,
169173
json.data, n_read,
170174
false);
171175
if (result != JSON_INCOMPLETE)
172176
{
173-
fprintf(stderr, "%s\n", json_errdetail(result, &lex));
177+
fprintf(stderr, "%s\n", json_errdetail(result, lex));
174178
ret = 1;
175179
goto cleanup;
176180
}
177181
resetStringInfo(&json);
178182
}
179183
else
180184
{
181-
result = pg_parse_json_incremental(&lex, testsem,
185+
result = pg_parse_json_incremental(lex, testsem,
182186
json.data, n_read,
183187
true);
184188
if (result != JSON_SUCCESS)
185189
{
186-
fprintf(stderr, "%s\n", json_errdetail(result, &lex));
190+
fprintf(stderr, "%s\n", json_errdetail(result, lex));
187191
ret = 1;
188192
goto cleanup;
189193
}
@@ -195,8 +199,9 @@ main(int argc, char **argv)
195199

196200
cleanup:
197201
fclose(json_file);
198-
freeJsonLexContext(&lex);
202+
freeJsonLexContext(lex);
199203
free(json.data);
204+
free(lex);
200205

201206
return ret;
202207
}

0 commit comments

Comments
 (0)