#define atoxid(x)  ((TransactionId) strtoul((x), NULL, 10))

#define MAX_TOKEN	256
+ #define MAX_LINE	8192

/* callback data for check_network_callback */
typedef struct check_network_data
@@ -93,7 +94,7 @@ static MemoryContext parsed_ident_context = NULL;


static MemoryContext tokenize_file(const char *filename, FILE *file,
-              List **lines, List **line_nums);
+              List **lines, List **line_nums, List **raw_lines);
static List *tokenize_inc_file(List *tokens, const char *outer_filename,
               const char *inc_filename);
static bool parse_hba_auth_opt(char *name, char *val, HbaLine *hbaline,
@@ -111,7 +112,8 @@ pg_isblank(const char c)


/*
- * Grab one token out of fp. Tokens are strings of non-blank
+ * Grab one token out of the string pointed to by lineptr.
+ * Tokens are strings of non-blank
 * characters bounded by blank characters, commas, beginning of line, and
 * end of line.  Blank means space or tab.  Tokens can be delimited by
 * double quotes (this allows the inclusion of blanks, but not newlines).
@@ -134,7 +136,7 @@ pg_isblank(const char c)
 * Handle comments.
 */
static bool
- next_token(FILE *fp, char *buf, int bufsz, bool *initial_quote,
+ next_token(char **lineptr, char *buf, int bufsz, bool *initial_quote,
           bool *terminating_comma)
{
    int         c;
@@ -151,10 +153,10 @@ next_token(FILE *fp, char *buf, int bufsz, bool *initial_quote,
    *terminating_comma = false;

    /* Move over initial whitespace and commas */
-   while ((c = getc(fp)) != EOF && (pg_isblank(c) || c == ','))
+   while ((c = (*(*lineptr)++)) != '\0' && (pg_isblank(c) || c == ','))
        ;

-   if (c == EOF || c == '\n')
+   if (c == '\0' || c == '\n')
    {
        *buf = '\0';
        return false;
@@ -164,17 +166,17 @@ next_token(FILE *fp, char *buf, int bufsz, bool *initial_quote,
     * Build a token in buf of next characters up to EOF, EOL, unquoted comma,
     * or unquoted whitespace.
     */
-   while (c != EOF && c != '\n' &&
+   while (c != '\0' && c != '\n' &&
           (!pg_isblank(c) || in_quote))
    {
        /* skip comments to EOL */
        if (c == '#' && !in_quote)
        {
-           while ((c = getc(fp)) != EOF && c != '\n')
+           while ((c = (*(*lineptr)++)) != '\0' && c != '\n')
                ;
            /* If only comment, consume EOL too; return EOL */
-           if (c != EOF && buf == start_buf)
-               c = getc(fp);
+           if (c != '\0' && buf == start_buf)
+               (*lineptr)++;
            break;
        }

@@ -186,7 +188,7 @@ next_token(FILE *fp, char *buf, int bufsz, bool *initial_quote,
                     errmsg("authentication file token too long, skipping: \"%s\"",
                            start_buf)));
            /* Discard remainder of line */
-           while ((c = getc(fp)) != EOF && c != '\n')
+           while ((c = (*(*lineptr)++)) != '\0' && c != '\n')
                ;
            break;
        }
@@ -215,15 +217,14 @@ next_token(FILE *fp, char *buf, int bufsz, bool *initial_quote,
            *initial_quote = true;
        }

-       c = getc(fp);
+       c = *(*lineptr)++;
    }

    /*
     * Put back the char right after the token (critical in case it is EOL,
     * since we need to detect end-of-line at next call).
     */
-   if (c != EOF)
-       ungetc(c, fp);
+   (*lineptr)--;

    *buf = '\0';

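(Illustration only, not part of the patch: with the new signature the caller, not stdio, owns the read position. A minimal sketch of driving the reworked next_token() over one NUL-terminated line from inside hba.c; the helper name walk_line is hypothetical.)

    static void
    walk_line(char *line)
    {
        char       *lineptr = line;     /* cursor that next_token() advances */
        char        buf[MAX_TOKEN];
        bool        initial_quote;
        bool        terminating_comma;

        /* next_token() returns false once the line (or a trailing comment) ends */
        while (next_token(&lineptr, buf, sizeof(buf),
                          &initial_quote, &terminating_comma))
        {
            /* buf holds one token; lineptr now points just past it */
        }
    }

(In the patch itself this role is played by next_field_expand(), which additionally expands @file inclusions and comma-separated lists.)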
@@ -258,13 +259,13 @@ copy_hba_token(HbaToken *in)


/*
- * Tokenize one HBA field from a file, handling file inclusion and comma lists.
+ * Tokenize one HBA field from a line, handling file inclusion and comma lists.
 *
 * The result is a List of HbaToken structs for each individual token,
 * or NIL if we reached EOL.
 */
static List *
- next_field_expand(const char *filename, FILE *file)
+ next_field_expand(const char *filename, char **lineptr)
{
    char        buf[MAX_TOKEN];
    bool        trailing_comma;
@@ -273,7 +274,7 @@ next_field_expand(const char *filename, FILE *file)

    do
    {
-       if (!next_token(file, buf, sizeof(buf), &initial_quote, &trailing_comma))
+       if (!next_token(lineptr, buf, sizeof(buf), &initial_quote, &trailing_comma))
            break;

        /* Is this referencing a file? */
@@ -335,7 +336,7 @@ tokenize_inc_file(List *tokens,
    }

    /* There is possible recursion here if the file contains @ */
-   linecxt = tokenize_file(inc_fullname, inc_file, &inc_lines, &inc_line_nums);
+   linecxt = tokenize_file(inc_fullname, inc_file, &inc_lines, &inc_line_nums, NULL);

    FreeFile(inc_file);
    pfree(inc_fullname);
@@ -364,8 +365,8 @@ tokenize_inc_file(List *tokens,
}

/*
- * Tokenize the given file, storing the resulting data into two Lists: a
- * List of lines, and a List of line numbers.
+ * Tokenize the given file, storing the resulting data into three Lists: a
+ * List of lines, a List of line numbers, and a List of raw line contents.
 *
 * The list of lines is a triple-nested List structure.  Each line is a List of
 * fields, and each field is a List of HbaTokens.
@@ -377,7 +378,7 @@ tokenize_inc_file(List *tokens,
 */
static MemoryContext
tokenize_file(const char *filename, FILE *file,
-             List **lines, List **line_nums)
+             List **lines, List **line_nums, List **raw_lines)
{
    List       *current_line = NIL;
    List       *current_field = NIL;
@@ -396,30 +397,51 @@ tokenize_file(const char *filename, FILE *file,

    while (!feof(file) && !ferror(file))
    {
-       current_field = next_field_expand(filename, file);
+       char        rawline[MAX_LINE];
+       char       *lineptr;

-       /* add tokens to list, unless we are at EOL or comment start */
-       if (list_length(current_field) > 0)
+       if (!fgets(rawline, sizeof(rawline), file))
+           break;
+       if (strlen(rawline) == MAX_LINE - 1)
+           /* Line too long! */
+           ereport(ERROR,
+                   (errcode(ERRCODE_CONFIG_FILE_ERROR),
+                    errmsg("authentication file line too long"),
+                    errcontext("line %d of configuration file \"%s\"",
+                               line_number, filename)));
+
+       /* Strip trailing linebreak from rawline */
+       while (rawline[strlen(rawline) - 1] == '\n' ||
+              rawline[strlen(rawline) - 1] == '\r')
+           rawline[strlen(rawline) - 1] = '\0';
+
+       lineptr = rawline;
+       while (strlen(lineptr) > 0)
        {
-           if (current_line == NIL)
-           {
-               /* make a new line List, record its line number */
-               current_line = lappend(current_line, current_field);
-               *lines = lappend(*lines, current_line);
-               *line_nums = lappend_int(*line_nums, line_number);
-           }
-           else
+           current_field = next_field_expand(filename, &lineptr);
+
+           /* add tokens to list, unless we are at EOL or comment start */
+           if (list_length(current_field) > 0)
            {
-               /* append tokens to current line's list */
-               current_line = lappend(current_line, current_field);
+               if (current_line == NIL)
+               {
+                   /* make a new line List, record its line number */
+                   current_line = lappend(current_line, current_field);
+                   *lines = lappend(*lines, current_line);
+                   *line_nums = lappend_int(*line_nums, line_number);
+                   if (raw_lines)
+                       *raw_lines = lappend(*raw_lines, pstrdup(rawline));
+               }
+               else
+               {
+                   /* append tokens to current line's list */
+                   current_line = lappend(current_line, current_field);
+               }
            }
        }
-       else
-       {
-           /* we are at real or logical EOL, so force a new line List */
-           current_line = NIL;
-           line_number++;
-       }
+       /* we are at real or logical EOL, so force a new line List */
+       current_line = NIL;
+       line_number++;
    }

    MemoryContextSwitchTo(oldcxt);
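(Worked example, illustration only.) For a pg_hba.conf entry read as, say, line 42 of the file:

    host  all  all  127.0.0.1/32  trust

one pass through the loop above appends, in lockstep:

    *lines      a List of five fields, each a one-element List of HbaToken
                ("host", "all", "all", "127.0.0.1/32", "trust")
    *line_nums  the int 42
    *raw_lines  pstrdup("host  all  all  127.0.0.1/32  trust"),
                only when the caller passed a non-NULL raw_lines

load_hba() below passes &hba_raw_lines to get the third list; tokenize_inc_file() and load_ident() pass NULL and skip the copy.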
@@ -815,7 +837,7 @@ check_same_host_or_net(SockAddr *raddr, IPCompareMethod method)
 * NULL.
 */
static HbaLine *
- parse_hba_line(List *line, int line_num)
+ parse_hba_line(List *line, int line_num, char *raw_line)
{
    char       *str;
    struct addrinfo *gai_result;
@@ -831,6 +853,7 @@ parse_hba_line(List *line, int line_num)

    parsedline = palloc0(sizeof(HbaLine));
    parsedline->linenumber = line_num;
+   parsedline->rawline = pstrdup(raw_line);

    /* Check the record type. */
    field = list_head(line);
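(Assumption, for context: the raw text copied here presumably lands in a new HbaLine field declared in src/include/libpq/hba.h, which this diff does not show. A sketch of the assumed struct change:)

    typedef struct HbaLine
    {
        int         linenumber;
        char       *rawline;        /* assumed new field, filled just above */
        /* ... remaining existing fields unchanged ... */
    } HbaLine;

(Using pstrdup() means the raw text is allocated in whatever context parse_hba_line() runs in, i.e. the long-lived hbacxt in load_hba() below, rather than in tokenize_file()'s short-lived line context.)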
@@ -1761,8 +1784,10 @@ load_hba(void)
    FILE       *file;
    List       *hba_lines = NIL;
    List       *hba_line_nums = NIL;
+   List       *hba_raw_lines = NIL;
    ListCell   *line,
-              *line_num;
+              *line_num,
+              *raw_line;
    List       *new_parsed_lines = NIL;
    bool        ok = true;
    MemoryContext linecxt;
@@ -1779,7 +1804,7 @@ load_hba(void)
        return false;
    }

-   linecxt = tokenize_file(HbaFileName, file, &hba_lines, &hba_line_nums);
+   linecxt = tokenize_file(HbaFileName, file, &hba_lines, &hba_line_nums, &hba_raw_lines);
    FreeFile(file);

    /* Now parse all the lines */
@@ -1789,11 +1814,11 @@ load_hba(void)
                                   ALLOCSET_DEFAULT_MINSIZE,
                                   ALLOCSET_DEFAULT_MAXSIZE);
    oldcxt = MemoryContextSwitchTo(hbacxt);
-   forboth(line, hba_lines, line_num, hba_line_nums)
+   forthree(line, hba_lines, line_num, hba_line_nums, raw_line, hba_raw_lines)
    {
        HbaLine    *newline;

-       if ((newline = parse_hba_line(lfirst(line), lfirst_int(line_num))) == NULL)
+       if ((newline = parse_hba_line(lfirst(line), lfirst_int(line_num), lfirst(raw_line))) == NULL)
        {
            /*
             * Parse error in the file, so indicate there's a problem.  NB: a
@@ -2153,7 +2178,7 @@ load_ident(void)
        return false;
    }

-   linecxt = tokenize_file(IdentFileName, file, &ident_lines, &ident_line_nums);
+   linecxt = tokenize_file(IdentFileName, file, &ident_lines, &ident_line_nums, NULL);
    FreeFile(file);

    /* Now parse all the lines */