@ -21,10 +21,70 @@
# include "mb/pg_wchar.h"
# include "port/pg_lfind.h"
# ifndef FRONTEND
# ifdef JSONAPI_USE_PQEXPBUFFER
# include "pqexpbuffer.h"
# else
# include "lib/stringinfo.h"
# include "miscadmin.h"
# endif
/*
* By default , we will use palloc / pfree along with StringInfo . In libpq ,
* use malloc and PQExpBuffer , and return JSON_OUT_OF_MEMORY on out - of - memory .
*/
# ifdef JSONAPI_USE_PQEXPBUFFER
# define STRDUP(s) strdup(s)
# define ALLOC(size) malloc(size)
# define ALLOC0(size) calloc(1, size)
# define REALLOC realloc
# define FREE(s) free(s)
# define jsonapi_appendStringInfo appendPQExpBuffer
# define jsonapi_appendBinaryStringInfo appendBinaryPQExpBuffer
# define jsonapi_appendStringInfoChar appendPQExpBufferChar
/* XXX should we add a macro version to PQExpBuffer? */
# define jsonapi_appendStringInfoCharMacro appendPQExpBufferChar
# define jsonapi_makeStringInfo createPQExpBuffer
# define jsonapi_initStringInfo initPQExpBuffer
# define jsonapi_resetStringInfo resetPQExpBuffer
# define jsonapi_termStringInfo termPQExpBuffer
# define jsonapi_destroyStringInfo destroyPQExpBuffer
# else /* !JSONAPI_USE_PQEXPBUFFER */
# define STRDUP(s) pstrdup(s)
# define ALLOC(size) palloc(size)
# define ALLOC0(size) palloc0(size)
# define REALLOC repalloc
# ifdef FRONTEND
# define FREE pfree
# else
/*
* Backend pfree ( ) doesn ' t handle NULL pointers like the frontend ' s does ; smooth
* that over to reduce mental gymnastics . Avoid multiple evaluation of the macro
* argument to avoid future hair - pulling .
*/
# define FREE(s) do { \
void * __v = ( s ) ; \
if ( __v ) \
pfree ( __v ) ; \
} while ( 0 )
# endif
# define jsonapi_appendStringInfo appendStringInfo
# define jsonapi_appendBinaryStringInfo appendBinaryStringInfo
# define jsonapi_appendStringInfoChar appendStringInfoChar
# define jsonapi_appendStringInfoCharMacro appendStringInfoCharMacro
# define jsonapi_makeStringInfo makeStringInfo
# define jsonapi_initStringInfo initStringInfo
# define jsonapi_resetStringInfo resetStringInfo
# define jsonapi_termStringInfo(s) pfree((s)->data)
# define jsonapi_destroyStringInfo destroyStringInfo
# endif /* JSONAPI_USE_PQEXPBUFFER */
/*
* The context of the parser is maintained by the recursive descent
* mechanism , but is passed explicitly to the error reporting routine
@ -103,7 +163,7 @@ struct JsonIncrementalState
{
bool is_last_chunk ;
bool partial_completed ;
StringInfoData partial_token ;
jsonapi_StrValType partial_token ;
} ;
/*
@ -219,6 +279,7 @@ static JsonParseErrorType parse_object(JsonLexContext *lex, const JsonSemAction
static JsonParseErrorType parse_array_element ( JsonLexContext * lex , const JsonSemAction * sem ) ;
static JsonParseErrorType parse_array ( JsonLexContext * lex , const JsonSemAction * sem ) ;
static JsonParseErrorType report_parse_error ( JsonParseContext ctx , JsonLexContext * lex ) ;
static bool allocate_incremental_state ( JsonLexContext * lex ) ;
/* the null action object used for pure validation */
const JsonSemAction nullSemAction =
@ -227,6 +288,10 @@ const JsonSemAction nullSemAction =
NULL , NULL , NULL , NULL , NULL
} ;
/* sentinels used for out-of-memory conditions */
static JsonLexContext failed_oom ;
static JsonIncrementalState failed_inc_oom ;
/* Parser support routines */
/*
@ -273,15 +338,11 @@ IsValidJsonNumber(const char *str, size_t len)
{
bool numeric_error ;
size_t total_len ;
JsonLexContext dummy_lex ;
JsonLexContext dummy_lex = { 0 } ;
if ( len < = 0 )
return false ;
dummy_lex . incremental = false ;
dummy_lex . inc_state = NULL ;
dummy_lex . pstack = NULL ;
/*
* json_lex_number expects a leading ' - ' to have been eaten already .
*
@ -321,6 +382,9 @@ IsValidJsonNumber(const char *str, size_t len)
* responsible for freeing the returned struct , either by calling
* freeJsonLexContext ( ) or ( in backend environment ) via memory context
* cleanup .
*
* In shlib code , any out - of - memory failures will be deferred to time
* of use ; this function is guaranteed to return a valid JsonLexContext .
*/
JsonLexContext *
makeJsonLexContextCstringLen ( JsonLexContext * lex , const char * json ,
@ -328,7 +392,9 @@ makeJsonLexContextCstringLen(JsonLexContext *lex, const char *json,
{
if ( lex = = NULL )
{
lex = palloc0 ( sizeof ( JsonLexContext ) ) ;
lex = ALLOC0 ( sizeof ( JsonLexContext ) ) ;
if ( ! lex )
return & failed_oom ;
lex - > flags | = JSONLEX_FREE_STRUCT ;
}
else
@ -339,15 +405,73 @@ makeJsonLexContextCstringLen(JsonLexContext *lex, const char *json,
lex - > line_number = 1 ;
lex - > input_length = len ;
lex - > input_encoding = encoding ;
lex - > need_escapes = need_escapes ;
if ( need_escapes )
{
lex - > strval = makeStringInfo ( ) ;
/*
* This call can fail in shlib code . We defer error handling to time
* of use ( json_lex_string ( ) ) since we might not need to parse any
* strings anyway .
*/
lex - > strval = jsonapi_makeStringInfo ( ) ;
lex - > flags | = JSONLEX_FREE_STRVAL ;
}
return lex ;
}
/*
 * Allocates the internal bookkeeping structures for incremental parsing. This
 * can only fail in-band with shlib code.
 */
#define JS_STACK_CHUNK_SIZE 64
#define JS_MAX_PROD_LEN 10		/* more than we need */
#define JSON_TD_MAX_STACK 6400	/* hard coded for now - this is a REALLY high
								 * number */

static bool
allocate_incremental_state(JsonLexContext *lex)
{
	void	   *pstack,
			   *prediction,
			   *fnames,
			   *fnull;

	/* allocate everything into temporaries so we can clean up on failure */
	lex->inc_state = ALLOC0(sizeof(JsonIncrementalState));
	pstack = ALLOC(sizeof(JsonParserStack));
	prediction = ALLOC(JS_STACK_CHUNK_SIZE * JS_MAX_PROD_LEN);
	fnames = ALLOC(JS_STACK_CHUNK_SIZE * sizeof(char *));
	fnull = ALLOC(JS_STACK_CHUNK_SIZE * sizeof(bool));

#ifdef JSONAPI_USE_PQEXPBUFFER
	/*
	 * In shlib code any of the above may return NULL.  Free whatever we did
	 * get (FREE tolerates NULL) and install the OOM sentinel so later calls
	 * can detect the failure in-band.  Outside shlib code the allocators
	 * throw on OOM, so no checks are needed.
	 */
	if (!lex->inc_state
		|| !pstack
		|| !prediction
		|| !fnames
		|| !fnull)
	{
		FREE(lex->inc_state);
		FREE(pstack);
		FREE(prediction);
		FREE(fnames);
		FREE(fnull);

		lex->inc_state = &failed_inc_oom;
		return false;
	}
#endif

	/*
	 * This init can itself fail in shlib code; that is detected later, at
	 * time of use, via the broken-buffer check in json_lex().
	 */
	jsonapi_initStringInfo(&(lex->inc_state->partial_token));
	lex->pstack = pstack;
	lex->pstack->stack_size = JS_STACK_CHUNK_SIZE;
	lex->pstack->prediction = prediction;
	lex->pstack->pred_index = 0;
	lex->pstack->fnames = fnames;
	lex->pstack->fnull = fnull;

	lex->incremental = true;
	return true;
}
/*
* makeJsonLexContextIncremental
@ -357,19 +481,20 @@ makeJsonLexContextCstringLen(JsonLexContext *lex, const char *json,
* we don ' t need the input , that will be handed in bit by bit to the
* parse routine . We also need an accumulator for partial tokens in case
* the boundary between chunks happens to fall in the middle of a token .
*
* In shlib code , any out - of - memory failures will be deferred to time of use ;
* this function is guaranteed to return a valid JsonLexContext .
*/
# define JS_STACK_CHUNK_SIZE 64
# define JS_MAX_PROD_LEN 10 /* more than we need */
# define JSON_TD_MAX_STACK 6400 / * hard coded for now - this is a REALLY high
* number */
JsonLexContext *
makeJsonLexContextIncremental ( JsonLexContext * lex , int encoding ,
bool need_escapes )
{
if ( lex = = NULL )
{
lex = palloc0 ( sizeof ( JsonLexContext ) ) ;
lex = ALLOC0 ( sizeof ( JsonLexContext ) ) ;
if ( ! lex )
return & failed_oom ;
lex - > flags | = JSONLEX_FREE_STRUCT ;
}
else
@ -377,42 +502,65 @@ makeJsonLexContextIncremental(JsonLexContext *lex, int encoding,
lex - > line_number = 1 ;
lex - > input_encoding = encoding ;
lex - > incremental = true ;
lex - > inc_state = palloc0 ( sizeof ( JsonIncrementalState ) ) ;
initStringInfo ( & ( lex - > inc_state - > partial_token ) ) ;
lex - > pstack = palloc ( sizeof ( JsonParserStack ) ) ;
lex - > pstack - > stack_size = JS_STACK_CHUNK_SIZE ;
lex - > pstack - > prediction = palloc ( JS_STACK_CHUNK_SIZE * JS_MAX_PROD_LEN ) ;
lex - > pstack - > pred_index = 0 ;
lex - > pstack - > fnames = palloc ( JS_STACK_CHUNK_SIZE * sizeof ( char * ) ) ;
lex - > pstack - > fnull = palloc ( JS_STACK_CHUNK_SIZE * sizeof ( bool ) ) ;
if ( ! allocate_incremental_state ( lex ) )
{
if ( lex - > flags & JSONLEX_FREE_STRUCT )
{
FREE ( lex ) ;
return & failed_oom ;
}
/* lex->inc_state tracks the OOM failure; we can return here. */
return lex ;
}
lex - > need_escapes = need_escapes ;
if ( need_escapes )
{
lex - > strval = makeStringInfo ( ) ;
/*
* This call can fail in shlib code . We defer error handling to time
* of use ( json_lex_string ( ) ) since we might not need to parse any
* strings anyway .
*/
lex - > strval = jsonapi_makeStringInfo ( ) ;
lex - > flags | = JSONLEX_FREE_STRVAL ;
}
return lex ;
}
static inline void
static inline bool
inc_lex_level ( JsonLexContext * lex )
{
lex - > lex_level + = 1 ;
if ( lex - > incremental & & lex - > lex_level > = lex - > pstack - > stack_size )
if ( lex - > incremental & & ( lex - > lex_level + 1 ) > = lex - > pstack - > stack_size )
{
lex - > pstack - > stack_size + = JS_STACK_CHUNK_SIZE ;
lex - > pstack - > prediction =
repalloc ( lex - > pstack - > prediction ,
lex - > pstack - > stack_size * JS_MAX_PROD_LEN ) ;
if ( lex - > pstack - > fnames )
lex - > pstack - > fnames =
repalloc ( lex - > pstack - > fnames ,
lex - > pstack - > stack_size * sizeof ( char * ) ) ;
if ( lex - > pstack - > fnull )
lex - > pstack - > fnull =
repalloc ( lex - > pstack - > fnull , lex - > pstack - > stack_size * sizeof ( bool ) ) ;
size_t new_stack_size ;
char * new_prediction ;
char * * new_fnames ;
bool * new_fnull ;
new_stack_size = lex - > pstack - > stack_size + JS_STACK_CHUNK_SIZE ;
new_prediction = REALLOC ( lex - > pstack - > prediction ,
new_stack_size * JS_MAX_PROD_LEN ) ;
new_fnames = REALLOC ( lex - > pstack - > fnames ,
new_stack_size * sizeof ( char * ) ) ;
new_fnull = REALLOC ( lex - > pstack - > fnull , new_stack_size * sizeof ( bool ) ) ;
# ifdef JSONAPI_USE_PQEXPBUFFER
if ( ! new_prediction | | ! new_fnames | | ! new_fnull )
return false ;
# endif
lex - > pstack - > stack_size = new_stack_size ;
lex - > pstack - > prediction = new_prediction ;
lex - > pstack - > fnames = new_fnames ;
lex - > pstack - > fnull = new_fnull ;
}
lex - > lex_level + = 1 ;
return true ;
}
static inline void
@ -482,24 +630,31 @@ get_fnull(JsonLexContext *lex)
/*
 * Release all memory owned by a JsonLexContext.
 *
 * Uses the allocator-agnostic FREE()/jsonapi_* wrappers throughout so the
 * same code serves both the palloc-based builds and the shlib (malloc/
 * PQExpBuffer) build.  The &failed_oom sentinel is a static object and must
 * never be freed.  A caller-owned struct (no JSONLEX_FREE_STRUCT flag) is
 * cleared to zeroes so it can be safely reused or re-freed.
 */
void
freeJsonLexContext(JsonLexContext *lex)
{
	static const JsonLexContext empty = {0};

	if (!lex || lex == &failed_oom)
		return;

	if (lex->flags & JSONLEX_FREE_STRVAL)
		jsonapi_destroyStringInfo(lex->strval);

	if (lex->errormsg)
		jsonapi_destroyStringInfo(lex->errormsg);

	if (lex->incremental)
	{
		/* partial_token is embedded, so terminate rather than destroy it */
		jsonapi_termStringInfo(&lex->inc_state->partial_token);
		FREE(lex->inc_state);
		FREE(lex->pstack->prediction);
		FREE(lex->pstack->fnames);
		FREE(lex->pstack->fnull);
		FREE(lex->pstack);
	}

	if (lex->flags & JSONLEX_FREE_STRUCT)
		FREE(lex);
	else
		*lex = empty;
}
/*
@ -522,22 +677,13 @@ JsonParseErrorType
pg_parse_json ( JsonLexContext * lex , const JsonSemAction * sem )
{
# ifdef FORCE_JSON_PSTACK
lex - > incremental = true ;
lex - > inc_state = palloc0 ( sizeof ( JsonIncrementalState ) ) ;
/*
* We don ' t need partial token processing , there is only one chunk . But we
* still need to init the partial token string so that freeJsonLexContext
* works .
* works , so perform the full incremental initialization .
*/
initStringInfo ( & ( lex - > inc_state - > partial_token ) ) ;
lex - > pstack = palloc ( sizeof ( JsonParserStack ) ) ;
lex - > pstack - > stack_size = JS_STACK_CHUNK_SIZE ;
lex - > pstack - > prediction = palloc ( JS_STACK_CHUNK_SIZE * JS_MAX_PROD_LEN ) ;
lex - > pstack - > pred_index = 0 ;
lex - > pstack - > fnames = palloc ( JS_STACK_CHUNK_SIZE * sizeof ( char * ) ) ;
lex - > pstack - > fnull = palloc ( JS_STACK_CHUNK_SIZE * sizeof ( bool ) ) ;
if ( ! allocate_incremental_state ( lex ) )
return JSON_OUT_OF_MEMORY ;
return pg_parse_json_incremental ( lex , sem , lex - > input , lex - > input_length , true ) ;
@ -546,6 +692,8 @@ pg_parse_json(JsonLexContext *lex, const JsonSemAction *sem)
JsonTokenType tok ;
JsonParseErrorType result ;
if ( lex = = & failed_oom )
return JSON_OUT_OF_MEMORY ;
if ( lex - > incremental )
return JSON_INVALID_LEXER_TYPE ;
@ -591,13 +739,16 @@ json_count_array_elements(JsonLexContext *lex, int *elements)
int count ;
JsonParseErrorType result ;
if ( lex = = & failed_oom )
return JSON_OUT_OF_MEMORY ;
/*
* It ' s safe to do this with a shallow copy because the lexical routines
* don ' t scribble on the input . They do scribble on the other pointers
* etc , so doing this with a copy makes that safe .
*/
memcpy ( & copylex , lex , sizeof ( JsonLexContext ) ) ;
copylex . strval = NULL ; /* not interested in values here */
copylex . need_escapes = false ; /* not interested in values here */
copylex . lex_level + + ;
count = 0 ;
@ -658,7 +809,8 @@ pg_parse_json_incremental(JsonLexContext *lex,
JsonParseContext ctx = JSON_PARSE_VALUE ;
JsonParserStack * pstack = lex - > pstack ;
if ( lex = = & failed_oom | | lex - > inc_state = = & failed_inc_oom )
return JSON_OUT_OF_MEMORY ;
if ( ! lex - > incremental )
return JSON_INVALID_LEXER_TYPE ;
@ -737,7 +889,9 @@ pg_parse_json_incremental(JsonLexContext *lex,
if ( result ! = JSON_SUCCESS )
return result ;
}
inc_lex_level ( lex ) ;
if ( ! inc_lex_level ( lex ) )
return JSON_OUT_OF_MEMORY ;
}
break ;
case JSON_SEM_OEND :
@ -766,7 +920,9 @@ pg_parse_json_incremental(JsonLexContext *lex,
if ( result ! = JSON_SUCCESS )
return result ;
}
inc_lex_level ( lex ) ;
if ( ! inc_lex_level ( lex ) )
return JSON_OUT_OF_MEMORY ;
}
break ;
case JSON_SEM_AEND :
@ -793,9 +949,11 @@ pg_parse_json_incremental(JsonLexContext *lex,
json_ofield_action ostart = sem - > object_field_start ;
json_ofield_action oend = sem - > object_field_end ;
if ( ( ostart ! = NULL | | oend ! = NULL ) & & lex - > strval ! = NULL )
if ( ( ostart ! = NULL | | oend ! = NULL ) & & lex - > need_escapes )
{
fname = pstrdup ( lex - > strval - > data ) ;
fname = STRDUP ( lex - > strval - > data ) ;
if ( fname = = NULL )
return JSON_OUT_OF_MEMORY ;
}
set_fname ( lex , fname ) ;
}
@ -883,14 +1041,21 @@ pg_parse_json_incremental(JsonLexContext *lex,
*/
if ( tok = = JSON_TOKEN_STRING )
{
if ( lex - > strval ! = NULL )
pstack - > scalar_val = pstrdup ( lex - > strval - > data ) ;
if ( lex - > need_escapes )
{
pstack - > scalar_val = STRDUP ( lex - > strval - > data ) ;
if ( pstack - > scalar_val = = NULL )
return JSON_OUT_OF_MEMORY ;
}
}
else
{
ptrdiff_t tlen = ( lex - > token_terminator - lex - > token_start ) ;
pstack - > scalar_val = palloc ( tlen + 1 ) ;
pstack - > scalar_val = ALLOC ( tlen + 1 ) ;
if ( pstack - > scalar_val = = NULL )
return JSON_OUT_OF_MEMORY ;
memcpy ( pstack - > scalar_val , lex - > token_start , tlen ) ;
pstack - > scalar_val [ tlen ] = ' \0 ' ;
}
@ -1025,14 +1190,21 @@ parse_scalar(JsonLexContext *lex, const JsonSemAction *sem)
/* extract the de-escaped string value, or the raw lexeme */
if ( lex_peek ( lex ) = = JSON_TOKEN_STRING )
{
if ( lex - > strval ! = NULL )
val = pstrdup ( lex - > strval - > data ) ;
if ( lex - > need_escapes )
{
val = STRDUP ( lex - > strval - > data ) ;
if ( val = = NULL )
return JSON_OUT_OF_MEMORY ;
}
}
else
{
int len = ( lex - > token_terminator - lex - > token_start ) ;
val = palloc ( len + 1 ) ;
val = ALLOC ( len + 1 ) ;
if ( val = = NULL )
return JSON_OUT_OF_MEMORY ;
memcpy ( val , lex - > token_start , len ) ;
val [ len ] = ' \0 ' ;
}
@ -1066,8 +1238,12 @@ parse_object_field(JsonLexContext *lex, const JsonSemAction *sem)
if ( lex_peek ( lex ) ! = JSON_TOKEN_STRING )
return report_parse_error ( JSON_PARSE_STRING , lex ) ;
if ( ( ostart ! = NULL | | oend ! = NULL ) & & lex - > strval ! = NULL )
fname = pstrdup ( lex - > strval - > data ) ;
if ( ( ostart ! = NULL | | oend ! = NULL ) & & lex - > need_escapes )
{
fname = STRDUP ( lex - > strval - > data ) ;
if ( fname = = NULL )
return JSON_OUT_OF_MEMORY ;
}
result = json_lex ( lex ) ;
if ( result ! = JSON_SUCCESS )
return result ;
@ -1123,6 +1299,11 @@ parse_object(JsonLexContext *lex, const JsonSemAction *sem)
JsonParseErrorType result ;
# ifndef FRONTEND
/*
* TODO : clients need some way to put a bound on stack growth . Parse level
* limits maybe ?
*/
check_stack_depth ( ) ;
# endif
@ -1312,15 +1493,27 @@ json_lex(JsonLexContext *lex)
const char * const end = lex - > input + lex - > input_length ;
JsonParseErrorType result ;
if ( lex - > incremental & & lex - > inc_state - > partial_completed )
if ( lex = = & failed_oom | | lex - > inc_state = = & failed_inc_oom )
return JSON_OUT_OF_MEMORY ;
if ( lex - > incremental )
{
/*
* We just lexed a completed partial token on the last call , so reset
* everything
*/
resetStringInfo ( & ( lex - > inc_state - > partial_token ) ) ;
lex - > token_terminator = lex - > input ;
lex - > inc_state - > partial_completed = false ;
if ( lex - > inc_state - > partial_completed )
{
/*
* We just lexed a completed partial token on the last call , so
* reset everything
*/
jsonapi_resetStringInfo ( & ( lex - > inc_state - > partial_token ) ) ;
lex - > token_terminator = lex - > input ;
lex - > inc_state - > partial_completed = false ;
}
# ifdef JSONAPI_USE_PQEXPBUFFER
/* Make sure our partial token buffer is valid before using it below. */
if ( PQExpBufferDataBroken ( lex - > inc_state - > partial_token ) )
return JSON_OUT_OF_MEMORY ;
# endif
}
s = lex - > token_terminator ;
@ -1331,7 +1524,7 @@ json_lex(JsonLexContext *lex)
* We have a partial token . Extend it and if completed lex it by a
* recursive call
*/
StringInfo ptok = & ( lex - > inc_state - > partial_token ) ;
jsonapi_StrValType * ptok = & ( lex - > inc_state - > partial_token ) ;
size_t added = 0 ;
bool tok_done = false ;
JsonLexContext dummy_lex ;
@ -1358,7 +1551,7 @@ json_lex(JsonLexContext *lex)
{
char c = lex - > input [ i ] ;
appendStringInfoCharMacro ( ptok , c ) ;
jsonapi_ appendStringInfoCharMacro( ptok , c ) ;
added + + ;
if ( c = = ' " ' & & escapes % 2 = = 0 )
{
@ -1403,7 +1596,7 @@ json_lex(JsonLexContext *lex)
case ' 8 ' :
case ' 9 ' :
{
appendStringInfoCharMacro ( ptok , cc ) ;
jsonapi_ appendStringInfoCharMacro( ptok , cc ) ;
added + + ;
}
break ;
@ -1424,7 +1617,7 @@ json_lex(JsonLexContext *lex)
if ( JSON_ALPHANUMERIC_CHAR ( cc ) )
{
appendStringInfoCharMacro ( ptok , cc ) ;
jsonapi_ appendStringInfoCharMacro( ptok , cc ) ;
added + + ;
}
else
@ -1467,6 +1660,7 @@ json_lex(JsonLexContext *lex)
dummy_lex . input_length = ptok - > len ;
dummy_lex . input_encoding = lex - > input_encoding ;
dummy_lex . incremental = false ;
dummy_lex . need_escapes = lex - > need_escapes ;
dummy_lex . strval = lex - > strval ;
partial_result = json_lex ( & dummy_lex ) ;
@ -1622,8 +1816,7 @@ json_lex(JsonLexContext *lex)
if ( lex - > incremental & & ! lex - > inc_state - > is_last_chunk & &
p = = lex - > input + lex - > input_length )
{
appendBinaryStringInfo (
& ( lex - > inc_state - > partial_token ) , s , end - s ) ;
jsonapi_appendBinaryStringInfo ( & ( lex - > inc_state - > partial_token ) , s , end - s ) ;
return JSON_INCOMPLETE ;
}
@ -1680,8 +1873,9 @@ json_lex_string(JsonLexContext *lex)
do { \
if ( lex - > incremental & & ! lex - > inc_state - > is_last_chunk ) \
{ \
appendBinaryStringInfo ( & lex - > inc_state - > partial_token , \
lex - > token_start , end - lex - > token_start ) ; \
jsonapi_appendBinaryStringInfo ( & lex - > inc_state - > partial_token , \
lex - > token_start , \
end - lex - > token_start ) ; \
return JSON_INCOMPLETE ; \
} \
lex - > token_terminator = s ; \
@ -1694,8 +1888,15 @@ json_lex_string(JsonLexContext *lex)
return code ; \
} while ( 0 )
if ( lex - > strval ! = NULL )
resetStringInfo ( lex - > strval ) ;
if ( lex - > need_escapes )
{
# ifdef JSONAPI_USE_PQEXPBUFFER
/* make sure initialization succeeded */
if ( lex - > strval = = NULL )
return JSON_OUT_OF_MEMORY ;
# endif
jsonapi_resetStringInfo ( lex - > strval ) ;
}
Assert ( lex - > input_length > 0 ) ;
s = lex - > token_start ;
@ -1732,7 +1933,7 @@ json_lex_string(JsonLexContext *lex)
else
FAIL_AT_CHAR_END ( JSON_UNICODE_ESCAPE_FORMAT ) ;
}
if ( lex - > strval ! = NULL )
if ( lex - > need_escapes )
{
/*
* Combine surrogate pairs .
@ -1789,19 +1990,19 @@ json_lex_string(JsonLexContext *lex)
unicode_to_utf8 ( ch , ( unsigned char * ) utf8str ) ;
utf8len = pg_utf_mblen ( ( unsigned char * ) utf8str ) ;
appendBinaryStringInfo ( lex - > strval , utf8str , utf8len ) ;
jsonapi_ appendBinaryStringInfo( lex - > strval , utf8str , utf8len ) ;
}
else if ( ch < = 0x007f )
{
/* The ASCII range is the same in all encodings */
appendStringInfoChar ( lex - > strval , ( char ) ch ) ;
jsonapi_ appendStringInfoChar( lex - > strval , ( char ) ch ) ;
}
else
FAIL_AT_CHAR_END ( JSON_UNICODE_HIGH_ESCAPE ) ;
# endif /* FRONTEND */
}
}
else if ( lex - > strval ! = NULL )
else if ( lex - > need_escapes )
{
if ( hi_surrogate ! = - 1 )
FAIL_AT_CHAR_END ( JSON_UNICODE_LOW_SURROGATE ) ;
@ -1811,22 +2012,22 @@ json_lex_string(JsonLexContext *lex)
case ' " ' :
case ' \\ ' :
case ' / ' :
appendStringInfoChar ( lex - > strval , * s ) ;
jsonapi_ appendStringInfoChar( lex - > strval , * s ) ;
break ;
case ' b ' :
appendStringInfoChar ( lex - > strval , ' \b ' ) ;
jsonapi_ appendStringInfoChar( lex - > strval , ' \b ' ) ;
break ;
case ' f ' :
appendStringInfoChar ( lex - > strval , ' \f ' ) ;
jsonapi_ appendStringInfoChar( lex - > strval , ' \f ' ) ;
break ;
case ' n ' :
appendStringInfoChar ( lex - > strval , ' \n ' ) ;
jsonapi_ appendStringInfoChar( lex - > strval , ' \n ' ) ;
break ;
case ' r ' :
appendStringInfoChar ( lex - > strval , ' \r ' ) ;
jsonapi_ appendStringInfoChar( lex - > strval , ' \r ' ) ;
break ;
case ' t ' :
appendStringInfoChar ( lex - > strval , ' \t ' ) ;
jsonapi_ appendStringInfoChar( lex - > strval , ' \t ' ) ;
break ;
default :
@ -1861,7 +2062,7 @@ json_lex_string(JsonLexContext *lex)
/*
* Skip to the first byte that requires special handling , so we
* can batch calls to appendBinaryStringInfo .
* can batch calls to jsonapi_ appendBinaryStringInfo.
*/
while ( p < end - sizeof ( Vector8 ) & &
! pg_lfind8 ( ' \\ ' , ( uint8 * ) p , sizeof ( Vector8 ) ) & &
@ -1885,8 +2086,8 @@ json_lex_string(JsonLexContext *lex)
}
}
if ( lex - > strval ! = NULL )
appendBinaryStringInfo ( lex - > strval , s , p - s ) ;
if ( lex - > need_escapes )
jsonapi_ appendBinaryStringInfo( lex - > strval , s , p - s ) ;
/*
* s will be incremented at the top of the loop , so set it to just
@ -1902,6 +2103,11 @@ json_lex_string(JsonLexContext *lex)
return JSON_UNICODE_LOW_SURROGATE ;
}
# ifdef JSONAPI_USE_PQEXPBUFFER
if ( lex - > need_escapes & & PQExpBufferBroken ( lex - > strval ) )
return JSON_OUT_OF_MEMORY ;
# endif
/* Hooray, we found the end of the string! */
lex - > prev_token_terminator = lex - > token_terminator ;
lex - > token_terminator = s + 1 ;
@ -2019,8 +2225,8 @@ json_lex_number(JsonLexContext *lex, const char *s,
if ( lex - > incremental & & ! lex - > inc_state - > is_last_chunk & &
len > = lex - > input_length )
{
appendBinaryStringInfo ( & lex - > inc_state - > partial_token ,
lex - > token_start , s - lex - > token_start ) ;
jsonapi_ appendBinaryStringInfo( & lex - > inc_state - > partial_token ,
lex - > token_start , s - lex - > token_start ) ;
if ( num_err ! = NULL )
* num_err = error ;
@ -2096,19 +2302,25 @@ report_parse_error(JsonParseContext ctx, JsonLexContext *lex)
char *
json_errdetail ( JsonParseErrorType error , JsonLexContext * lex )
{
if ( error = = JSON_OUT_OF_MEMORY | | lex = = & failed_oom )
{
/* Short circuit. Allocating anything for this case is unhelpful. */
return _ ( " out of memory " ) ;
}
if ( lex - > errormsg )
resetStringInfo ( lex - > errormsg ) ;
jsonapi_ resetStringInfo( lex - > errormsg ) ;
else
lex - > errormsg = makeStringInfo ( ) ;
lex - > errormsg = jsonapi_ makeStringInfo( ) ;
/*
* A helper for error messages that should print the current token . The
* format must contain exactly one % . * s specifier .
*/
# define json_token_error(lex, format) \
appendStringInfo ( ( lex ) - > errormsg , _ ( format ) , \
( int ) ( ( lex ) - > token_terminator - ( lex ) - > token_start ) , \
( lex ) - > token_start ) ;
jsonapi_ appendStringInfo( ( lex ) - > errormsg , _ ( format ) , \
( int ) ( ( lex ) - > token_terminator - ( lex ) - > token_start ) , \
( lex ) - > token_start ) ;
switch ( error )
{
@ -2127,9 +2339,9 @@ json_errdetail(JsonParseErrorType error, JsonLexContext *lex)
json_token_error ( lex , " Escape sequence \" \\ %.*s \" is invalid. " ) ;
break ;
case JSON_ESCAPING_REQUIRED :
appendStringInfo ( lex - > errormsg ,
_ ( " Character with value 0x%02x must be escaped. " ) ,
( unsigned char ) * ( lex - > token_terminator ) ) ;
jsonapi_ appendStringInfo( lex - > errormsg ,
_ ( " Character with value 0x%02x must be escaped. " ) ,
( unsigned char ) * ( lex - > token_terminator ) ) ;
break ;
case JSON_EXPECTED_END :
json_token_error ( lex , " Expected end of input, but found \" %.*s \" . " ) ;
@ -2160,6 +2372,9 @@ json_errdetail(JsonParseErrorType error, JsonLexContext *lex)
case JSON_INVALID_TOKEN :
json_token_error ( lex , " Token \" %.*s \" is invalid. " ) ;
break ;
case JSON_OUT_OF_MEMORY :
/* should have been handled above; use the error path */
break ;
case JSON_UNICODE_CODE_POINT_ZERO :
return _ ( " \\ u0000 cannot be converted to text. " ) ;
case JSON_UNICODE_ESCAPE_FORMAT :
@ -2191,15 +2406,23 @@ json_errdetail(JsonParseErrorType error, JsonLexContext *lex)
}
# undef json_token_error
/*
* We don ' t use a default : case , so that the compiler will warn about
* unhandled enum values . But this needs to be here anyway to cover the
* possibility of an incorrect input .
*/
if ( lex - > errormsg - > len = = 0 )
appendStringInfo ( lex - > errormsg ,
" unexpected json parse error type: %d " ,
( int ) error ) ;
/* Note that lex->errormsg can be NULL in shlib code. */
if ( lex - > errormsg & & lex - > errormsg - > len = = 0 )
{
/*
* We don ' t use a default : case , so that the compiler will warn about
* unhandled enum values . But this needs to be here anyway to cover
* the possibility of an incorrect input .
*/
jsonapi_appendStringInfo ( lex - > errormsg ,
" unexpected json parse error type: %d " ,
( int ) error ) ;
}
# ifdef JSONAPI_USE_PQEXPBUFFER
if ( PQExpBufferBroken ( lex - > errormsg ) )
return _ ( " out of memory while constructing error description " ) ;
# endif
return lex - > errormsg - > data ;
}