forgotten attachment
Regards
Pavel
2015-07-10 14:34 GMT+02:00 Pavel Stehule <[email protected]>:
> Hi
>
> I am sending review of this patch:
>
> 1. I reread the previous discussion and almost everyone is in favor of this
> patch (me too)
>
> 2. I had to fix a typo in an hstore_io.c function (updated patch attached);
> everything else (patching, regression tests) worked without problems
>
> My objections:
>
> 1. comments - missing comment for some basic API, basic fields like
> "key_scalar" and similar
> 2. why you did indirect call via JsonOutContext?
>
> What is the benefit of
>
> dst.value(&dst, (Datum) 0, JSONTYPE_NULL, InvalidOid, InvalidOid, false);
>
> instead
>
> json_out_value(&dst, ....)
>
> ? Is it necessary?
>
> 3. if it should be used everywhere, then in EXPLAIN statement too.
>
> Regards
>
> Pavel
>
>
> 2015-07-10 6:31 GMT+02:00 Pavel Stehule <[email protected]>:
>
>>
>>
>> 2015-07-03 12:27 GMT+02:00 Heikki Linnakangas <[email protected]>:
>>
>>> On 05/27/2015 09:51 PM, Andrew Dunstan wrote:
>>>
>>>>
>>>> On 05/27/2015 02:37 PM, Robert Haas wrote:
>>>>
>>>>> On Tue, May 26, 2015 at 2:50 AM, Shulgin, Oleksandr
>>>>> <[email protected]> wrote:
>>>>>
>>>>>> Is it reasonable to add this patch to CommitFest now?
>>>>>>
>>>>> It's always reasonable to add a patch to the CommitFest if you would
>>>>> like for it to be reviewed and avoid having it get forgotten about.
>>>>> There seems to be some disagreement about whether we want this, but
>>>>> don't let that stop you from adding it to the next CommitFest.
>>>>>
>>>>
>>>> I'm not dead set against it either. When I have time I will take a
>>>> closer look.
>>>>
>>>
>>> Andrew, will you have the time to review this? Please add yourself as
>>> reviewer in the commitfest app if you do.
>>>
>>> My 2 cents is that I agree with your initial reaction: This is a lot of
>>> infrastructure and generalizing things, for little benefit. Let's change
>>> the current code where we generate JSON to be consistent with whitespace,
>>> and call it a day.
>>>
>>
>> I think it is not a bad idea. This code can enforce a uniform
>> format, and it can check whether the produced value is correct. It can be used in
>> our code, and it can be used by extension developers.
>>
>> This patch is not small, but the number of genuinely new lines is not large.
>>
>> I'll do review today.
>>
>> Regards
>>
>> Pavel
>>
>>
>>
>>
>>> - Heikki
>>>
>>>
>>> --
>>> Sent via pgsql-hackers mailing list ([email protected])
>>> To make changes to your subscription:
>>> http://www.postgresql.org/mailpref/pgsql-hackers
>>>
>>
>>
>
diff --git a/contrib/hstore/hstore_io.c b/contrib/hstore/hstore_io.c
new file mode 100644
index 7d89867..0ca223f
*** a/contrib/hstore/hstore_io.c
--- b/contrib/hstore/hstore_io.c
*************** hstore_to_json_loose(PG_FUNCTION_ARGS)
*** 1241,1286 ****
int count = HS_COUNT(in);
char *base = STRPTR(in);
HEntry *entries = ARRPTR(in);
! StringInfoData tmp,
! dst;
if (count == 0)
PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2));
initStringInfo(&tmp);
! initStringInfo(&dst);
!
! appendStringInfoChar(&dst, '{');
for (i = 0; i < count; i++)
{
resetStringInfo(&tmp);
appendBinaryStringInfo(&tmp, HS_KEY(entries, base, i), HS_KEYLEN(entries, i));
! escape_json(&dst, tmp.data);
! appendStringInfoString(&dst, ": ");
if (HS_VALISNULL(entries, i))
! appendStringInfoString(&dst, "null");
/* guess that values of 't' or 'f' are booleans */
else if (HS_VALLEN(entries, i) == 1 && *(HS_VAL(entries, base, i)) == 't')
! appendStringInfoString(&dst, "true");
else if (HS_VALLEN(entries, i) == 1 && *(HS_VAL(entries, base, i)) == 'f')
! appendStringInfoString(&dst, "false");
else
{
resetStringInfo(&tmp);
appendBinaryStringInfo(&tmp, HS_VAL(entries, base, i), HS_VALLEN(entries, i));
if (IsValidJsonNumber(tmp.data, tmp.len))
! appendBinaryStringInfo(&dst, tmp.data, tmp.len);
else
! escape_json(&dst, tmp.data);
}
-
- if (i + 1 != count)
- appendStringInfoString(&dst, ", ");
}
- appendStringInfoChar(&dst, '}');
! PG_RETURN_TEXT_P(cstring_to_text(dst.data));
}
PG_FUNCTION_INFO_V1(hstore_to_json);
--- 1241,1289 ----
int count = HS_COUNT(in);
char *base = STRPTR(in);
HEntry *entries = ARRPTR(in);
! StringInfoData tmp;
! JsonOutContext dst;
if (count == 0)
PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2));
initStringInfo(&tmp);
! json_out_init_context(&dst, JSON_OUT_USE_SPACES);
! dst.object_start(&dst);
for (i = 0; i < count; i++)
{
resetStringInfo(&tmp);
appendBinaryStringInfo(&tmp, HS_KEY(entries, base, i), HS_KEYLEN(entries, i));
! json_out_cstring(&dst, tmp.data, true);
!
if (HS_VALISNULL(entries, i))
! dst.value(&dst, (Datum) 0, JSONTYPE_NULL, InvalidOid, InvalidOid, false);
!
/* guess that values of 't' or 'f' are booleans */
else if (HS_VALLEN(entries, i) == 1 && *(HS_VAL(entries, base, i)) == 't')
! dst.value(&dst, BoolGetDatum(true), JSONTYPE_BOOL,
! InvalidOid, InvalidOid, false);
!
else if (HS_VALLEN(entries, i) == 1 && *(HS_VAL(entries, base, i)) == 'f')
! dst.value(&dst, BoolGetDatum(false), JSONTYPE_BOOL,
! InvalidOid, InvalidOid, false);
else
{
resetStringInfo(&tmp);
appendBinaryStringInfo(&tmp, HS_VAL(entries, base, i), HS_VALLEN(entries, i));
+
+ /* this is a bit of a hack, but strictly it is not incorrect */
if (IsValidJsonNumber(tmp.data, tmp.len))
! dst.value(&dst, CStringGetDatum(tmp.data), JSONTYPE_JSON,
! CSTRINGOID, 2293 /* cstring_out */, false);
else
! json_out_cstring(&dst, tmp.data, false);
}
}
! dst.object_end(&dst);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(dst.result.data, dst.result.len));
}
PG_FUNCTION_INFO_V1(hstore_to_json);
*************** hstore_to_json(PG_FUNCTION_ARGS)
*** 1292,1329 ****
int count = HS_COUNT(in);
char *base = STRPTR(in);
HEntry *entries = ARRPTR(in);
! StringInfoData tmp,
! dst;
if (count == 0)
PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2));
initStringInfo(&tmp);
! initStringInfo(&dst);
!
! appendStringInfoChar(&dst, '{');
for (i = 0; i < count; i++)
{
resetStringInfo(&tmp);
appendBinaryStringInfo(&tmp, HS_KEY(entries, base, i), HS_KEYLEN(entries, i));
! escape_json(&dst, tmp.data);
! appendStringInfoString(&dst, ": ");
if (HS_VALISNULL(entries, i))
! appendStringInfoString(&dst, "null");
else
{
resetStringInfo(&tmp);
appendBinaryStringInfo(&tmp, HS_VAL(entries, base, i), HS_VALLEN(entries, i));
! escape_json(&dst, tmp.data);
}
-
- if (i + 1 != count)
- appendStringInfoString(&dst, ", ");
}
- appendStringInfoChar(&dst, '}');
! PG_RETURN_TEXT_P(cstring_to_text(dst.data));
}
PG_FUNCTION_INFO_V1(hstore_to_jsonb);
--- 1295,1328 ----
int count = HS_COUNT(in);
char *base = STRPTR(in);
HEntry *entries = ARRPTR(in);
! StringInfoData tmp;
! JsonOutContext dst;
if (count == 0)
PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2));
initStringInfo(&tmp);
! json_out_init_context(&dst, JSON_OUT_USE_SPACES);
! dst.object_start(&dst);
for (i = 0; i < count; i++)
{
resetStringInfo(&tmp);
appendBinaryStringInfo(&tmp, HS_KEY(entries, base, i), HS_KEYLEN(entries, i));
! json_out_cstring(&dst, tmp.data, true);
!
if (HS_VALISNULL(entries, i))
! dst.value(&dst, (Datum) 0, JSONTYPE_NULL, InvalidOid, InvalidOid, false);
else
{
resetStringInfo(&tmp);
appendBinaryStringInfo(&tmp, HS_VAL(entries, base, i), HS_VALLEN(entries, i));
! json_out_cstring(&dst, tmp.data, false);
}
}
! dst.object_end(&dst);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(dst.result.data, dst.result.len));
}
PG_FUNCTION_INFO_V1(hstore_to_jsonb);
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
new file mode 100644
index 26d3843..1ed9deb
*** a/src/backend/utils/adt/json.c
--- b/src/backend/utils/adt/json.c
*************** typedef enum /* contexts of JSON par
*** 53,73 ****
JSON_PARSE_END /* saw the end of a document, expect nothing */
} JsonParseContext;
- typedef enum /* type categories for datum_to_json */
- {
- JSONTYPE_NULL, /* null, so we didn't bother to identify */
- JSONTYPE_BOOL, /* boolean (built-in types only) */
- JSONTYPE_NUMERIC, /* numeric (ditto) */
- JSONTYPE_DATE, /* we use special formatting for datetimes */
- JSONTYPE_TIMESTAMP,
- JSONTYPE_TIMESTAMPTZ,
- JSONTYPE_JSON, /* JSON itself (and JSONB) */
- JSONTYPE_ARRAY, /* array */
- JSONTYPE_COMPOSITE, /* composite */
- JSONTYPE_CAST, /* something with an explicit cast to JSON */
- JSONTYPE_OTHER /* all else */
- } JsonTypeCategory;
-
static inline void json_lex(JsonLexContext *lex);
static inline void json_lex_string(JsonLexContext *lex);
static inline void json_lex_number(JsonLexContext *lex, char *s, bool *num_err);
--- 53,58 ----
*************** static void report_parse_error(JsonParse
*** 80,102 ****
static void report_invalid_token(JsonLexContext *lex);
static int report_json_context(JsonLexContext *lex);
static char *extract_mb_char(char *s);
! static void composite_to_json(Datum composite, StringInfo result,
! bool use_line_feeds);
! static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
Datum *vals, bool *nulls, int *valcount,
! JsonTypeCategory tcategory, Oid outfuncoid,
! bool use_line_feeds);
! static void array_to_json_internal(Datum array, StringInfo result,
! bool use_line_feeds);
static void json_categorize_type(Oid typoid,
JsonTypeCategory *tcategory,
Oid *outfuncoid);
! static void datum_to_json(Datum val, bool is_null, StringInfo result,
! JsonTypeCategory tcategory, Oid outfuncoid,
! bool key_scalar);
! static void add_json(Datum val, bool is_null, StringInfo result,
Oid val_type, bool key_scalar);
- static text *catenate_stringinfo_string(StringInfo buffer, const char *addon);
/* the null action object used for pure validation */
static JsonSemAction nullSemAction =
--- 65,80 ----
static void report_invalid_token(JsonLexContext *lex);
static int report_json_context(JsonLexContext *lex);
static char *extract_mb_char(char *s);
!
! static void array_dim_to_json(JsonOutContext *out, int dim, int ndims, int *dims,
Datum *vals, bool *nulls, int *valcount,
! JsonTypeCategory tcategory, Oid elemtypoid, Oid outfuncoid);
! static void array_to_json_internal(Datum array, JsonOutContext *out);
static void json_categorize_type(Oid typoid,
JsonTypeCategory *tcategory,
Oid *outfuncoid);
! static void add_json(Datum val, bool is_null, JsonOutContext *out,
Oid val_type, bool key_scalar);
/* the null action object used for pure validation */
static JsonSemAction nullSemAction =
*************** json_categorize_type(Oid typoid,
*** 1377,1399 ****
* If key_scalar is true, the value is being printed as a key, so insist
* it's of an acceptable type, and force it to be quoted.
*/
! static void
! datum_to_json(Datum val, bool is_null, StringInfo result,
! JsonTypeCategory tcategory, Oid outfuncoid,
bool key_scalar)
{
- char *outputstr;
- text *jsontext;
-
/* callers are expected to ensure that null keys are not passed in */
Assert(!(key_scalar && is_null));
- if (is_null)
- {
- appendStringInfoString(result, "null");
- return;
- }
-
if (key_scalar &&
(tcategory == JSONTYPE_ARRAY ||
tcategory == JSONTYPE_COMPOSITE ||
--- 1355,1368 ----
* If key_scalar is true, the value is being printed as a key, so insist
* it's of an acceptable type, and force it to be quoted.
*/
! void
! datum_to_json(Datum val, bool is_null, JsonOutContext *out,
! JsonTypeCategory tcategory, Oid typoid, Oid outfuncoid,
bool key_scalar)
{
/* callers are expected to ensure that null keys are not passed in */
Assert(!(key_scalar && is_null));
if (key_scalar &&
(tcategory == JSONTYPE_ARRAY ||
tcategory == JSONTYPE_COMPOSITE ||
*************** datum_to_json(Datum val, bool is_null, S
*** 1406,1415 ****
switch (tcategory)
{
case JSONTYPE_ARRAY:
! array_to_json_internal(val, result, false);
break;
case JSONTYPE_COMPOSITE:
! composite_to_json(val, result, false);
break;
case JSONTYPE_BOOL:
outputstr = DatumGetBool(val) ? "true" : "false";
--- 1375,1500 ----
switch (tcategory)
{
case JSONTYPE_ARRAY:
! array_to_json_internal(val, out);
break;
case JSONTYPE_COMPOSITE:
! composite_to_json(val, out);
! break;
! default:
! out->value(out, val, tcategory, typoid, outfuncoid, key_scalar);
! break;
! }
! }
!
! void
! json_out_init_context(JsonOutContext *out, int flags)
! {
! out->object_start = json_out_object_start;
! out->object_end = json_out_object_end;
! out->array_start = json_out_array_start;
! out->array_end = json_out_array_end;
! out->before_value = json_out_before_value;
! out->value = json_out_value;
! out->after_value = json_out_after_value;
!
! initStringInfo(&out->result);
! out->flags = flags;
! out->need_comma = false;
! out->depth = 0;
!
! out->agg_tcategory = JSONTYPE_OTHER;
! out->agg_outfuncoid = InvalidOid;
! }
!
! void
! json_out_before_value(JsonOutContext *out)
! {
! if (out->need_comma)
! {
! out->need_comma = false;
! appendStringInfoChar(&out->result, ',');
!
! /* don't get into all the prettiness deep in the object structure */
! if (out->depth == 1)
! {
! if ((out->flags & JSON_OUT_USE_SPACES) != 0)
! appendStringInfoChar(&out->result, ' ');
!
! if ((out->flags & JSON_OUT_USE_LINE_FEEDS) != 0)
! appendStringInfoString(&out->result, "\n ");
! }
! }
! }
!
! void
! json_out_after_value(JsonOutContext *out, bool key_scalar)
! {
! if (key_scalar)
! {
! appendStringInfoChar(&out->result, ':');
!
! if (out->depth == 1 && (out->flags & JSON_OUT_USE_SPACES) != 0)
! appendStringInfoChar(&out->result, ' ');
!
! out->need_comma = false;
! }
! else
! out->need_comma = true;
! }
!
! void
! json_out_object_start(JsonOutContext *out)
! {
! out->before_value(out);
!
! appendStringInfoChar(&out->result, '{');
! out->depth++;
! }
!
! void
! json_out_object_end(JsonOutContext *out)
! {
! appendStringInfoChar(&out->result, '}');
!
! out->after_value(out, false);
! out->depth--;
! }
!
! void
! json_out_array_start(JsonOutContext *out)
! {
! out->before_value(out);
!
! appendStringInfoChar(&out->result, '[');
! out->depth++;
! }
!
! void
! json_out_array_end(JsonOutContext *out)
! {
! appendStringInfoChar(&out->result, ']');
!
! out->after_value(out, false);
! out->depth--;
! }
!
! void
! json_out_value(JsonOutContext *out, Datum val, JsonTypeCategory tcategory,
! Oid typoid, Oid outfuncoid, bool key_scalar)
! {
! char *outputstr;
! text *jsontext;
! StringInfo result = &out->result;
!
! Assert(!(tcategory == JSONTYPE_ARRAY || tcategory == JSONTYPE_COMPOSITE));
!
! /* check if there was an element before this one and add a separator */
! out->before_value(out);
!
! switch (tcategory)
! {
! case JSONTYPE_NULL:
! appendStringInfoString(result, "null");
break;
case JSONTYPE_BOOL:
outputstr = DatumGetBool(val) ? "true" : "false";
*************** datum_to_json(Datum val, bool is_null, S
*** 1520,1530 ****
pfree(jsontext);
break;
default:
! outputstr = OidOutputFunctionCall(outfuncoid, val);
! escape_json(result, outputstr);
! pfree(outputstr);
break;
}
}
/*
--- 1605,1630 ----
pfree(jsontext);
break;
default:
! if (typoid == CSTRINGOID)
! escape_json(result, DatumGetCString(val));
! else
! {
! outputstr = OidOutputFunctionCall(outfuncoid, val);
! escape_json(result, outputstr);
! pfree(outputstr);
! }
break;
}
+
+ /* output key-value separator if needed and set need_comma accordingly */
+ out->after_value(out, key_scalar);
+ }
+
+ void
+ json_out_cstring(JsonOutContext *out, const char *str, bool key_scalar)
+ {
+ out->value(out, CStringGetDatum(str), JSONTYPE_OTHER,
+ CSTRINGOID, 2293 /* cstring_out */, key_scalar);
}
/*
*************** datum_to_json(Datum val, bool is_null, S
*** 1533,1581 ****
* ourselves recursively to process the next dimension.
*/
static void
! array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, Datum *vals,
bool *nulls, int *valcount, JsonTypeCategory tcategory,
! Oid outfuncoid, bool use_line_feeds)
{
int i;
- const char *sep;
Assert(dim < ndims);
! sep = use_line_feeds ? ",\n " : ",";
!
! appendStringInfoChar(result, '[');
for (i = 1; i <= dims[dim]; i++)
{
- if (i > 1)
- appendStringInfoString(result, sep);
-
if (dim + 1 == ndims)
{
! datum_to_json(vals[*valcount], nulls[*valcount], result, tcategory,
! outfuncoid, false);
(*valcount)++;
}
else
! {
! /*
! * Do we want line feeds on inner dimensions of arrays? For now
! * we'll say no.
! */
! array_dim_to_json(result, dim + 1, ndims, dims, vals, nulls,
! valcount, tcategory, outfuncoid, false);
! }
}
! appendStringInfoChar(result, ']');
}
/*
* Turn an array into JSON.
*/
static void
! array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
{
ArrayType *v = DatumGetArrayTypeP(array);
Oid element_type = ARR_ELEMTYPE(v);
--- 1633,1669 ----
* ourselves recursively to process the next dimension.
*/
static void
! array_dim_to_json(JsonOutContext *out, int dim, int ndims, int *dims, Datum *vals,
bool *nulls, int *valcount, JsonTypeCategory tcategory,
! Oid elemtypoid, Oid outfuncoid)
{
int i;
Assert(dim < ndims);
! out->array_start(out);
for (i = 1; i <= dims[dim]; i++)
{
if (dim + 1 == ndims)
{
! datum_to_json(vals[*valcount], nulls[*valcount], out, tcategory,
! elemtypoid, outfuncoid, false);
(*valcount)++;
}
else
! array_dim_to_json(out, dim + 1, ndims, dims, vals, nulls,
! valcount, tcategory, elemtypoid, outfuncoid);
}
! out->array_end(out);
}
/*
* Turn an array into JSON.
*/
static void
! array_to_json_internal(Datum array, JsonOutContext *out)
{
ArrayType *v = DatumGetArrayTypeP(array);
Oid element_type = ARR_ELEMTYPE(v);
*************** array_to_json_internal(Datum array, Stri
*** 1597,1603 ****
if (nitems <= 0)
{
! appendStringInfoString(result, "[]");
return;
}
--- 1685,1692 ----
if (nitems <= 0)
{
! out->array_start(out);
! out->array_end(out);
return;
}
*************** array_to_json_internal(Datum array, Stri
*** 1611,1618 ****
typalign, &elements, &nulls,
&nitems);
! array_dim_to_json(result, 0, ndim, dim, elements, nulls, &count, tcategory,
! outfuncoid, use_line_feeds);
pfree(elements);
pfree(nulls);
--- 1700,1707 ----
typalign, &elements, &nulls,
&nitems);
! array_dim_to_json(out, 0, ndim, dim, elements, nulls, &count, tcategory,
! element_type, outfuncoid);
pfree(elements);
pfree(nulls);
*************** array_to_json_internal(Datum array, Stri
*** 1621,1628 ****
/*
* Turn a composite / record into JSON.
*/
! static void
! composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
{
HeapTupleHeader td;
Oid tupType;
--- 1710,1717 ----
/*
* Turn a composite / record into JSON.
*/
! void
! composite_to_json(Datum composite, JsonOutContext *out)
{
HeapTupleHeader td;
Oid tupType;
*************** composite_to_json(Datum composite, Strin
*** 1631,1640 ****
HeapTupleData tmptup,
*tuple;
int i;
- bool needsep = false;
- const char *sep;
-
- sep = use_line_feeds ? ",\n " : ",";
td = DatumGetHeapTupleHeader(composite);
--- 1720,1725 ----
*************** composite_to_json(Datum composite, Strin
*** 1648,1654 ****
tmptup.t_data = td;
tuple = &tmptup;
! appendStringInfoChar(result, '{');
for (i = 0; i < tupdesc->natts; i++)
{
--- 1733,1739 ----
tmptup.t_data = td;
tuple = &tmptup;
! out->object_start(out);
for (i = 0; i < tupdesc->natts; i++)
{
*************** composite_to_json(Datum composite, Strin
*** 1661,1676 ****
if (tupdesc->attrs[i]->attisdropped)
continue;
- if (needsep)
- appendStringInfoString(result, sep);
- needsep = true;
-
attname = NameStr(tupdesc->attrs[i]->attname);
! escape_json(result, attname);
! appendStringInfoChar(result, ':');
val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
-
if (isnull)
{
tcategory = JSONTYPE_NULL;
--- 1746,1755 ----
if (tupdesc->attrs[i]->attisdropped)
continue;
attname = NameStr(tupdesc->attrs[i]->attname);
! json_out_cstring(out, attname, true);
val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
if (isnull)
{
tcategory = JSONTYPE_NULL;
*************** composite_to_json(Datum composite, Strin
*** 1680,1689 ****
json_categorize_type(tupdesc->attrs[i]->atttypid,
&tcategory, &outfuncoid);
! datum_to_json(val, isnull, result, tcategory, outfuncoid, false);
}
! appendStringInfoChar(result, '}');
ReleaseTupleDesc(tupdesc);
}
--- 1759,1770 ----
json_categorize_type(tupdesc->attrs[i]->atttypid,
&tcategory, &outfuncoid);
! datum_to_json(val, isnull, out, tcategory,
! tupdesc->attrs[i]->atttypid, outfuncoid, false);
}
! out->object_end(out);
!
ReleaseTupleDesc(tupdesc);
}
*************** composite_to_json(Datum composite, Strin
*** 1695,1701 ****
* lookups only once.
*/
static void
! add_json(Datum val, bool is_null, StringInfo result,
Oid val_type, bool key_scalar)
{
JsonTypeCategory tcategory;
--- 1776,1782 ----
* lookups only once.
*/
static void
! add_json(Datum val, bool is_null, JsonOutContext *out,
Oid val_type, bool key_scalar)
{
JsonTypeCategory tcategory;
*************** add_json(Datum val, bool is_null, String
*** 1715,1721 ****
json_categorize_type(val_type,
&tcategory, &outfuncoid);
! datum_to_json(val, is_null, result, tcategory, outfuncoid, key_scalar);
}
/*
--- 1796,1802 ----
json_categorize_type(val_type,
&tcategory, &outfuncoid);
! datum_to_json(val, is_null, out, tcategory, val_type, outfuncoid, key_scalar);
}
/*
*************** extern Datum
*** 1725,1737 ****
array_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
! StringInfo result;
!
! result = makeStringInfo();
! array_to_json_internal(array, result, false);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
/*
--- 1806,1817 ----
array_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
! JsonOutContext out;
! json_out_init_context(&out, 0);
! array_to_json_internal(array, &out);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out.result.data, out.result.len));
}
/*
*************** array_to_json_pretty(PG_FUNCTION_ARGS)
*** 1742,1754 ****
{
Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
! StringInfo result;
!
! result = makeStringInfo();
! array_to_json_internal(array, result, use_line_feeds);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
/*
--- 1822,1833 ----
{
Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
! JsonOutContext out;
! json_out_init_context(&out, use_line_feeds ? JSON_OUT_USE_LINE_FEEDS : 0);
! array_to_json_internal(array, &out);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out.result.data, out.result.len));
}
/*
*************** extern Datum
*** 1758,1770 ****
row_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
! StringInfo result;
!
! result = makeStringInfo();
! composite_to_json(array, result, false);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
/*
--- 1837,1848 ----
row_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
! JsonOutContext out;
! json_out_init_context(&out, 0);
! composite_to_json(array, &out);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out.result.data, out.result.len));
}
/*
*************** row_to_json_pretty(PG_FUNCTION_ARGS)
*** 1775,1787 ****
{
Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
! StringInfo result;
!
! result = makeStringInfo();
! composite_to_json(array, result, use_line_feeds);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
/*
--- 1853,1864 ----
{
Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
! JsonOutContext out;
! json_out_init_context(&out, use_line_feeds ? JSON_OUT_USE_LINE_FEEDS : 0);
! composite_to_json(array, &out);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out.result.data, out.result.len));
}
/*
*************** to_json(PG_FUNCTION_ARGS)
*** 1792,1800 ****
{
Datum val = PG_GETARG_DATUM(0);
Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
- StringInfo result;
JsonTypeCategory tcategory;
Oid outfuncoid;
if (val_type == InvalidOid)
ereport(ERROR,
--- 1869,1879 ----
{
Datum val = PG_GETARG_DATUM(0);
Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
JsonTypeCategory tcategory;
Oid outfuncoid;
+ JsonOutContext out;
+
+ json_out_init_context(&out, 0);
if (val_type == InvalidOid)
ereport(ERROR,
*************** to_json(PG_FUNCTION_ARGS)
*** 1804,1814 ****
json_categorize_type(val_type,
&tcategory, &outfuncoid);
! result = makeStringInfo();
!
! datum_to_json(val, false, result, tcategory, outfuncoid, false);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
/*
--- 1883,1891 ----
json_categorize_type(val_type,
&tcategory, &outfuncoid);
! datum_to_json(val, false, &out, tcategory, val_type, outfuncoid, false);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out.result.data, out.result.len));
}
/*
*************** json_agg_transfn(PG_FUNCTION_ARGS)
*** 1822,1831 ****
Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
MemoryContext aggcontext,
oldcontext;
! StringInfo state;
Datum val;
- JsonTypeCategory tcategory;
- Oid outfuncoid;
if (val_type == InvalidOid)
ereport(ERROR,
--- 1899,1906 ----
Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
MemoryContext aggcontext,
oldcontext;
! JsonOutContext *out = NULL;
Datum val;
if (val_type == InvalidOid)
ereport(ERROR,
*************** json_agg_transfn(PG_FUNCTION_ARGS)
*** 1847,1891 ****
* use the right context to enlarge the object if necessary.
*/
oldcontext = MemoryContextSwitchTo(aggcontext);
! state = makeStringInfo();
MemoryContextSwitchTo(oldcontext);
! appendStringInfoChar(state, '[');
}
else
! {
! state = (StringInfo) PG_GETARG_POINTER(0);
! appendStringInfoString(state, ", ");
! }
/* fast path for NULLs */
if (PG_ARGISNULL(1))
{
! datum_to_json((Datum) 0, true, state, JSONTYPE_NULL, InvalidOid, false);
! PG_RETURN_POINTER(state);
}
val = PG_GETARG_DATUM(1);
! /* XXX we do this every time?? */
! json_categorize_type(val_type,
! &tcategory, &outfuncoid);
!
! /* add some whitespace if structured type and not first item */
! if (!PG_ARGISNULL(0) &&
! (tcategory == JSONTYPE_ARRAY || tcategory == JSONTYPE_COMPOSITE))
! {
! appendStringInfoString(state, "\n ");
! }
! datum_to_json(val, false, state, tcategory, outfuncoid, false);
/*
* The transition type for array_agg() is declared to be "internal", which
* is a pass-by-value type the same size as a pointer. So we can safely
! * pass the ArrayBuildState pointer through nodeAgg.c's machinations.
*/
! PG_RETURN_POINTER(state);
}
/*
--- 1922,1959 ----
* use the right context to enlarge the object if necessary.
*/
oldcontext = MemoryContextSwitchTo(aggcontext);
! out = palloc(sizeof(JsonOutContext));
! json_out_init_context(out, JSON_OUT_USE_LINE_FEEDS);
MemoryContextSwitchTo(oldcontext);
! out->array_start(out);
}
else
! out = (JsonOutContext *) PG_GETARG_POINTER(0);
/* fast path for NULLs */
if (PG_ARGISNULL(1))
{
! datum_to_json((Datum) 0, true, out,
! JSONTYPE_NULL, InvalidOid, InvalidOid, false);
! PG_RETURN_POINTER(out);
}
val = PG_GETARG_DATUM(1);
! if (out->agg_outfuncoid == InvalidOid)
! json_categorize_type(val_type,
! &out->agg_tcategory, &out->agg_outfuncoid);
! datum_to_json(val, false, out, out->agg_tcategory,
! val_type, out->agg_outfuncoid, false);
/*
* The transition type for array_agg() is declared to be "internal", which
* is a pass-by-value type the same size as a pointer. So we can safely
! * pass the ArrayBuildOut pointer through nodeAgg.c's machinations.
*/
! PG_RETURN_POINTER(out);
}
/*
*************** json_agg_transfn(PG_FUNCTION_ARGS)
*** 1894,1912 ****
Datum
json_agg_finalfn(PG_FUNCTION_ARGS)
{
! StringInfo state;
/* cannot be called directly because of internal-type argument */
Assert(AggCheckCallContext(fcinfo, NULL));
! state = PG_ARGISNULL(0) ? NULL : (StringInfo) PG_GETARG_POINTER(0);
/* NULL result for no rows in, as is standard with aggregates */
! if (state == NULL)
PG_RETURN_NULL();
! /* Else return state with appropriate array terminator added */
! PG_RETURN_TEXT_P(catenate_stringinfo_string(state, "]"));
}
/*
--- 1962,1981 ----
Datum
json_agg_finalfn(PG_FUNCTION_ARGS)
{
! JsonOutContext *out;
/* cannot be called directly because of internal-type argument */
Assert(AggCheckCallContext(fcinfo, NULL));
! out = PG_ARGISNULL(0) ? NULL : (JsonOutContext *) PG_GETARG_POINTER(0);
/* NULL result for no rows in, as is standard with aggregates */
! if (out == NULL)
PG_RETURN_NULL();
! out->array_end(out);
!
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out->result.data, out->result.len));
}
/*
*************** json_object_agg_transfn(PG_FUNCTION_ARGS
*** 1920,1926 ****
Oid val_type;
MemoryContext aggcontext,
oldcontext;
! StringInfo state;
Datum arg;
if (!AggCheckCallContext(fcinfo, &aggcontext))
--- 1989,1995 ----
Oid val_type;
MemoryContext aggcontext,
oldcontext;
! JsonOutContext *out = NULL;
Datum arg;
if (!AggCheckCallContext(fcinfo, &aggcontext))
*************** json_object_agg_transfn(PG_FUNCTION_ARGS
*** 1938,1953 ****
* use the right context to enlarge the object if necessary.
*/
oldcontext = MemoryContextSwitchTo(aggcontext);
! state = makeStringInfo();
MemoryContextSwitchTo(oldcontext);
! appendStringInfoString(state, "{ ");
}
else
! {
! state = (StringInfo) PG_GETARG_POINTER(0);
! appendStringInfoString(state, ", ");
! }
/*
* Note: since json_object_agg() is declared as taking type "any", the
--- 2007,2020 ----
* use the right context to enlarge the object if necessary.
*/
oldcontext = MemoryContextSwitchTo(aggcontext);
! out = palloc(sizeof(JsonOutContext));
! json_out_init_context(out, JSON_OUT_USE_SPACES);
MemoryContextSwitchTo(oldcontext);
! out->object_start(out);
}
else
! out = (JsonOutContext *) PG_GETARG_POINTER(0);
/*
* Note: since json_object_agg() is declared as taking type "any", the
*************** json_object_agg_transfn(PG_FUNCTION_ARGS
*** 1970,1978 ****
arg = PG_GETARG_DATUM(1);
! add_json(arg, false, state, val_type, true);
!
! appendStringInfoString(state, " : ");
val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
--- 2037,2043 ----
arg = PG_GETARG_DATUM(1);
! add_json(arg, false, out, val_type, true);
val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
*************** json_object_agg_transfn(PG_FUNCTION_ARGS
*** 1986,1994 ****
else
arg = PG_GETARG_DATUM(2);
! add_json(arg, PG_ARGISNULL(2), state, val_type, false);
! PG_RETURN_POINTER(state);
}
/*
--- 2051,2059 ----
else
arg = PG_GETARG_DATUM(2);
! add_json(arg, PG_ARGISNULL(2), out, val_type, false);
! PG_RETURN_POINTER(out);
}
/*
*************** json_object_agg_transfn(PG_FUNCTION_ARGS
*** 1997,2035 ****
Datum
json_object_agg_finalfn(PG_FUNCTION_ARGS)
{
! StringInfo state;
/* cannot be called directly because of internal-type argument */
Assert(AggCheckCallContext(fcinfo, NULL));
! state = PG_ARGISNULL(0) ? NULL : (StringInfo) PG_GETARG_POINTER(0);
/* NULL result for no rows in, as is standard with aggregates */
! if (state == NULL)
PG_RETURN_NULL();
! /* Else return state with appropriate object terminator added */
! PG_RETURN_TEXT_P(catenate_stringinfo_string(state, " }"));
! }
!
! /*
! * Helper function for aggregates: return given StringInfo's contents plus
! * specified trailing string, as a text datum. We need this because aggregate
! * final functions are not allowed to modify the aggregate state.
! */
! static text *
! catenate_stringinfo_string(StringInfo buffer, const char *addon)
! {
! /* custom version of cstring_to_text_with_len */
! int buflen = buffer->len;
! int addlen = strlen(addon);
! text *result = (text *) palloc(buflen + addlen + VARHDRSZ);
!
! SET_VARSIZE(result, buflen + addlen + VARHDRSZ);
! memcpy(VARDATA(result), buffer->data, buflen);
! memcpy(VARDATA(result) + buflen, addon, addlen);
! return result;
}
/*
--- 2062,2081 ----
Datum
json_object_agg_finalfn(PG_FUNCTION_ARGS)
{
! JsonOutContext *out;
/* cannot be called directly because of internal-type argument */
Assert(AggCheckCallContext(fcinfo, NULL));
! out = PG_ARGISNULL(0) ? NULL : (JsonOutContext *) PG_GETARG_POINTER(0);
/* NULL result for no rows in, as is standard with aggregates */
! if (out == NULL)
PG_RETURN_NULL();
! out->object_end(out);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out->result.data, out->result.len));
}
/*
*************** json_build_object(PG_FUNCTION_ARGS)
*** 2041,2048 ****
int nargs = PG_NARGS();
int i;
Datum arg;
! const char *sep = "";
! StringInfo result;
Oid val_type;
if (nargs % 2 != 0)
--- 2087,2093 ----
int nargs = PG_NARGS();
int i;
Datum arg;
! JsonOutContext out;
Oid val_type;
if (nargs % 2 != 0)
*************** json_build_object(PG_FUNCTION_ARGS)
*** 2051,2059 ****
errmsg("argument list must have even number of elements"),
errhint("The arguments of json_build_object() must consist of alternating keys and values.")));
! result = makeStringInfo();
!
! appendStringInfoChar(result, '{');
for (i = 0; i < nargs; i += 2)
{
--- 2096,2103 ----
errmsg("argument list must have even number of elements"),
errhint("The arguments of json_build_object() must consist of alternating keys and values.")));
! json_out_init_context(&out, JSON_OUT_USE_SPACES);
! out.object_start(&out);
for (i = 0; i < nargs; i += 2)
{
*************** json_build_object(PG_FUNCTION_ARGS)
*** 2064,2071 ****
* here as type UNKNOWN, which fortunately does not matter to us,
* since unknownout() works fine.
*/
- appendStringInfoString(result, sep);
- sep = ", ";
/* process key */
val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
--- 2108,2113 ----
*************** json_build_object(PG_FUNCTION_ARGS)
*** 2084,2092 ****
arg = PG_GETARG_DATUM(i);
! add_json(arg, false, result, val_type, true);
!
! appendStringInfoString(result, " : ");
/* process value */
val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
--- 2126,2132 ----
arg = PG_GETARG_DATUM(i);
! add_json(arg, false, &out, val_type, true);
/* process value */
val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
*************** json_build_object(PG_FUNCTION_ARGS)
*** 2102,2113 ****
else
arg = PG_GETARG_DATUM(i + 1);
! add_json(arg, PG_ARGISNULL(i + 1), result, val_type, false);
}
! appendStringInfoChar(result, '}');
! PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
/*
--- 2142,2153 ----
else
arg = PG_GETARG_DATUM(i + 1);
! add_json(arg, PG_ARGISNULL(i + 1), &out, val_type, false);
}
! out.object_end(&out);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out.result.data, out.result.len));
}
/*
*************** json_build_array(PG_FUNCTION_ARGS)
*** 2128,2140 ****
int nargs = PG_NARGS();
int i;
Datum arg;
! const char *sep = "";
! StringInfo result;
Oid val_type;
! result = makeStringInfo();
!
! appendStringInfoChar(result, '[');
for (i = 0; i < nargs; i++)
{
--- 2168,2178 ----
int nargs = PG_NARGS();
int i;
Datum arg;
! JsonOutContext out;
Oid val_type;
! json_out_init_context(&out, JSON_OUT_USE_SPACES);
! out.array_start(&out);
for (i = 0; i < nargs; i++)
{
*************** json_build_array(PG_FUNCTION_ARGS)
*** 2145,2153 ****
* here as type UNKNOWN, which fortunately does not matter to us,
* since unknownout() works fine.
*/
- appendStringInfoString(result, sep);
- sep = ", ";
-
val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
if (val_type == InvalidOid)
--- 2183,2188 ----
*************** json_build_array(PG_FUNCTION_ARGS)
*** 2161,2172 ****
else
arg = PG_GETARG_DATUM(i);
! add_json(arg, PG_ARGISNULL(i), result, val_type, false);
}
! appendStringInfoChar(result, ']');
! PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
/*
--- 2196,2207 ----
else
arg = PG_GETARG_DATUM(i);
! add_json(arg, PG_ARGISNULL(i), &out, val_type, false);
}
! out.array_end(&out);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out.result.data, out.result.len));
}
/*
*************** json_object(PG_FUNCTION_ARGS)
*** 2189,2202 ****
{
ArrayType *in_array = PG_GETARG_ARRAYTYPE_P(0);
int ndims = ARR_NDIM(in_array);
! StringInfoData result;
Datum *in_datums;
bool *in_nulls;
int in_count,
count,
i;
- text *rval;
- char *v;
switch (ndims)
{
--- 2224,2235 ----
{
ArrayType *in_array = PG_GETARG_ARRAYTYPE_P(0);
int ndims = ARR_NDIM(in_array);
! JsonOutContext out;
Datum *in_datums;
bool *in_nulls;
int in_count,
count,
i;
switch (ndims)
{
*************** json_object(PG_FUNCTION_ARGS)
*** 2230,2238 ****
count = in_count / 2;
! initStringInfo(&result);
!
! appendStringInfoChar(&result, '{');
for (i = 0; i < count; ++i)
{
--- 2263,2270 ----
count = in_count / 2;
! json_out_init_context(&out, JSON_OUT_USE_SPACES);
! out.object_start(&out);
for (i = 0; i < count; ++i)
{
*************** json_object(PG_FUNCTION_ARGS)
*** 2241,2272 ****
(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
errmsg("null value not allowed for object key")));
! v = TextDatumGetCString(in_datums[i * 2]);
! if (i > 0)
! appendStringInfoString(&result, ", ");
! escape_json(&result, v);
! appendStringInfoString(&result, " : ");
! pfree(v);
if (in_nulls[i * 2 + 1])
! appendStringInfoString(&result, "null");
else
! {
! v = TextDatumGetCString(in_datums[i * 2 + 1]);
! escape_json(&result, v);
! pfree(v);
! }
}
! appendStringInfoChar(&result, '}');
pfree(in_datums);
pfree(in_nulls);
! rval = cstring_to_text_with_len(result.data, result.len);
! pfree(result.data);
!
! PG_RETURN_TEXT_P(rval);
!
}
/*
--- 2273,2295 ----
(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
errmsg("null value not allowed for object key")));
! out.value(&out, in_datums[i * 2], JSONTYPE_OTHER,
! TEXTOID, 47 /* textout */, true);
!
if (in_nulls[i * 2 + 1])
! out.value(&out, (Datum) 0, JSONTYPE_NULL,
! InvalidOid, InvalidOid, false);
else
! out.value(&out, in_datums[i * 2 + 1], JSONTYPE_OTHER,
! TEXTOID, 47 /* textout */, false);
}
! out.object_end(&out);
pfree(in_datums);
pfree(in_nulls);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out.result.data, out.result.len));
}
/*
*************** json_object_two_arg(PG_FUNCTION_ARGS)
*** 2282,2288 ****
ArrayType *val_array = PG_GETARG_ARRAYTYPE_P(1);
int nkdims = ARR_NDIM(key_array);
int nvdims = ARR_NDIM(val_array);
! StringInfoData result;
Datum *key_datums,
*val_datums;
bool *key_nulls,
--- 2305,2311 ----
ArrayType *val_array = PG_GETARG_ARRAYTYPE_P(1);
int nkdims = ARR_NDIM(key_array);
int nvdims = ARR_NDIM(val_array);
! JsonOutContext out;
Datum *key_datums,
*val_datums;
bool *key_nulls,
*************** json_object_two_arg(PG_FUNCTION_ARGS)
*** 2290,2297 ****
int key_count,
val_count,
i;
- text *rval;
- char *v;
if (nkdims > 1 || nkdims != nvdims)
ereport(ERROR,
--- 2313,2318 ----
*************** json_object_two_arg(PG_FUNCTION_ARGS)
*** 2314,2322 ****
(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
errmsg("mismatched array dimensions")));
! initStringInfo(&result);
!
! appendStringInfoChar(&result, '{');
for (i = 0; i < key_count; ++i)
{
--- 2335,2342 ----
(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
errmsg("mismatched array dimensions")));
! json_out_init_context(&out, JSON_OUT_USE_SPACES);
! out.object_start(&out);
for (i = 0; i < key_count; ++i)
{
*************** json_object_two_arg(PG_FUNCTION_ARGS)
*** 2325,2357 ****
(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
errmsg("null value not allowed for object key")));
! v = TextDatumGetCString(key_datums[i]);
! if (i > 0)
! appendStringInfoString(&result, ", ");
! escape_json(&result, v);
! appendStringInfoString(&result, " : ");
! pfree(v);
if (val_nulls[i])
! appendStringInfoString(&result, "null");
else
! {
! v = TextDatumGetCString(val_datums[i]);
! escape_json(&result, v);
! pfree(v);
! }
}
! appendStringInfoChar(&result, '}');
pfree(key_datums);
pfree(key_nulls);
pfree(val_datums);
pfree(val_nulls);
! rval = cstring_to_text_with_len(result.data, result.len);
! pfree(result.data);
!
! PG_RETURN_TEXT_P(rval);
}
--- 2345,2369 ----
(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
errmsg("null value not allowed for object key")));
! out.value(&out, key_datums[i], JSONTYPE_OTHER,
! TEXTOID, 47 /* textout */, true);
!
if (val_nulls[i])
! out.value(&out, (Datum) 0, JSONTYPE_NULL,
! InvalidOid, InvalidOid, false);
else
! out.value(&out, val_datums[i], JSONTYPE_OTHER,
! TEXTOID, 47 /* textout */, false);
}
! out.object_end(&out);
pfree(key_datums);
pfree(key_nulls);
pfree(val_datums);
pfree(val_nulls);
! PG_RETURN_TEXT_P(cstring_to_text_with_len(out.result.data, out.result.len));
}
diff --git a/src/include/utils/jsonapi.h b/src/include/utils/jsonapi.h
new file mode 100644
index 296d20a..8b9af6c
*** a/src/include/utils/jsonapi.h
--- b/src/include/utils/jsonapi.h
*************** extern JsonLexContext *makeJsonLexContex
*** 124,127 ****
--- 124,194 ----
*/
extern bool IsValidJsonNumber(const char *str, int len);
+
+ /*
+ * Generalized structures for producing JSON output.
+ */
+ typedef enum /* type categories for datum_to_json */
+ {
+ JSONTYPE_NULL, /* null, so we didn't bother to identify */
+ JSONTYPE_BOOL, /* boolean (built-in types only) */
+ JSONTYPE_NUMERIC, /* numeric (ditto) */
+ JSONTYPE_DATE, /* we use special formatting for datetimes */
+ JSONTYPE_TIMESTAMP,
+ JSONTYPE_TIMESTAMPTZ,
+ JSONTYPE_JSON, /* JSON itself (and JSONB) */
+ JSONTYPE_ARRAY, /* array */
+ JSONTYPE_COMPOSITE, /* composite */
+ JSONTYPE_CAST, /* something with an explicit cast to JSON */
+ JSONTYPE_OTHER /* all else */
+ } JsonTypeCategory;
+
+ struct JsonOutContext;
+
+ typedef void (*json_out_struct_action)(struct JsonOutContext *out);
+ typedef void (*json_out_value_action)(struct JsonOutContext *out,
+ Datum val, JsonTypeCategory tcategory,
+ Oid typoid, Oid outfuncoid, bool key_scalar);
+ typedef void (*json_out_post_action)(struct JsonOutContext *out,
+ bool key_scalar);
+
+ #define JSON_OUT_USE_LINE_FEEDS 1
+ #define JSON_OUT_USE_SPACES 2
+
+ typedef struct JsonOutContext {
+ json_out_struct_action object_start;
+ json_out_struct_action object_end;
+ json_out_struct_action array_start;
+ json_out_struct_action array_end;
+ json_out_struct_action before_value;
+ json_out_value_action value;
+ json_out_post_action after_value;
+
+ StringInfoData result;
+ int flags;
+ bool need_comma;
+ int depth;
+ /* these are used in json_agg to cache the type information */
+ JsonTypeCategory agg_tcategory;
+ Oid agg_outfuncoid;
+ } JsonOutContext;
+
+ extern void json_out_init_context(JsonOutContext *out, int flags);
+ extern void json_out_before_value(JsonOutContext *out);
+ extern void json_out_after_value(JsonOutContext *out, bool key_scalar);
+ extern void json_out_object_start(JsonOutContext *out);
+ extern void json_out_object_end(JsonOutContext *out);
+ extern void json_out_array_start(JsonOutContext *out);
+ extern void json_out_array_end(JsonOutContext *out);
+ extern void json_out_value(JsonOutContext *out, Datum val,
+ JsonTypeCategory tcategory, Oid typoid, Oid outfuncoid,
+ bool key_scalar);
+ extern void json_out_cstring(JsonOutContext *out, const char *str,
+ bool key_scalar);
+
+ extern void composite_to_json(Datum composite, JsonOutContext *out);
+ extern void datum_to_json(Datum val, bool is_null, JsonOutContext *out,
+ JsonTypeCategory tcategory, Oid typoid, Oid outfuncoid,
+ bool key_scalar);
+
#endif /* JSONAPI_H */
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
new file mode 100644
index 3942c3b..fbc9b5b
*** a/src/test/regress/expected/json.out
--- b/src/test/regress/expected/json.out
*************** SELECT json_agg(q)
*** 457,476 ****
ROW(y.*,ARRAY[4,5,6])] AS z
FROM generate_series(1,2) x,
generate_series(4,5) y) q;
! json_agg
! -----------------------------------------------------------------------
! [{"b":"a1","c":4,"z":[{"f1":1,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}, +
! {"b":"a1","c":5,"z":[{"f1":1,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}, +
! {"b":"a2","c":4,"z":[{"f1":2,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}, +
{"b":"a2","c":5,"z":[{"f1":2,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}]
(1 row)
SELECT json_agg(q)
FROM rows q;
! json_agg
! -----------------------
! [{"x":1,"y":"txt1"}, +
! {"x":2,"y":"txt2"}, +
{"x":3,"y":"txt3"}]
(1 row)
--- 457,476 ----
ROW(y.*,ARRAY[4,5,6])] AS z
FROM generate_series(1,2) x,
generate_series(4,5) y) q;
! json_agg
! ----------------------------------------------------------------------
! [{"b":"a1","c":4,"z":[{"f1":1,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]},+
! {"b":"a1","c":5,"z":[{"f1":1,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]},+
! {"b":"a2","c":4,"z":[{"f1":2,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]},+
{"b":"a2","c":5,"z":[{"f1":2,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}]
(1 row)
SELECT json_agg(q)
FROM rows q;
! json_agg
! ----------------------
! [{"x":1,"y":"txt1"},+
! {"x":2,"y":"txt2"},+
{"x":3,"y":"txt3"}]
(1 row)
*************** SELECT json_build_array('a',1,'b',1.2,'c
*** 1505,1522 ****
(1 row)
SELECT json_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
! json_build_object
! ----------------------------------------------------------------------------
! {"a" : 1, "b" : 1.2, "c" : true, "d" : null, "e" : {"x": 3, "y": [1,2,3]}}
(1 row)
SELECT json_build_object(
'a', json_build_object('b',false,'c',99),
'd', json_build_object('e',array[9,8,7]::int[],
'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
! json_build_object
! -------------------------------------------------------------------------------------------------
! {"a" : {"b" : false, "c" : 99}, "d" : {"e" : [9,8,7], "f" : {"relkind":"r","name":"pg_class"}}}
(1 row)
-- empty objects/arrays
--- 1505,1522 ----
(1 row)
SELECT json_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
! json_build_object
! -----------------------------------------------------------------------
! {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1,2,3]}}
(1 row)
SELECT json_build_object(
'a', json_build_object('b',false,'c',99),
'd', json_build_object('e',array[9,8,7]::int[],
'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
! json_build_object
! -------------------------------------------------------------------------------------------
! {"a": {"b": false, "c": 99}, "d": {"e": [9,8,7], "f": {"relkind":"r","name":"pg_class"}}}
(1 row)
-- empty objects/arrays
*************** SELECT json_build_object();
*** 1536,1542 ****
SELECT json_build_object(1,2);
json_build_object
-------------------
! {"1" : 2}
(1 row)
-- keys must be scalar and not null
--- 1536,1542 ----
SELECT json_build_object(1,2);
json_build_object
-------------------
! {"1": 2}
(1 row)
-- keys must be scalar and not null
*************** INSERT INTO foo VALUES (847002,'t16','GE
*** 1555,1578 ****
INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
SELECT json_build_object('turbines',json_object_agg(serial_num,json_build_object('name',name,'type',type)))
FROM foo;
! json_build_object
! -------------------------------------------------------------------------------------------------------------------------------------------------------------------------
! {"turbines" : { "847001" : {"name" : "t15", "type" : "GE1043"}, "847002" : {"name" : "t16", "type" : "GE1043"}, "847003" : {"name" : "sub-alpha", "type" : "GESS90"} }}
(1 row)
-- json_object
-- one dimension
SELECT json_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
! json_object
! -------------------------------------------------------
! {"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
(1 row)
-- same but with two dimensions
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
! json_object
! -------------------------------------------------------
! {"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
(1 row)
-- odd number error
--- 1555,1578 ----
INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
SELECT json_build_object('turbines',json_object_agg(serial_num,json_build_object('name',name,'type',type)))
FROM foo;
! json_build_object
! -------------------------------------------------------------------------------------------------------------------------------------------------------------
! {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
(1 row)
-- json_object
-- one dimension
SELECT json_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
! json_object
! ---------------------------------------------------
! {"a": "1", "b": "2", "3": null, "d e f": "a b c"}
(1 row)
-- same but with two dimensions
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
! json_object
! ---------------------------------------------------
! {"a": "1", "b": "2", "3": null, "d e f": "a b c"}
(1 row)
-- odd number error
*************** SELECT json_object('{{{a,b},{c,d}},{{b,c
*** 1589,1597 ****
ERROR: wrong number of array subscripts
--two argument form of json_object
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
! json_object
! ------------------------------------------------------
! {"a" : "1", "b" : "2", "c" : "3", "d e f" : "a b c"}
(1 row)
-- too many dimensions
--- 1589,1597 ----
ERROR: wrong number of array subscripts
--two argument form of json_object
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
! json_object
! --------------------------------------------------
! {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
(1 row)
-- too many dimensions
*************** select json_object('{a,b,NULL,"d e f"}',
*** 1607,1615 ****
ERROR: null value not allowed for object key
-- empty key is allowed
select json_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
! json_object
! -----------------------------------------------------
! {"a" : "1", "b" : "2", "" : "3", "d e f" : "a b c"}
(1 row)
-- json_to_record and json_to_recordset
--- 1607,1615 ----
ERROR: null value not allowed for object key
-- empty key is allowed
select json_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
! json_object
! -------------------------------------------------
! {"a": "1", "b": "2", "": "3", "d e f": "a b c"}
(1 row)
-- json_to_record and json_to_recordset
--
Sent via pgsql-hackers mailing list ([email protected])
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers