JsonbValue to Jsonb conversion
Hi all,
I've run into some trouble with the jsonb implementation, and I hope
I can get a little advice =)
If I understand correctly, an abstract function for jsonb modification
should have the following stages:
Jsonb -> JsonbValue -> Modification -> JsonbValue -> Jsonb
One can convert a *JsonbValue* to *Jsonb* only via the *JsonbValueToJsonb*
function. So, my question is: can a *JsonbValue* that contains a few
*jbvBinary* elements be converted to *Jsonb* by this function? It would be
very useful if you want to modify only a small part of your JsonbValue (e.g.
replace the value of some key). But when I try to do this, an exception
"unknown type of jsonb container" appears. Maybe I missed something? Or is
there another approach to do this conversion?
On 09/23/2014 12:23 PM, Dmitry Dolgov wrote:
Hi all,
I'm faced with some troubles about the jsonb implementation, and I
hope I'll get little advice =)
If I understand correctly, an abstract function for jsonb modification
should have the following stages:Jsonb -> JsonbValue -> Modification -> JsonbValue -> Jsonb
One can convert the *JsonbValue* to the *Jsonb* only by
*JsonbValueToJsonb* function. So, my question is can be *JsonbValue*,
that contains few *jbvBinary* elements, converted to *Jsonb* by this
function? It will be very useful, if you want modify only small part
of your JsonbValue (e.g. replace value of some key). But when I'm
trying to do this, an exception "unknown type of jsonb container"
appears. Maybe I missed something? Or is there another approach to do
this conversion?
If you can come up with a way of handling the jbvBinary values then by
all means send a patch.
But this problem is fairly easily worked around by using an iterator
over the binary value. The attached patch, which is work in progress for
adding in the currently missing json functions for jsonb, contains a
sort of example of doing this in jsonb_agg_transfn.
cheers
andrew
Attachments:
jsonbmissingfuncs.patchtext/x-patch; name=jsonbmissingfuncs.patchDownload
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
index 2fd87fc..82dae72 100644
--- a/src/backend/utils/adt/jsonb.c
+++ b/src/backend/utils/adt/jsonb.c
@@ -12,11 +12,20 @@
*/
#include "postgres.h"
+#include "miscadmin.h"
+#include "access/htup_details.h"
+#include "access/transam.h"
+#include "catalog/pg_cast.h"
+#include "catalog/pg_type.h"
#include "libpq/pqformat.h"
#include "utils/builtins.h"
+#include "utils/datetime.h"
+#include "utils/lsyscache.h"
#include "utils/json.h"
#include "utils/jsonapi.h"
#include "utils/jsonb.h"
+#include "utils/syscache.h"
+#include "utils/typcache.h"
typedef struct JsonbInState
{
@@ -24,6 +33,23 @@ typedef struct JsonbInState
JsonbValue *res;
} JsonbInState;
+/* unlike with json categories, we need to treat json and jsonb differently */
+typedef enum /* type categories for datum_to_jsonb */
+{
+ JSONBTYPE_NULL, /* null, so we didn't bother to identify */
+ JSONBTYPE_BOOL, /* boolean (built-in types only) */
+ JSONBTYPE_NUMERIC, /* numeric (ditto) */
+ JSONBTYPE_TIMESTAMP, /* we use special formatting for timestamp */
+ JSONBTYPE_TIMESTAMPTZ, /* ... and timestamptz */
+ /*
+ * NB: datum_to_jsonb relies on everything from JSONBTYPE_JSON through
+ * JSONBTYPE_JSONBCAST being contiguous (it uses a >= / <= range check
+ * to detect the "work has been done recursively" categories), so do
+ * not reorder or insert values in this span carelessly.
+ */
+ JSONBTYPE_JSON, /* JSON */
+ JSONBTYPE_JSONB, /* JSONB */
+ JSONBTYPE_ARRAY, /* array */
+ JSONBTYPE_COMPOSITE, /* composite */
+ JSONBTYPE_JSONCAST, /* something with an explicit cast to JSON */
+ JSONBTYPE_JSONBCAST, /* something with an explicit cast to JSONB */
+ JSONBTYPE_OTHER /* all else */
+} JsonbTypeCategory;
+
static inline Datum jsonb_from_cstring(char *json, int len);
static size_t checkStringLen(size_t len);
static void jsonb_in_object_start(void *pstate);
@@ -33,6 +59,22 @@ static void jsonb_in_array_end(void *pstate);
static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull);
static void jsonb_put_escaped_value(StringInfo out, JsonbValue *scalarVal);
static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype);
+/* forward declarations for the jsonb-creation support routines below */
+static void jsonb_categorize_type(Oid typoid,
+ JsonbTypeCategory *tcategory,
+ Oid *outfuncoid);
+static void composite_to_jsonb(Datum composite, JsonbInState *result);
+static void array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims,
+ Datum *vals, bool *nulls, int *valcount,
+ JsonbTypeCategory tcategory, Oid outfuncoid);
+static void array_to_jsonb_internal(Datum array, JsonbInState *result);
+static void datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+ JsonbTypeCategory tcategory, Oid outfuncoid,
+ bool key_scalar);
+static void add_jsonb(Datum val, bool is_null, JsonbInState *result,
+ Oid val_type, bool key_scalar);
/*
* jsonb type input function
@@ -462,3 +504,1070 @@ JsonbToCString(StringInfo out, JsonbContainer *in, int estimated_len)
return out->data;
}
+
+
+/*
+ * Determine how we want to render values of a given type in datum_to_jsonb.
+ *
+ * Given the datatype OID, return its JsonbTypeCategory, as well as the type's
+ * output function OID. If the returned category is JSONBTYPE_JSONCAST or
+ * JSONBTYPE_JSONBCAST, we return the OID of the type's cast function to
+ * json/jsonb instead of its output function.
+ */
+static void
+jsonb_categorize_type(Oid typoid,
+ JsonbTypeCategory *tcategory,
+ Oid *outfuncoid)
+{
+ bool typisvarlena;
+
+ /* Look through any domain */
+ typoid = getBaseType(typoid);
+
+ /* We'll usually need to return the type output function */
+ getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+
+ /* Check for known types */
+ switch (typoid)
+ {
+ case BOOLOID:
+ *tcategory = JSONBTYPE_BOOL;
+ break;
+
+ case INT2OID:
+ case INT4OID:
+ case INT8OID:
+ case FLOAT4OID:
+ case FLOAT8OID:
+ case NUMERICOID:
+ *tcategory = JSONBTYPE_NUMERIC;
+ break;
+
+ case TIMESTAMPOID:
+ *tcategory = JSONBTYPE_TIMESTAMP;
+ break;
+
+ case TIMESTAMPTZOID:
+ *tcategory = JSONBTYPE_TIMESTAMPTZ;
+ break;
+
+ case JSONBOID:
+ *tcategory = JSONBTYPE_JSONB;
+ break;
+
+ case JSONOID:
+ *tcategory = JSONBTYPE_JSON;
+ break;
+
+ default:
+ /* Check for arrays and composites */
+ if (OidIsValid(get_element_type(typoid)))
+ *tcategory = JSONBTYPE_ARRAY;
+ else if (type_is_rowtype(typoid))
+ *tcategory = JSONBTYPE_COMPOSITE;
+ else
+ {
+ /* It's probably the general case ... */
+ *tcategory = JSONBTYPE_OTHER;
+ /* but let's look for a cast to json or jsonb, if it's not built-in */
+ /* a cast to jsonb, looked up first, takes precedence over json */
+ if (typoid >= FirstNormalObjectId)
+ {
+ HeapTuple tuple;
+
+ tuple = SearchSysCache2(CASTSOURCETARGET,
+ ObjectIdGetDatum(typoid),
+ ObjectIdGetDatum(JSONBOID));
+ if (HeapTupleIsValid(tuple))
+ {
+ Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+ /* only a function cast gives us something callable */
+ if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+ {
+ *tcategory = JSONBTYPE_JSONBCAST;
+ *outfuncoid = castForm->castfunc;
+ }
+
+ ReleaseSysCache(tuple);
+ }
+ else
+ {
+ tuple = SearchSysCache2(CASTSOURCETARGET,
+ ObjectIdGetDatum(typoid),
+ ObjectIdGetDatum(JSONOID));
+ if (HeapTupleIsValid(tuple))
+ {
+ Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+ if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+ {
+ *tcategory = JSONBTYPE_JSONCAST;
+ *outfuncoid = castForm->castfunc;
+ }
+
+ ReleaseSysCache(tuple);
+ }
+ }
+ }
+ }
+ break;
+ }
+ }
+}
+
+/*
+ * Turn a Datum into jsonb, pushing the result into "result".
+ *
+ * tcategory and outfuncoid are from a previous call to jsonb_categorize_type,
+ * except that if is_null is true then they can be invalid.
+ *
+ * If key_scalar is true, the value is being used as an object key, so insist
+ * it's of an acceptable type, and force it to be rendered as a string.
+ */
+static void
+datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+ JsonbTypeCategory tcategory, Oid outfuncoid,
+ bool key_scalar)
+{
+ char *outputstr;
+ bool numeric_error;
+ JsonbValue jb;
+ bool scalar_jsonb = false;
+
+ if (is_null)
+ {
+ jb.type = jbvNull;
+ }
+ else if (key_scalar &&
+ (tcategory == JSONBTYPE_ARRAY ||
+ tcategory == JSONBTYPE_COMPOSITE ||
+ tcategory == JSONBTYPE_JSON ||
+ tcategory == JSONBTYPE_JSONB ||
+ tcategory == JSONBTYPE_JSONCAST ||
+ tcategory == JSONBTYPE_JSONBCAST))
+ {
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("key value must be scalar, not array, composite or json")));
+ }
+ else
+ {
+ /* for cast categories, first run the registered cast function */
+ if (tcategory == JSONBTYPE_JSONCAST || tcategory == JSONBTYPE_JSONBCAST)
+ val = OidFunctionCall1(outfuncoid, val);
+
+ switch (tcategory)
+ {
+ case JSONBTYPE_ARRAY:
+ array_to_jsonb_internal(val, result);
+ break;
+ case JSONBTYPE_COMPOSITE:
+ composite_to_jsonb(val, result);
+ break;
+ case JSONBTYPE_BOOL:
+ if (key_scalar)
+ {
+ /* keys are always strings */
+ outputstr = DatumGetBool(val) ? "true" : "false";
+ jb.type = jbvString;
+ jb.val.string.len = strlen(outputstr);
+ jb.val.string.val = outputstr;
+ }
+ else
+ {
+ jb.type = jbvBool;
+ jb.val.boolean = DatumGetBool(val);
+ }
+ break;
+ case JSONBTYPE_NUMERIC:
+ outputstr = OidOutputFunctionCall(outfuncoid, val);
+ if (key_scalar)
+ {
+ /* always quote keys */
+ jb.type = jbvString;
+ jb.val.string.len = strlen(outputstr);
+ jb.val.string.val = outputstr;
+ }
+ else
+ {
+ /*
+ * Make it numeric if it's a valid JSON number,
+ * otherwise a string. Invalid numeric output
+ * (Infinity, NaN) will always have an 'N' or 'n' in it.
+ */
+ numeric_error = (strchr(outputstr,'N') != NULL ||
+ strchr(outputstr,'n') != NULL);
+ if (!numeric_error)
+ {
+ jb.type = jbvNumeric;
+ /* pass proper Datums for the unused typioparam/typmod args */
+ jb.val.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in,
+ CStringGetDatum(outputstr),
+ ObjectIdGetDatum(InvalidOid),
+ Int32GetDatum(-1)));
+ pfree(outputstr);
+ }
+ else
+ {
+ jb.type = jbvString;
+ jb.val.string.len = strlen(outputstr);
+ jb.val.string.val = outputstr;
+ }
+ }
+ break;
+ case JSONBTYPE_TIMESTAMP:
+ {
+ Timestamp timestamp;
+ struct pg_tm tm;
+ fsec_t fsec;
+ char buf[MAXDATELEN + 1];
+
+ timestamp = DatumGetTimestamp(val);
+
+ /* XSD doesn't support infinite values */
+ if (TIMESTAMP_NOT_FINITE(timestamp))
+ ereport(ERROR,
+ (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+ errmsg("timestamp out of range"),
+ errdetail("JSON does not support infinite timestamp values.")));
+ else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
+ EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
+ else
+ ereport(ERROR,
+ (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+ errmsg("timestamp out of range")));
+
+ jb.type = jbvString;
+ jb.val.string.len = strlen(buf);
+ jb.val.string.val = pstrdup(buf);
+ }
+ break;
+ case JSONBTYPE_TIMESTAMPTZ:
+ {
+ TimestampTz timestamp;
+ struct pg_tm tm;
+ int tz;
+ fsec_t fsec;
+ const char *tzn = NULL;
+ char buf[MAXDATELEN + 1];
+
+ /* was DatumGetTimestamp; use the timestamptz fetch macro */
+ timestamp = DatumGetTimestampTz(val);
+
+ /* XSD doesn't support infinite values */
+ if (TIMESTAMP_NOT_FINITE(timestamp))
+ ereport(ERROR,
+ (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+ errmsg("timestamp out of range"),
+ errdetail("JSON does not support infinite timestamp values.")));
+ else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0)
+ EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
+ else
+ ereport(ERROR,
+ (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+ errmsg("timestamp out of range")));
+
+ jb.type = jbvString;
+ jb.val.string.len = strlen(buf);
+ jb.val.string.val = pstrdup(buf);
+ }
+ break;
+ case JSONBTYPE_JSONCAST:
+ case JSONBTYPE_JSON:
+ {
+ /* parse the json right into the existing result object */
+ JsonLexContext *lex;
+ JsonSemAction sem;
+ text *json = DatumGetTextP(val);
+
+ lex = makeJsonLexContext(json, true);
+
+ /*
+ * Zero the action struct first: pg_parse_json consults all
+ * of its callback members, and the previous coding left the
+ * ones not assigned below uninitialized.
+ */
+ memset(&sem, 0, sizeof(sem));
+
+ sem.semstate = (void *) result;
+
+ sem.object_start = jsonb_in_object_start;
+ sem.array_start = jsonb_in_array_start;
+ sem.object_end = jsonb_in_object_end;
+ sem.array_end = jsonb_in_array_end;
+ sem.scalar = jsonb_in_scalar;
+ sem.object_field_start = jsonb_in_object_field_start;
+
+ pg_parse_json(lex, &sem);
+
+ }
+ break;
+ case JSONBTYPE_JSONBCAST:
+ case JSONBTYPE_JSONB:
+ {
+ Jsonb *jsonb = DatumGetJsonb(val);
+ int type;
+ JsonbIterator *it;
+
+ it = JsonbIteratorInit(&jsonb->root);
+
+ if (JB_ROOT_IS_SCALAR(jsonb))
+ {
+ /* unwrap the single-element raw-scalar array */
+ (void) JsonbIteratorNext(&it, &jb, true);
+ Assert(jb.type == jbvArray);
+ (void) JsonbIteratorNext(&it, &jb, true);
+ scalar_jsonb = true;
+ }
+ else
+ {
+ /* replay the whole jsonb into the result under construction */
+ while ((type = JsonbIteratorNext(&it, &jb, false))
+ != WJB_DONE)
+ {
+ if (type == WJB_END_ARRAY || type == WJB_END_OBJECT ||
+ type == WJB_BEGIN_ARRAY || type == WJB_BEGIN_OBJECT)
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ else
+ result->res = pushJsonbValue(&result->parseState,
+ type, &jb);
+ }
+ }
+ }
+ break;
+ default:
+ /* everything else is rendered via its output function */
+ outputstr = OidOutputFunctionCall(outfuncoid, val);
+ if (key_scalar && *outputstr == '\0')
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("key value must not be empty")));
+ jb.type = jbvString;
+ jb.val.string.len = checkStringLen(strlen(outputstr));
+ jb.val.string.val = outputstr;
+ break;
+ }
+ }
+ /* this range covers JSON, JSONB, ARRAY, COMPOSITE and the cast cases */
+ if (tcategory >= JSONBTYPE_JSON && tcategory <= JSONBTYPE_JSONBCAST &&
+ ! scalar_jsonb)
+ {
+ /* work has been done recursively */
+ return;
+ }
+ else if (result->parseState == NULL)
+ {
+ /* single root scalar: wrap it in a raw-scalar array */
+ JsonbValue va;
+
+ va.type = jbvArray;
+ va.val.array.rawScalar = true;
+ va.val.array.nElems = 1;
+
+ result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, &va);
+ result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+ result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+ }
+ else
+ {
+ JsonbValue *o = &result->parseState->contVal;
+
+ switch (o->type)
+ {
+ case jbvArray:
+ result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+ break;
+ case jbvObject:
+ result->res = pushJsonbValue(&result->parseState,
+ key_scalar ? WJB_KEY : WJB_VALUE,
+ &jb);
+ break;
+ default:
+ elog(ERROR, "unexpected parent of nested structure");
+ }
+ }
+}
+
+/*
+ * Process a single dimension of an array.
+ * If it's the innermost dimension, output the values, otherwise call
+ * ourselves recursively to process the next dimension.
+ *
+ * "valcount" is a cursor into the flattened vals/nulls arrays (as produced
+ * by deconstruct_array); it advances across the entire recursion so that
+ * each element is consumed exactly once.
+ */
+static void
+array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims, Datum *vals,
+ bool *nulls, int *valcount, JsonbTypeCategory tcategory,
+ Oid outfuncoid)
+{
+ int i;
+
+ Assert(dim < ndims);
+
+ /* each dimension becomes a nested jsonb array */
+ result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+
+ for (i = 1; i <= dims[dim]; i++)
+ {
+ if (dim + 1 == ndims)
+ {
+ /* innermost dimension: emit the element itself */
+ datum_to_jsonb(vals[*valcount], nulls[*valcount], result, tcategory,
+ outfuncoid, false);
+ (*valcount)++;
+ }
+ else
+ {
+ /* recurse to lay out the next dimension */
+ array_dim_to_jsonb(result, dim + 1, ndims, dims, vals, nulls,
+ valcount, tcategory, outfuncoid);
+ }
+ }
+
+ result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+}
+
+/*
+ * Turn an array into jsonb.
+ *
+ * The element type is categorized once and reused for every element.
+ */
+static void
+array_to_jsonb_internal(Datum array, JsonbInState *result)
+{
+ ArrayType *v = DatumGetArrayTypeP(array);
+ Oid element_type = ARR_ELEMTYPE(v);
+ int *dim;
+ int ndim;
+ int nitems;
+ int count = 0;
+ Datum *elements;
+ bool *nulls;
+ int16 typlen;
+ bool typbyval;
+ char typalign;
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+
+ ndim = ARR_NDIM(v);
+ dim = ARR_DIMS(v);
+ nitems = ArrayGetNItems(ndim, dim);
+
+ /* empty array (or empty dimension) renders as [] */
+ if (nitems <= 0)
+ {
+ result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+ result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+ return;
+ }
+
+ get_typlenbyvalalign(element_type,
+ &typlen, &typbyval, &typalign);
+
+ /* one category lookup serves all elements */
+ jsonb_categorize_type(element_type,
+ &tcategory, &outfuncoid);
+
+ deconstruct_array(v, element_type, typlen, typbyval,
+ typalign, &elements, &nulls,
+ &nitems);
+
+ array_dim_to_jsonb(result, 0, ndim, dim, elements, nulls, &count, tcategory,
+ outfuncoid);
+
+ pfree(elements);
+ pfree(nulls);
+}
+
+/*
+ * Turn a composite / record into jsonb, one key/value pair per
+ * non-dropped attribute.
+ */
+static void
+composite_to_jsonb(Datum composite, JsonbInState *result)
+{
+ HeapTupleHeader td;
+ Oid tupType;
+ int32 tupTypmod;
+ TupleDesc tupdesc;
+ HeapTupleData tmptup,
+ *tuple;
+ int i;
+
+ td = DatumGetHeapTupleHeader(composite);
+
+ /* Extract rowtype info and find a tupdesc */
+ tupType = HeapTupleHeaderGetTypeId(td);
+ tupTypmod = HeapTupleHeaderGetTypMod(td);
+ tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+
+ /* Build a temporary HeapTuple control structure */
+ tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
+ tmptup.t_data = td;
+ tuple = &tmptup;
+
+ result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_OBJECT, NULL);
+
+ for (i = 0; i < tupdesc->natts; i++)
+ {
+ Datum val;
+ bool isnull;
+ char *attname;
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+ JsonbValue v;
+
+ /* dropped columns are simply omitted from the object */
+ if (tupdesc->attrs[i]->attisdropped)
+ continue;
+
+ attname = NameStr(tupdesc->attrs[i]->attname);
+
+ /* the attribute name becomes the object key */
+ v.type = jbvString;
+ /* don't need checkStringLen here - can't exceed maximum name length */
+ v.val.string.len = strlen(attname);
+ v.val.string.val = attname;
+
+ result->res = pushJsonbValue(&result->parseState, WJB_KEY, &v);
+
+ val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+ if (isnull)
+ {
+ /* category/output function are ignored for nulls */
+ tcategory = JSONBTYPE_NULL;
+ outfuncoid = InvalidOid;
+ }
+ else
+ jsonb_categorize_type(tupdesc->attrs[i]->atttypid,
+ &tcategory, &outfuncoid);
+
+ datum_to_jsonb(val, isnull, result, tcategory, outfuncoid, false);
+ }
+
+ result->res = pushJsonbValue(&result->parseState, WJB_END_OBJECT, NULL);
+ /* pairs with lookup_rowtype_tupdesc above */
+ ReleaseTupleDesc(tupdesc);
+}
+
+/*
+ * Add the Datum "val" of type "val_type" to "result" as a jsonb value
+ * (or, if key_scalar, as an object key).
+ *
+ * This is just a thin wrapper around datum_to_jsonb. If the same type will be
+ * processed many times, avoid using this; better to do the
+ * jsonb_categorize_type lookups only once.
+ */
+
+static void
+add_jsonb(Datum val, bool is_null, JsonbInState *result,
+ Oid val_type, bool key_scalar)
+{
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+
+ if (val_type == InvalidOid)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("could not determine input data type")));
+
+ if (is_null)
+ {
+ /* category/output function are ignored for nulls */
+ tcategory = JSONBTYPE_NULL;
+ outfuncoid = InvalidOid;
+ }
+ else
+ jsonb_categorize_type(val_type,
+ &tcategory, &outfuncoid);
+
+ datum_to_jsonb(val, is_null, result, tcategory, outfuncoid, key_scalar);
+}
+
+/*
+ * SQL function to_jsonb(anyvalue)
+ *
+ * Convert any SQL value to jsonb. NOTE(review): this never handles a SQL
+ * NULL argument itself; the pg_proc entry for OID 3787 in this patch is
+ * marked strict, so the executor is expected to short-circuit NULL input —
+ * confirm that stays in sync.
+ */
+Datum
+to_jsonb(PG_FUNCTION_ARGS)
+{
+ Datum val = PG_GETARG_DATUM(0);
+ Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
+ JsonbInState result;
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+
+ /* "any" arguments have no fixed type; resolve from the call site */
+ if (val_type == InvalidOid)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("could not determine input data type")));
+
+ jsonb_categorize_type(val_type,
+ &tcategory, &outfuncoid);
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ datum_to_jsonb(val, false, &result, tcategory, outfuncoid, false);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_object(variadic "any")
+ *
+ * Build a jsonb object out of alternating key/value arguments.
+ * Keys must be non-null scalars; values may be anything convertible.
+ */
+Datum
+jsonb_build_object(PG_FUNCTION_ARGS)
+{
+ int nargs = PG_NARGS();
+ int i;
+ Datum arg;
+ Oid val_type;
+ JsonbInState result;
+
+ /* arguments come strictly in key/value pairs */
+ if (nargs % 2 != 0)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("invalid number of arguments: object must be matched key value pairs")));
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+ for (i = 0; i < nargs; i += 2)
+ {
+
+ /* process key: object keys may never be null */
+
+ if (PG_ARGISNULL(i))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("arg %d: key cannot be null", i + 1)));
+ val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+
+ /*
+ * turn a constant (more or less literal) value that's of unknown type
+ * into text. Unknowns come in as a cstring pointer.
+ */
+ if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+ {
+ val_type = TEXTOID;
+ /* the key was verified non-null just above */
+ arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+ }
+ else
+ {
+ arg = PG_GETARG_DATUM(i);
+ }
+ if (val_type == InvalidOid || val_type == UNKNOWNOID)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("arg %d: could not determine data type", i + 1)));
+
+ add_jsonb(arg, false, &result, val_type, true);
+
+ /* process value: nulls are allowed here */
+
+ val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
+ /* see comments above */
+ if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1))
+ {
+ val_type = TEXTOID;
+ if (PG_ARGISNULL(i + 1))
+ arg = (Datum) 0;
+ else
+ arg = CStringGetTextDatum(PG_GETARG_POINTER(i + 1));
+ }
+ else
+ {
+ arg = PG_GETARG_DATUM(i + 1);
+ }
+ if (val_type == InvalidOid || val_type == UNKNOWNOID)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("arg %d: could not determine data type", i + 2)));
+ add_jsonb(arg, PG_ARGISNULL(i + 1), &result, val_type, false);
+
+ }
+
+ result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_object where it gets 0 arguments.
+ *
+ * Returns an empty jsonb object.
+ */
+Datum
+jsonb_build_object_noargs(PG_FUNCTION_ARGS)
+{
+ JsonbInState result;
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+ result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_array(variadic "any")
+ *
+ * Build a jsonb array out of the given arguments; nulls are allowed.
+ */
+Datum
+jsonb_build_array(PG_FUNCTION_ARGS)
+{
+ int nargs = PG_NARGS();
+ int i;
+ Datum arg;
+ Oid val_type;
+ JsonbInState result;
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+
+ for (i = 0; i < nargs; i++)
+ {
+ val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+ /*
+ * see comments in jsonb_build_object above; note that a stray
+ * "arg = PG_GETARG_DATUM(i + 1)" that read one argument past the
+ * end on the last iteration has been removed — both branches
+ * below set arg properly.
+ */
+ if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+ {
+ val_type = TEXTOID;
+ if (PG_ARGISNULL(i))
+ arg = (Datum) 0;
+ else
+ arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+ }
+ else
+ {
+ arg = PG_GETARG_DATUM(i);
+ }
+ if (val_type == InvalidOid || val_type == UNKNOWNOID)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("arg %d: could not determine data type", i + 1)));
+ add_jsonb(arg, PG_ARGISNULL(i), &result, val_type, false);
+ }
+
+ result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_array where it gets 0 arguments.
+ *
+ * Returns an empty jsonb array.
+ */
+Datum
+jsonb_build_array_noargs(PG_FUNCTION_ARGS)
+{
+ JsonbInState result;
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+ result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * SQL function jsonb_object(text[])
+ *
+ * take a one or two dimensional array of text as name value pairs
+ * for a json object.
+ *
+ */
+Datum
+jsonb_object(PG_FUNCTION_ARGS)
+{
+ ArrayType *in_array = PG_GETARG_ARRAYTYPE_P(0);
+ int ndims = ARR_NDIM(in_array);
+ Datum *in_datums;
+ bool *in_nulls;
+ int in_count,
+ count,
+ i;
+ JsonbInState result;
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+ switch (ndims)
+ {
+ case 0:
+ goto close_object;
+ break;
+
+ case 1:
+ if ((ARR_DIMS(in_array)[0]) % 2)
+ ereport(ERROR,
+ (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+ errmsg("array must have even number of elements")));
+ break;
+
+ case 2:
+ if ((ARR_DIMS(in_array)[1]) != 2)
+ ereport(ERROR,
+ (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+ errmsg("array must have two columns")));
+ break;
+
+ default:
+ ereport(ERROR,
+ (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+ errmsg("wrong number of array subscripts")));
+ }
+
+ deconstruct_array(in_array,
+ TEXTOID, -1, false, 'i',
+ &in_datums, &in_nulls, &in_count);
+
+ count = in_count / 2;
+
+ for (i = 0; i < count; ++i)
+ {
+ JsonbValue v;
+ char *str;
+ int len;
+
+ if (in_nulls[i * 2])
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("null value not allowed for object key")));
+
+ str = TextDatumGetCString(in_datums[i*2]);
+ len = strlen(str);
+
+ v.type = jbvString;
+
+ v.val.string.len = len;
+ v.val.string.val = str;
+
+ result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+ if (in_nulls[i*2 + 1])
+ {
+ v.type = jbvNull;
+ }
+ else
+ {
+ str = TextDatumGetCString(in_datums[i*2 + 1]);
+ len = strlen(str);
+
+ v.type = jbvString;
+
+ v.val.string.len = len;
+ v.val.string.val = str;
+ }
+
+ result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+ }
+
+ pfree(in_datums);
+ pfree(in_nulls);
+
+close_object:
+ result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_object(text[], text[])
+ *
+ * take separate name and value arrays of text to construct a jsonb object
+ * pairwise.
+ */
+Datum
+jsonb_object_two_arg(PG_FUNCTION_ARGS)
+{
+ ArrayType *key_array = PG_GETARG_ARRAYTYPE_P(0);
+ ArrayType *val_array = PG_GETARG_ARRAYTYPE_P(1);
+ int nkdims = ARR_NDIM(key_array);
+ int nvdims = ARR_NDIM(val_array);
+ Datum *key_datums,
+ *val_datums;
+ bool *key_nulls,
+ *val_nulls;
+ int key_count,
+ val_count,
+ i;
+ JsonbInState result;
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+ /* both inputs must be 1-D (or both empty) */
+ if (nkdims > 1 || nkdims != nvdims)
+ ereport(ERROR,
+ (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+ errmsg("wrong number of array subscripts")));
+
+ /*
+ * Empty inputs yield an empty jsonb object. The previous coding
+ * returned a text datum ("{}") here, which is the wrong type for a
+ * function whose declared return type is jsonb.
+ */
+ if (nkdims == 0)
+ {
+ result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+ }
+
+ deconstruct_array(key_array,
+ TEXTOID, -1, false, 'i',
+ &key_datums, &key_nulls, &key_count);
+
+ deconstruct_array(val_array,
+ TEXTOID, -1, false, 'i',
+ &val_datums, &val_nulls, &val_count);
+
+ if (key_count != val_count)
+ ereport(ERROR,
+ (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+ errmsg("mismatched array dimensions")));
+
+ for (i = 0; i < key_count; ++i)
+ {
+ JsonbValue v;
+ char *str;
+ int len;
+
+ if (key_nulls[i])
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("null value not allowed for object key")));
+
+ str = TextDatumGetCString(key_datums[i]);
+ len = strlen(str);
+
+ v.type = jbvString;
+
+ v.val.string.len = len;
+ v.val.string.val = str;
+
+ result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+ /* null values become jsonb null; everything else stays text */
+ if (val_nulls[i])
+ {
+ v.type = jbvNull;
+ }
+ else
+ {
+ str = TextDatumGetCString(val_datums[i]);
+ len = strlen(str);
+
+ v.type = jbvString;
+
+ v.val.string.len = len;
+ v.val.string.val = str;
+ }
+
+ result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+ }
+
+ result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+ pfree(key_datums);
+ pfree(key_nulls);
+ pfree(val_datums);
+ pfree(val_nulls);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * jsonb_agg aggregate function: transition function.
+ *
+ * The transition state is a JsonbInState holding an open jsonb array in
+ * the aggregate memory context; each input row is first converted to a
+ * standalone Jsonb in the per-call context and then replayed into the
+ * accumulator via an iterator.
+ */
+Datum
+jsonb_agg_transfn(PG_FUNCTION_ARGS)
+{
+ Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+ MemoryContext oldcontext, aggcontext;
+ JsonbInState elem;
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+ Datum val;
+ JsonbInState *result;
+ bool single_scalar = false;
+ JsonbIterator *it;
+ Jsonb *jbelem;
+ JsonbValue v;
+ int type;
+
+ if (val_type == InvalidOid)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("could not determine input data type")));
+
+ if (!AggCheckCallContext(fcinfo, &aggcontext))
+ {
+ /* cannot be called directly because of internal-type argument */
+ elog(ERROR, "jsonb_agg_transfn called in non-aggregate context");
+ }
+
+ /* turn the argument into jsonb in the normal function context */
+
+ val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+ jsonb_categorize_type(val_type,
+ &tcategory, &outfuncoid);
+
+ memset(&elem, 0, sizeof(JsonbInState));
+
+ /*
+ * Pass the real null flag: the previous coding always passed false,
+ * so a NULL input was processed as if Datum 0 were a genuine value.
+ */
+ datum_to_jsonb(val, PG_ARGISNULL(1), &elem, tcategory, outfuncoid, false);
+
+ jbelem = JsonbValueToJsonb(elem.res);
+
+ /* switch to the aggregate context for accumulation operations */
+
+ oldcontext = MemoryContextSwitchTo(aggcontext);
+
+ /* set up the accumulator on the first go round */
+
+ if (PG_ARGISNULL(0))
+ {
+ result = palloc0(sizeof(JsonbInState));
+ result->res = pushJsonbValue(&result->parseState,
+ WJB_BEGIN_ARRAY, NULL);
+
+ }
+ else
+ {
+ result = (JsonbInState *) PG_GETARG_POINTER(0);
+ }
+
+ /* replay the element into the accumulator */
+ it = JsonbIteratorInit(&jbelem->root);
+
+ while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+ {
+ switch (type)
+ {
+ case WJB_BEGIN_ARRAY:
+ /* strip the raw-scalar wrapper array around a lone scalar */
+ if (v.val.array.rawScalar)
+ single_scalar = true;
+ else
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ break;
+ case WJB_END_ARRAY:
+ if (! single_scalar )
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ break;
+ case WJB_BEGIN_OBJECT:
+ case WJB_END_OBJECT:
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ break;
+ case WJB_ELEM:
+ case WJB_KEY:
+ case WJB_VALUE:
+ if (v.type == jbvString)
+ {
+ /*
+ * Copy string values into the aggregate context. jsonb
+ * strings are counted, not NUL-terminated, so copy by
+ * length (the previous snprintf("%s", ...) could read
+ * past the end of the value).
+ */
+ char *buf = palloc(v.val.string.len + 1);
+
+ memcpy(buf, v.val.string.val, v.val.string.len);
+ buf[v.val.string.len] = '\0';
+ v.val.string.val = buf;
+ }
+ else if (v.type == jbvNumeric)
+ {
+ /* same for numeric: numeric_uplus yields a fresh copy */
+ v.val.numeric = DatumGetNumeric(DirectFunctionCall1(numeric_uplus,
+ NumericGetDatum(v.val.numeric)));
+ }
+ result->res = pushJsonbValue(&result->parseState,
+ type, &v);
+ break;
+ default:
+ elog(ERROR, "unknown jsonb iterator token type");
+ }
+ }
+
+ MemoryContextSwitchTo(oldcontext);
+
+ PG_RETURN_POINTER(result);
+}
+
+/*
+ * jsonb_agg aggregate function: final function.
+ *
+ * Closes the accumulated array and serializes it into a Jsonb.
+ *
+ * NOTE(review): this mutates the transition state (pushes WJB_END_ARRAY
+ * onto result->parseState), so it does not look safe to invoke more than
+ * once on the same state (e.g. if used as a moving-aggregate/window final
+ * function) — confirm this cannot happen for this aggregate.
+ */
+Datum
+jsonb_agg_finalfn(PG_FUNCTION_ARGS)
+{
+ JsonbInState *result;
+ Jsonb *out;
+
+ /* cannot be called directly because of internal-type argument */
+ Assert(AggCheckCallContext(fcinfo, NULL));
+
+ if (PG_ARGISNULL(0))
+ PG_RETURN_NULL(); /* returns null iff no input values */
+
+ result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+ result->res = pushJsonbValue(&result->parseState,
+ WJB_END_ARRAY, NULL);
+
+
+ out = JsonbValueToJsonb(result->res);
+
+ PG_RETURN_POINTER(out);
+}
diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c
index 04f35bf..d546fd1 100644
--- a/src/backend/utils/adt/jsonb_util.c
+++ b/src/backend/utils/adt/jsonb_util.c
@@ -1328,7 +1328,7 @@ convertJsonbValue(StringInfo buffer, JEntry *header, JsonbValue *val, int level)
else if (val->type == jbvObject)
convertJsonbObject(buffer, header, val, level);
else
- elog(ERROR, "unknown type of jsonb container");
+ elog(ERROR, "unknown type of jsonb container to convert");
}
static void
diff --git a/src/include/catalog/pg_aggregate.h b/src/include/catalog/pg_aggregate.h
index 3ba9e5e..1ca0158 100644
--- a/src/include/catalog/pg_aggregate.h
+++ b/src/include/catalog/pg_aggregate.h
@@ -286,6 +286,9 @@ DATA(insert ( 3545 n 0 bytea_string_agg_transfn bytea_string_agg_finalfn - -
DATA(insert ( 3175 n 0 json_agg_transfn json_agg_finalfn - - - f f 0 2281 0 0 0 _null_ _null_ ));
DATA(insert ( 3197 n 0 json_object_agg_transfn json_object_agg_finalfn - - - f f 0 2281 0 0 0 _null_ _null_ ));
+/* jsonb */
+DATA(insert ( 3267 n 0 jsonb_agg_transfn jsonb_agg_finalfn - - - f f 0 2281 0 0 0 _null_ _null_ ));
+
/* ordered-set and hypothetical-set aggregates */
DATA(insert ( 3972 o 1 ordered_set_transition percentile_disc_final - - - t f 0 2281 0 0 0 _null_ _null_ ));
DATA(insert ( 3974 o 1 ordered_set_transition percentile_cont_float8_final - - - f f 0 2281 0 0 0 _null_ _null_ ));
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index d30d21a..923c1ee 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4595,6 +4595,27 @@ DESCR("I/O");
DATA(insert OID = 3803 ( jsonb_send PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "3802" _null_ _null_ _null_ _null_ jsonb_send _null_ _null_ _null_ ));
DESCR("I/O");
+DATA(insert OID = 3263 ( jsonb_object PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "1009" _null_ _null_ _null_ _null_ jsonb_object _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3264 ( jsonb_object PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 3802 "1009 1009" _null_ _null_ _null_ _null_ jsonb_object_two_arg _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3787 ( to_jsonb PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ to_jsonb _null_ _null_ _null_ ));
+DESCR("map input to jsonb");
+DATA(insert OID = 3265 ( jsonb_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ jsonb_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate transition function");
+DATA(insert OID = 3266 ( jsonb_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate final function");
+DATA(insert OID = 3267 ( jsonb_agg PGNSP PGUID 12 1 0 0 0 t f f f f f i 1 0 3802 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate input into jsonb");
+DATA(insert OID = 3259 ( jsonb_build_array PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_array _null_ _null_ _null_ ));
+DESCR("build a jsonb array from any inputs");
+DATA(insert OID = 3260 ( jsonb_build_array PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802 "" _null_ _null_ _null_ _null_ jsonb_build_array_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb array");
+DATA(insert OID = 3261 ( jsonb_build_object PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_object _null_ _null_ _null_ ));
+DESCR("build a jsonb object from pairwise key/value inputs");
+DATA(insert OID = 3262 ( jsonb_build_object PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802 "" _null_ _null_ _null_ _null_ jsonb_build_object_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb object");
+
DATA(insert OID = 3478 ( jsonb_object_field PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field _null_ _null_ _null_ ));
DATA(insert OID = 3214 ( jsonb_object_field_text PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 25 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field_text _null_ _null_ _null_ ));
DATA(insert OID = 3215 ( jsonb_array_element PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 23" _null_ _null_ "{from_json, element_index}" _null_ jsonb_array_element _null_ _null_ _null_ ));
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index 91e3e14..68312d2 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -315,6 +315,20 @@ extern Datum jsonb_recv(PG_FUNCTION_ARGS);
extern Datum jsonb_send(PG_FUNCTION_ARGS);
extern Datum jsonb_typeof(PG_FUNCTION_ARGS);
+/* generator routines */
+extern Datum to_jsonb(PG_FUNCTION_ARGS);
+
+extern Datum jsonb_build_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_object_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_two_arg(PG_FUNCTION_ARGS);
+
+/* jsonb_agg functions */
+extern Datum jsonb_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_agg_finalfn(PG_FUNCTION_ARGS);
+
/* Indexing-related ops */
extern Datum jsonb_exists(PG_FUNCTION_ARGS);
extern Datum jsonb_exists_any(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index ed266d5..81e0f74 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -256,6 +256,86 @@ SELECT jsonb_typeof('"hello"') AS string;
SELECT jsonb_typeof('"true"') AS string;
SELECT jsonb_typeof('"1.0"') AS string;
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object(
+ 'a', jsonb_build_object('b',false,'c',99),
+ 'd', jsonb_build_object('e',array[9,8,7]::int[],
+ 'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+
+SELECT jsonb_build_object();
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+
+SELECT jsonb_build_object('turbines',json_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+
+-- jsonb_object
+
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+
+--two argument form of jsonb_object
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- mismatched dimensions
+
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+
+-- null key error
+
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+
+-- empty key is allowed
+
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+
+
+
-- extract_path, extract_path_as_text
SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
Hi
I am working on review of this patch.
There is new warnings:
jsonb.c: In function ‘jsonb_agg_transfn’:
jsonb.c:1540:20: warning: assignment makes pointer from integer without a
cast
v.val.numeric = DirectFunctionCall1(numeric_uplus,
NumericGetDatum(v.val.numeric));
^
jsonb.c: In function ‘jsonb_object_agg_transfn’:
jsonb.c:1745:20: warning: assignment makes pointer from integer without a
cast
v.val.numeric = DirectFunctionCall1(numeric_uplus,
NumericGetDatum(v.val.numeric));
[pavel@localhost postgresql]$ gcc --version
gcc (GCC) 4.9.1 20140930 (Red Hat 4.9.1-11)
Check fails
parallel group (19 tests): alter_table plancache temp domain prepare limit
plpgsql conversion sequence copy2 rangefuncs returning truncate xml with
without_oid largeobject polymorphism rowtypes
plancache ... FAILED (test process exited with exit
code 2)
limit ... FAILED (test process exited with exit
code 2)
plpgsql ... FAILED (test process exited with exit
code 2)
copy2 ... FAILED (test process exited with exit
code 2)
temp ... FAILED (test process exited with exit
code 2)
domain ... FAILED (test process exited with exit
code 2)
rangefuncs ... FAILED (test process exited with exit
code 2)
prepare ... FAILED (test process exited with exit
code 2)
without_oid ... FAILED (test process exited with exit
code 2)
conversion ... FAILED (test process exited with exit
code 2)
truncate ... FAILED (test process exited with exit
code 2)
alter_table ... FAILED (test process exited with exit
code 2)
sequence ... FAILED (test process exited with exit
code 2)
polymorphism ... FAILED (test process exited with exit
code 2)
rowtypes ... FAILED (test process exited with exit
code 2)
returning ... FAILED (test process exited with exit
code 2)
largeobject ... FAILED (test process exited with exit
code 2)
with ... FAILED (test process exited with exit
code 2)
xml ... FAILED (test process exited with exit
code 2)
test stats ... FAILED (test process exited with exit
code 2)
[pavel@localhost postgresql]$ uname -a
Linux localhost.localdomain 3.16.3-302.fc21.x86_64 #1 SMP Fri Sep 26
14:27:20 UTC 2014 x86_64 x86_64 x86_64 GNU/Linux
backtrace
Core was generated by `postgres: pavel regression [local]
SELECT '.
Program terminated with signal SIGSEGV, Segmentation fault.
(gdb) bt
#0 0x0000000001e95300 in ?? ()
#1 0x00000000007c048b in parse_object_field (lex=0x1ede9d8,
sem=0x7fff3c3c4660) at json.c:398
#2 0x00000000007c0524 in parse_object (lex=0x1ede9d8, sem=0x7fff3c3c4660)
at json.c:430
#3 0x00000000007c0214 in pg_parse_json (lex=0x1ede9d8, sem=0x7fff3c3c4660)
at json.c:297
#4 0x00000000007c5d91 in datum_to_jsonb (val=32118224, is_null=0 '\000',
result=0x7fff3c3c4800, tcategory=JSONBTYPE_JSON,
outfuncoid=322, key_scalar=0 '\000') at jsonb.c:789
#5 0x00000000007c68be in add_jsonb (val=32118224, is_null=0 '\000',
result=0x7fff3c3c4800, val_type=114, key_scalar=0 '\000')
at jsonb.c:1050
#6 0x00000000007c6d08 in jsonb_build_object (fcinfo=0x1edcb80) at
jsonb.c:1155
#7 0x000000000060bfc5 in ExecMakeFunctionResultNoSets (fcache=0x1edcb10,
econtext=0x1edc920, isNull=0x1edd568 "", isDone=0x1edd680)
at execQual.c:1992
#8 0x000000000060c8bc in ExecEvalFunc (fcache=0x1edcb10,
econtext=0x1edc920, isNull=0x1edd568 "", isDone=0x1edd680)
at execQual.c:2383
#9 0x0000000000612869 in ExecTargetList (targetlist=0x1edd650,
econtext=0x1edc920, values=0x1edd550, isnull=0x1edd568 "",
itemIsDone=0x1edd680, isDone=0x7fff3c3c4a84) at execQual.c:5265
#10 0x0000000000612e9d in ExecProject (projInfo=0x1edd580,
isDone=0x7fff3c3c4a84) at execQual.c:5480
#11 0x000000000062c046 in ExecResult (node=0x1edc810) at nodeResult.c:155
#12 0x0000000000608997 in ExecProcNode (node=0x1edc810) at
execProcnode.c:373
#13 0x000000000060696e in ExecutePlan (estate=0x1edc700,
planstate=0x1edc810, operation=CMD_SELECT, sendTuples=1 '\001',
numberTuples=0, direction=ForwardScanDirection, dest=0x1ea18b0) at
execMain.c:1481
#14 0x0000000000604de8 in standard_ExecutorRun (queryDesc=0x1edc2f0,
direction=ForwardScanDirection, count=0) at execMain.c:308
#15 0x0000000000604ce5 in ExecutorRun (queryDesc=0x1edc2f0,
direction=ForwardScanDirection, count=0) at execMain.c:256
#16 0x000000000075615a in PortalRunSelect (portal=0x1eda2e0, forward=1
'\001', count=0, dest=0x1ea18b0) at pquery.c:946
#17 0x0000000000755e34 in PortalRun (portal=0x1eda2e0,
count=9223372036854775807, isTopLevel=1 '\001', dest=0x1ea18b0,
altdest=0x1ea18b0, completionTag=0x7fff3c3c4dc0 "") at pquery.c:790
#18 0x00000000007502c2 in exec_simple_query (
query_string=0x1ea0050 "SELECT jsonb_build_object('e',json '{\"x\": 3,
\"y\": [1,2,3]}');") at postgres.c:1045
#19 0x0000000000754284 in PostgresMain (argc=1, argv=0x1e45448,
dbname=0x1e452f8 "postgres", username=0x1e452e0 "pavel")
at postgres.c:4010
#20 0x00000000006e7954 in BackendRun (port=0x1e64d20) at postmaster.c:4118
#21 0x00000000006e70ac in BackendStartup (port=0x1e64d20) at
postmaster.c:3793
#22 0x00000000006e3ba9 in ServerLoop () at postmaster.c:1572
#23 0x00000000006e327c in PostmasterMain (argc=3, argv=0x1e44540) at
postmaster.c:1219
Problematic statement:
SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3,
"y": [1,2,3]}');
or SELECT jsonb_build_object('e',json '{"x": 3, "y": [1,2,3]}');
Tested on HEAD
Regards
Pavel
2014-09-24 2:20 GMT+02:00 Andrew Dunstan <andrew@dunslane.net>:
Show quoted text
On 09/23/2014 12:23 PM, Dmitry Dolgov wrote:
Hi all,
I'm faced with some troubles about the jsonb implementation, and I hope
I'll get little advice =)
If I understand correctly, an abstract function for jsonb modification
should have the following stages: Jsonb -> JsonbValue -> Modification -> JsonbValue -> Jsonb
One can convert the *JsonbValue* to the *Jsonb* only by
*JsonbValueToJsonb* function. So, my question is can be *JsonbValue*, that
contains few *jbvBinary* elements, converted to *Jsonb* by this function?
It will be very useful, if you want modify only small part of your
JsonbValue (e.g. replace value of some key). But when I'm trying to do
this, an exception "unknown type of jsonb container" appears. Maybe I
missed something? Or is there another approach to do this conversion?

If you can come up with a way of handling the jbvBinary values then by all
means send a patch.

But this problem is fairly easily worked around by using an iterator over
the binary value. The attached patch, which is work in progress for adding
in the currently missing json functions for jsonb, contains a sort of
example of doing this in jsonb_agg_transfn.

cheers
andrew
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 10/13/2014 06:41 AM, Pavel Stehule wrote:
Hi
I am working on review of this patch.
The patch attached to the message you are replying to was never intended
to be reviewed. It was only given by way of illustration of a technique.
The original patch to be reviewed is on the message
</messages/by-id/5425D277.4030804@dunslane.net> as
shown on the commitfest app. I have just submitted a revised patch to
fix the compiler warnings you complained of, at
</messages/by-id/543BD598.4020809@dunslane.net> I
have not found any segfaults in the regression tests.
And please don't top-post on the PostgreSQL lists.
cheers
andrew
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
2014-10-13 15:45 GMT+02:00 Andrew Dunstan <andrew@dunslane.net>:
On 10/13/2014 06:41 AM, Pavel Stehule wrote:
Hi
I am working on review of this patch.
The patch attached to the message you are replying to was never intended
to be reviewed. It was only given by way of illustration of a technique.

The original patch to be reviewed is on the message <
/messages/by-id/5425D277.4030804@dunslane.net> as
shown on the commitfest app. I have just submitted a revised patch to fix
the compiler warnings you complained of, at <http://www.postgresql.org/
message-id/543BD598.4020809@dunslane.net> I have not found any segfaults
in the regression tests.
I checked this last version - warning is out, but SIGFAULT on jsonb test is
there .. I rechecked it with clang compiler, but result is same
I try to search why
And please don't top-post on the PostgreSQL lists.
I am sorry
Regards
Pavel
Show quoted text
cheers
andrew
Pavel Stehule <pavel.stehule@gmail.com> writes:
I checked this last version - warning is out, but SIGFAULT on jsonb test is
there .. I rechecked it with clang compiler, but result is same
Stack trace please?
regards, tom lane
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
2014-10-13 16:19 GMT+02:00 Tom Lane <tgl@sss.pgh.pa.us>:
Pavel Stehule <pavel.stehule@gmail.com> writes:
I checked this last version - warning is out, but SIGFAULT on jsonb test
is
there .. I rechecked it with clang compiler, but result is same
Stack trace please?
(gdb) bt
#0 0x0000000000000072 in ?? ()
#1 0x000000000087d598 in parse_array_element (lex=0x2880118,
sem=0x7fffb4f02508) at json.c:461
#2 0x0000000000878da7 in parse_array (lex=0x2880118, sem=0x7fffb4f02508)
at json.c:505
#3 0x000000000087d837 in parse_object_field (lex=0x2880118,
sem=0x7fffb4f02508) at json.c:391
#4 0x0000000000878cb2 in parse_object (lex=0x2880118, sem=0x7fffb4f02508)
at json.c:432
#5 0x000000000087831c in pg_parse_json (lex=0x2880118, sem=0x7fffb4f02508)
at json.c:297
#6 0x000000000087f484 in datum_to_jsonb (val=42202912, is_null=0 '\000',
result=0x7fffb4f02800,
tcategory=JSONBTYPE_JSON, outfuncoid=322, key_scalar=0 '\000') at
jsonb.c:789
#7 0x000000000087fce7 in add_jsonb (val=42202912, is_null=0 '\000',
result=0x7fffb4f02800, val_type=114,
key_scalar=0 '\000') at jsonb.c:1050
#8 0x000000000087fbcc in jsonb_build_object (fcinfo=0x287e2c0) at
jsonb.c:1155
#9 0x000000000066d179 in ExecMakeFunctionResultNoSets (fcache=0x287e250,
econtext=0x287e060, isNull=0x287eca8 "",
isDone=0x287edc0) at execQual.c:1992
#10 0x000000000066776f in ExecEvalFunc (fcache=0x287e250,
econtext=0x287e060, isNull=0x287eca8 "", isDone=0x287edc0)
at execQual.c:2383
#11 0x000000000066c3bb in ExecTargetList (targetlist=0x287ed90,
econtext=0x287e060, values=0x287ec90,
isnull=0x287eca8 "", itemIsDone=0x287edc0, isDone=0x7fffb4f02aac) at
execQual.c:5265
#12 0x000000000066c2c2 in ExecProject (projInfo=0x287ecc0,
isDone=0x7fffb4f02aac) at execQual.c:5480
#13 0x0000000000689ceb in ExecResult (node=0x287df50) at nodeResult.c:155
#14 0x0000000000661987 in ExecProcNode (node=0x287df50) at
execProcnode.c:373
#15 0x000000000065dd46 in ExecutePlan (estate=0x287de40,
planstate=0x287df50, operation=CMD_SELECT,
sendTuples=1 '\001', numberTuples=0, direction=ForwardScanDirection,
dest=0x283fa00) at execMain.c:1481
#16 0x000000000065dc70 in standard_ExecutorRun (queryDesc=0x2809d50,
direction=ForwardScanDirection, count=0)
at execMain.c:308
#17 0x000000000065db3f in ExecutorRun (queryDesc=0x2809d50,
direction=ForwardScanDirection, count=0)
at execMain.c:256
#18 0x00000000007ec70c in PortalRunSelect (portal=0x2807bc0, forward=1
'\001', count=0, dest=0x283fa00)
at pquery.c:946
#19 0x00000000007ec229 in PortalRun (portal=0x2807bc0,
count=9223372036854775807, isTopLevel=1 '\001',
dest=0x283fa00, altdest=0x283fa00, completionTag=0x7fffb4f02ec0 "") at
pquery.c:790
#20 0x00000000007e7f7c in exec_simple_query (
query_string=0x283e1a0 "SELECT jsonb_build_object('e',json '{\"x\": 3,
\"y\": [1,2,3]}');") at postgres.c:1045
#21 0x00000000007e72cb in PostgresMain (argc=1, argv=0x27e5838,
dbname=0x27e56e8 "postgres",
---Type <return> to continue, or q <return> to quit---q
username=0x27e56d0 "paveQuit
Regards
Pavel
Show quoted text
regards, tom lane
Hi
A JsonSemAction sem is not well initialized
a array_element_start is not initialized and enforces sigfault on my comp
*** ./utils/adt/jsonb.c.orig 2014-10-13 16:37:00.479708142 +0200
--- ./utils/adt/jsonb.c 2014-10-13 16:36:33.704650644 +0200
***************
*** 786,791 ****
--- 786,793 ----
sem.scalar = jsonb_in_scalar;
sem.object_field_start = jsonb_in_object_field_start;
+ sem.array_element_start = NULL;
+
pg_parse_json(lex, &sem);
}
I am not sure, if this fix is valid, but all tests are passed now
Regards
Pavel
2014-10-13 16:21 GMT+02:00 Pavel Stehule <pavel.stehule@gmail.com>:
Show quoted text
2014-10-13 16:19 GMT+02:00 Tom Lane <tgl@sss.pgh.pa.us>:
Pavel Stehule <pavel.stehule@gmail.com> writes:
I checked this last version - warning is out, but SIGFAULT on jsonb
test is
there .. I rechecked it with clang compiler, but result is same
Stack trace please?
(gdb) bt
#0 0x0000000000000072 in ?? ()
#1 0x000000000087d598 in parse_array_element (lex=0x2880118,
sem=0x7fffb4f02508) at json.c:461
#2 0x0000000000878da7 in parse_array (lex=0x2880118, sem=0x7fffb4f02508)
at json.c:505
#3 0x000000000087d837 in parse_object_field (lex=0x2880118,
sem=0x7fffb4f02508) at json.c:391
#4 0x0000000000878cb2 in parse_object (lex=0x2880118, sem=0x7fffb4f02508)
at json.c:432
#5 0x000000000087831c in pg_parse_json (lex=0x2880118,
sem=0x7fffb4f02508) at json.c:297
#6 0x000000000087f484 in datum_to_jsonb (val=42202912, is_null=0 '\000',
result=0x7fffb4f02800,
tcategory=JSONBTYPE_JSON, outfuncoid=322, key_scalar=0 '\000') at
jsonb.c:789
#7 0x000000000087fce7 in add_jsonb (val=42202912, is_null=0 '\000',
result=0x7fffb4f02800, val_type=114,
key_scalar=0 '\000') at jsonb.c:1050
#8 0x000000000087fbcc in jsonb_build_object (fcinfo=0x287e2c0) at
jsonb.c:1155
#9 0x000000000066d179 in ExecMakeFunctionResultNoSets (fcache=0x287e250,
econtext=0x287e060, isNull=0x287eca8 "",
isDone=0x287edc0) at execQual.c:1992
#10 0x000000000066776f in ExecEvalFunc (fcache=0x287e250,
econtext=0x287e060, isNull=0x287eca8 "", isDone=0x287edc0)
at execQual.c:2383
#11 0x000000000066c3bb in ExecTargetList (targetlist=0x287ed90,
econtext=0x287e060, values=0x287ec90,
isnull=0x287eca8 "", itemIsDone=0x287edc0, isDone=0x7fffb4f02aac) at
execQual.c:5265
#12 0x000000000066c2c2 in ExecProject (projInfo=0x287ecc0,
isDone=0x7fffb4f02aac) at execQual.c:5480
#13 0x0000000000689ceb in ExecResult (node=0x287df50) at nodeResult.c:155
#14 0x0000000000661987 in ExecProcNode (node=0x287df50) at
execProcnode.c:373
#15 0x000000000065dd46 in ExecutePlan (estate=0x287de40,
planstate=0x287df50, operation=CMD_SELECT,
sendTuples=1 '\001', numberTuples=0, direction=ForwardScanDirection,
dest=0x283fa00) at execMain.c:1481
#16 0x000000000065dc70 in standard_ExecutorRun (queryDesc=0x2809d50,
direction=ForwardScanDirection, count=0)
at execMain.c:308
#17 0x000000000065db3f in ExecutorRun (queryDesc=0x2809d50,
direction=ForwardScanDirection, count=0)
at execMain.c:256
#18 0x00000000007ec70c in PortalRunSelect (portal=0x2807bc0, forward=1
'\001', count=0, dest=0x283fa00)
at pquery.c:946
#19 0x00000000007ec229 in PortalRun (portal=0x2807bc0,
count=9223372036854775807, isTopLevel=1 '\001',
dest=0x283fa00, altdest=0x283fa00, completionTag=0x7fffb4f02ec0 "") at
pquery.c:790
#20 0x00000000007e7f7c in exec_simple_query (
query_string=0x283e1a0 "SELECT jsonb_build_object('e',json '{\"x\": 3,
\"y\": [1,2,3]}');") at postgres.c:1045
#21 0x00000000007e72cb in PostgresMain (argc=1, argv=0x27e5838,
dbname=0x27e56e8 "postgres",
---Type <return> to continue, or q <return> to quit---q
    username=0x27e56d0 "pave...  [gdb session quit]

Regards
Pavel
regards, tom lane
2014-10-13 15:45 GMT+02:00 Andrew Dunstan <andrew@dunslane.net>:
On 10/13/2014 06:41 AM, Pavel Stehule wrote:
Hi
I am working on review of this patch.
The patch attached to the message you are replying to was never intended
to be reviewed. It was only given by way of illustration of a technique.

The original patch to be reviewed is on the message <
/messages/by-id/5425D277.4030804@dunslane.net> as
shown on the commitfest app. I have just submitted a revised patch to fix
the compiler warnings you complained of, at <http://www.postgresql.org/
message-id/543BD598.4020809@dunslane.net> I have not found any segfaults
in the regression tests.

And please don't top-post on the PostgreSQL lists.
Attached small fix of uninitialized local variable
Regards
Pavel
Show quoted text
cheers
andrew
Attachments:
jsonbmissingfuncs3-2.patchtext/x-patch; charset=US-ASCII; name=jsonbmissingfuncs3-2.patchDownload
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
new file mode 100644
index 9beebb3..c9b84f8
*** a/src/backend/utils/adt/jsonb.c
--- b/src/backend/utils/adt/jsonb.c
***************
*** 12,22 ****
--- 12,31 ----
*/
#include "postgres.h"
+ #include "miscadmin.h"
+ #include "access/htup_details.h"
+ #include "access/transam.h"
+ #include "catalog/pg_cast.h"
+ #include "catalog/pg_type.h"
#include "libpq/pqformat.h"
#include "utils/builtins.h"
+ #include "utils/datetime.h"
+ #include "utils/lsyscache.h"
#include "utils/json.h"
#include "utils/jsonapi.h"
#include "utils/jsonb.h"
+ #include "utils/syscache.h"
+ #include "utils/typcache.h"
typedef struct JsonbInState
{
*************** typedef struct JsonbInState
*** 24,29 ****
--- 33,55 ----
JsonbValue *res;
} JsonbInState;
+ /* unlike with json categories, we need to treat json and jsonb differently */
+ typedef enum /* type categories for datum_to_jsonb */
+ {
+ JSONBTYPE_NULL, /* null, so we didn't bother to identify */
+ JSONBTYPE_BOOL, /* boolean (built-in types only) */
+ JSONBTYPE_NUMERIC, /* numeric (ditto) */
+ JSONBTYPE_TIMESTAMP, /* we use special formatting for timestamp */
+ JSONBTYPE_TIMESTAMPTZ, /* ... and timestamptz */
+ JSONBTYPE_JSON, /* JSON */
+ JSONBTYPE_JSONB, /* JSONB */
+ JSONBTYPE_ARRAY, /* array */
+ JSONBTYPE_COMPOSITE, /* composite */
+ JSONBTYPE_JSONCAST, /* something with an explicit cast to JSON */
+ JSONBTYPE_JSONBCAST, /* something with an explicit cast to JSONB */
+ JSONBTYPE_OTHER /* all else */
+ } JsonbTypeCategory;
+
static inline Datum jsonb_from_cstring(char *json, int len);
static size_t checkStringLen(size_t len);
static void jsonb_in_object_start(void *pstate);
*************** static void jsonb_in_array_end(void *pst
*** 33,38 ****
--- 59,80 ----
static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull);
static void jsonb_put_escaped_value(StringInfo out, JsonbValue *scalarVal);
static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype);
+ static void jsonb_categorize_type(Oid typoid,
+ JsonbTypeCategory * tcategory,
+ Oid *outfuncoid);
+ static void composite_to_jsonb(Datum composite, JsonbInState *result);
+ static void array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims,
+ Datum *vals, bool *nulls, int *valcount,
+ JsonbTypeCategory tcategory, Oid outfuncoid);
+ static void array_to_jsonb_internal(Datum array, JsonbInState *result);
+ static void jsonb_categorize_type(Oid typoid,
+ JsonbTypeCategory * tcategory,
+ Oid *outfuncoid);
+ static void datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+ JsonbTypeCategory tcategory, Oid outfuncoid,
+ bool key_scalar);
+ static void add_jsonb(Datum val, bool is_null, JsonbInState *result,
+ Oid val_type, bool key_scalar);
/*
* jsonb type input function
*************** JsonbToCString(StringInfo out, JsonbCont
*** 462,464 ****
--- 504,1786 ----
return out->data;
}
+
+
+ /*
+ * Determine how we want to render values of a given type in datum_to_jsonb.
+ *
+ * Given the datatype OID, return its JsonbTypeCategory, as well as the type's
+ * output function OID. If the returned category is JSONBTYPE_CAST, we
+ * return the OID of the type->JSON cast function instead.
+ */
+ static void
+ jsonb_categorize_type(Oid typoid,
+ JsonbTypeCategory * tcategory,
+ Oid *outfuncoid)
+ {
+ bool typisvarlena;
+
+ /* Look through any domain */
+ typoid = getBaseType(typoid);
+
+ /* We'll usually need to return the type output function */
+ getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+
+ /* Check for known types */
+ switch (typoid)
+ {
+ case BOOLOID:
+ *tcategory = JSONBTYPE_BOOL;
+ break;
+
+ case INT2OID:
+ case INT4OID:
+ case INT8OID:
+ case FLOAT4OID:
+ case FLOAT8OID:
+ case NUMERICOID:
+ *tcategory = JSONBTYPE_NUMERIC;
+ break;
+
+ case TIMESTAMPOID:
+ *tcategory = JSONBTYPE_TIMESTAMP;
+ break;
+
+ case TIMESTAMPTZOID:
+ *tcategory = JSONBTYPE_TIMESTAMPTZ;
+ break;
+
+ case JSONBOID:
+ *tcategory = JSONBTYPE_JSONB;
+ break;
+
+ case JSONOID:
+ *tcategory = JSONBTYPE_JSON;
+ break;
+
+ default:
+ /* Check for arrays and composites */
+ if (OidIsValid(get_element_type(typoid)))
+ *tcategory = JSONBTYPE_ARRAY;
+ else if (type_is_rowtype(typoid))
+ *tcategory = JSONBTYPE_COMPOSITE;
+ else
+ {
+ /* It's probably the general case ... */
+ *tcategory = JSONBTYPE_OTHER;
+
+ /*
+ * but let's look for a cast to json or jsonb, if it's not
+ * built-in
+ */
+ if (typoid >= FirstNormalObjectId)
+ {
+ HeapTuple tuple;
+
+ tuple = SearchSysCache2(CASTSOURCETARGET,
+ ObjectIdGetDatum(typoid),
+ ObjectIdGetDatum(JSONBOID));
+ if (HeapTupleIsValid(tuple))
+ {
+ Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+ if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+ {
+ *tcategory = JSONBTYPE_JSONBCAST;
+ *outfuncoid = castForm->castfunc;
+ }
+
+ ReleaseSysCache(tuple);
+ }
+ else
+ {
+ tuple = SearchSysCache2(CASTSOURCETARGET,
+ ObjectIdGetDatum(typoid),
+ ObjectIdGetDatum(JSONOID));
+ if (HeapTupleIsValid(tuple))
+ {
+ Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+ if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+ {
+ *tcategory = JSONBTYPE_JSONCAST;
+ *outfuncoid = castForm->castfunc;
+ }
+
+ ReleaseSysCache(tuple);
+ }
+ }
+ }
+ break;
+ }
+ }
+ }
+
+ /*
+ * Turn a Datum into jsonb, adding it to the result JsonbInState.
+ *
+ * tcategory and outfuncoid are from a previous call to jsonb_categorize_type,
+ * except that if is_null is true then they can be invalid.
+ *
+ * If key_scalar is true, the value is stored as a key, so insist
+ * it's of an acceptable type, and force it to be a jbvString.
+ */
+ static void
+ datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+ JsonbTypeCategory tcategory, Oid outfuncoid,
+ bool key_scalar)
+ {
+ char *outputstr;
+ bool numeric_error;
+ JsonbValue jb;
+ bool scalar_jsonb = false;
+
+ if (is_null)
+ {
+ jb.type = jbvNull;
+ }
+ else if (key_scalar &&
+ (tcategory == JSONBTYPE_ARRAY ||
+ tcategory == JSONBTYPE_COMPOSITE ||
+ tcategory == JSONBTYPE_JSON ||
+ tcategory == JSONBTYPE_JSONB ||
+ tcategory == JSONBTYPE_JSONCAST ||
+ tcategory == JSONBTYPE_JSONBCAST))
+ {
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("key value must be scalar, not array, composite or json")));
+ }
+ else
+ {
+ if (tcategory == JSONBTYPE_JSONCAST || tcategory == JSONBTYPE_JSONBCAST)
+ val = OidFunctionCall1(outfuncoid, val);
+
+ switch (tcategory)
+ {
+ case JSONBTYPE_ARRAY:
+ array_to_jsonb_internal(val, result);
+ break;
+ case JSONBTYPE_COMPOSITE:
+ composite_to_jsonb(val, result);
+ break;
+ case JSONBTYPE_BOOL:
+ if (key_scalar)
+ {
+ outputstr = DatumGetBool(val) ? "true" : "false";
+ jb.type = jbvString;
+ jb.val.string.len = strlen(outputstr);
+ jb.val.string.val = outputstr;
+ }
+ else
+ {
+ jb.type = jbvBool;
+ jb.val.boolean = DatumGetBool(val);
+ }
+ break;
+ case JSONBTYPE_NUMERIC:
+ outputstr = OidOutputFunctionCall(outfuncoid, val);
+ if (key_scalar)
+ {
+ /* always quote keys */
+ jb.type = jbvString;
+ jb.val.string.len = strlen(outputstr);
+ jb.val.string.val = outputstr;
+ }
+ else
+ {
+ /*
+ * Make it numeric if it's a valid JSON number, otherwise
+ * a string. Invalid numeric output will always have an
+ * 'N' or 'n' in it (I think).
+ */
+ numeric_error = (strchr(outputstr, 'N') != NULL ||
+ strchr(outputstr, 'n') != NULL);
+ if (!numeric_error)
+ {
+ jb.type = jbvNumeric;
+ jb.val.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(outputstr), 0, -1));
+
+ pfree(outputstr);
+ }
+ else
+ {
+ jb.type = jbvString;
+ jb.val.string.len = strlen(outputstr);
+ jb.val.string.val = outputstr;
+ }
+ }
+ break;
+ case JSONBTYPE_TIMESTAMP:
+ {
+ Timestamp timestamp;
+ struct pg_tm tm;
+ fsec_t fsec;
+ char buf[MAXDATELEN + 1];
+
+ timestamp = DatumGetTimestamp(val);
+
+ /* XSD doesn't support infinite values */
+ if (TIMESTAMP_NOT_FINITE(timestamp))
+ ereport(ERROR,
+ (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+ errmsg("timestamp out of range"),
+ errdetail("JSON does not support infinite timestamp values.")));
+ else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
+ EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
+ else
+ ereport(ERROR,
+ (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+ errmsg("timestamp out of range")));
+
+ jb.type = jbvString;
+ jb.val.string.len = strlen(buf);
+ jb.val.string.val = pstrdup(buf);
+ }
+ break;
+ case JSONBTYPE_TIMESTAMPTZ:
+ {
+ TimestampTz timestamp;
+ struct pg_tm tm;
+ int tz;
+ fsec_t fsec;
+ const char *tzn = NULL;
+ char buf[MAXDATELEN + 1];
+
+ timestamp = DatumGetTimestamp(val);
+
+ /* XSD doesn't support infinite values */
+ if (TIMESTAMP_NOT_FINITE(timestamp))
+ ereport(ERROR,
+ (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+ errmsg("timestamp out of range"),
+ errdetail("JSON does not support infinite timestamp values.")));
+ else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0)
+ EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
+ else
+ ereport(ERROR,
+ (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+ errmsg("timestamp out of range")));
+
+ jb.type = jbvString;
+ jb.val.string.len = strlen(buf);
+ jb.val.string.val = pstrdup(buf);
+ }
+ break;
+ case JSONBTYPE_JSONCAST:
+ case JSONBTYPE_JSON:
+ {
+ /* parse the json right into the existing result object */
+ JsonLexContext *lex;
+ JsonSemAction sem;
+ text *json = DatumGetTextP(val);
+
+ lex = makeJsonLexContext(json, true);
+
+ memset(&sem, 0, sizeof(sem));
+
+ sem.semstate = (void *) result;
+
+ sem.object_start = jsonb_in_object_start;
+ sem.array_start = jsonb_in_array_start;
+ sem.object_end = jsonb_in_object_end;
+ sem.array_end = jsonb_in_array_end;
+ sem.scalar = jsonb_in_scalar;
+ sem.object_field_start = jsonb_in_object_field_start;
+
+ pg_parse_json(lex, &sem);
+ }
+ break;
+ case JSONBTYPE_JSONBCAST:
+ case JSONBTYPE_JSONB:
+ {
+ Jsonb *jsonb = DatumGetJsonb(val);
+ int type;
+ JsonbIterator *it;
+
+ it = JsonbIteratorInit(&jsonb->root);
+
+ if (JB_ROOT_IS_SCALAR(jsonb))
+ {
+ (void) JsonbIteratorNext(&it, &jb, true);
+ Assert(jb.type == jbvArray);
+ (void) JsonbIteratorNext(&it, &jb, true);
+ scalar_jsonb = true;
+ }
+ else
+ {
+ while ((type = JsonbIteratorNext(&it, &jb, false))
+ != WJB_DONE)
+ {
+ if (type == WJB_END_ARRAY || type == WJB_END_OBJECT ||
+ type == WJB_BEGIN_ARRAY || type == WJB_BEGIN_OBJECT)
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ else
+ result->res = pushJsonbValue(&result->parseState,
+ type, &jb);
+ }
+ }
+ }
+ break;
+ default:
+ outputstr = OidOutputFunctionCall(outfuncoid, val);
+ jb.type = jbvString;
+ jb.val.string.len = checkStringLen(strlen(outputstr));
+ jb.val.string.val = outputstr;
+ break;
+ }
+ }
+ if (tcategory >= JSONBTYPE_JSON && tcategory <= JSONBTYPE_JSONBCAST &&
+ !scalar_jsonb)
+ {
+ /* work has been done recursively */
+ return;
+ }
+ else if (result->parseState == NULL)
+ {
+ /* single root scalar */
+ JsonbValue va;
+
+ va.type = jbvArray;
+ va.val.array.rawScalar = true;
+ va.val.array.nElems = 1;
+
+ result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, &va);
+ result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+ result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+ }
+ else
+ {
+ JsonbValue *o = &result->parseState->contVal;
+
+ switch (o->type)
+ {
+ case jbvArray:
+ result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+ break;
+ case jbvObject:
+ result->res = pushJsonbValue(&result->parseState,
+ key_scalar ? WJB_KEY : WJB_VALUE,
+ &jb);
+ break;
+ default:
+ elog(ERROR, "unexpected parent of nested structure");
+ }
+ }
+ }
+
+ /*
+ * Process a single dimension of an array.
+ * If it's the innermost dimension, output the values, otherwise call
+ * ourselves recursively to process the next dimension.
+ */
+ static void
+ array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims, Datum *vals,
+ bool *nulls, int *valcount, JsonbTypeCategory tcategory,
+ Oid outfuncoid)
+ {
+ int i;
+
+ Assert(dim < ndims);
+
+ result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+
+ for (i = 1; i <= dims[dim]; i++)
+ {
+ if (dim + 1 == ndims)
+ {
+ datum_to_jsonb(vals[*valcount], nulls[*valcount], result, tcategory,
+ outfuncoid, false);
+ (*valcount)++;
+ }
+ else
+ {
+ array_dim_to_jsonb(result, dim + 1, ndims, dims, vals, nulls,
+ valcount, tcategory, outfuncoid);
+ }
+ }
+
+ result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+ }
+
+ /*
+ * Turn an array into JSON.
+ */
+ static void
+ array_to_jsonb_internal(Datum array, JsonbInState *result)
+ {
+ ArrayType *v = DatumGetArrayTypeP(array);
+ Oid element_type = ARR_ELEMTYPE(v);
+ int *dim;
+ int ndim;
+ int nitems;
+ int count = 0;
+ Datum *elements;
+ bool *nulls;
+ int16 typlen;
+ bool typbyval;
+ char typalign;
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+
+ ndim = ARR_NDIM(v);
+ dim = ARR_DIMS(v);
+ nitems = ArrayGetNItems(ndim, dim);
+
+ if (nitems <= 0)
+ {
+ result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+ result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+ return;
+ }
+
+ get_typlenbyvalalign(element_type,
+ &typlen, &typbyval, &typalign);
+
+ jsonb_categorize_type(element_type,
+ &tcategory, &outfuncoid);
+
+ deconstruct_array(v, element_type, typlen, typbyval,
+ typalign, &elements, &nulls,
+ &nitems);
+
+ array_dim_to_jsonb(result, 0, ndim, dim, elements, nulls, &count, tcategory,
+ outfuncoid);
+
+ pfree(elements);
+ pfree(nulls);
+ }
+
+ /*
+ * Turn a composite / record into JSON.
+ */
+ static void
+ composite_to_jsonb(Datum composite, JsonbInState *result)
+ {
+ HeapTupleHeader td;
+ Oid tupType;
+ int32 tupTypmod;
+ TupleDesc tupdesc;
+ HeapTupleData tmptup,
+ *tuple;
+ int i;
+
+ td = DatumGetHeapTupleHeader(composite);
+
+ /* Extract rowtype info and find a tupdesc */
+ tupType = HeapTupleHeaderGetTypeId(td);
+ tupTypmod = HeapTupleHeaderGetTypMod(td);
+ tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+
+ /* Build a temporary HeapTuple control structure */
+ tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
+ tmptup.t_data = td;
+ tuple = &tmptup;
+
+ result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_OBJECT, NULL);
+
+ for (i = 0; i < tupdesc->natts; i++)
+ {
+ Datum val;
+ bool isnull;
+ char *attname;
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+ JsonbValue v;
+
+ if (tupdesc->attrs[i]->attisdropped)
+ continue;
+
+ attname = NameStr(tupdesc->attrs[i]->attname);
+
+ v.type = jbvString;
+ /* don't need checkStringLen here - can't exceed maximum name length */
+ v.val.string.len = strlen(attname);
+ v.val.string.val = attname;
+
+ result->res = pushJsonbValue(&result->parseState, WJB_KEY, &v);
+
+ val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+ if (isnull)
+ {
+ tcategory = JSONBTYPE_NULL;
+ outfuncoid = InvalidOid;
+ }
+ else
+ jsonb_categorize_type(tupdesc->attrs[i]->atttypid,
+ &tcategory, &outfuncoid);
+
+ datum_to_jsonb(val, isnull, result, tcategory, outfuncoid, false);
+ }
+
+ result->res = pushJsonbValue(&result->parseState, WJB_END_OBJECT, NULL);
+ ReleaseTupleDesc(tupdesc);
+ }
+
+ /*
+ * Append JSON text for "val" to "result".
+ *
+ * This is just a thin wrapper around datum_to_json. If the same type will be
+ * printed many times, avoid using this; better to do the json_categorize_type
+ * lookups only once.
+ */
+
+ static void
+ add_jsonb(Datum val, bool is_null, JsonbInState *result,
+ Oid val_type, bool key_scalar)
+ {
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+
+ if (val_type == InvalidOid)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("could not determine input data type")));
+
+ if (is_null)
+ {
+ tcategory = JSONBTYPE_NULL;
+ outfuncoid = InvalidOid;
+ }
+ else
+ jsonb_categorize_type(val_type,
+ &tcategory, &outfuncoid);
+
+ datum_to_jsonb(val, is_null, result, tcategory, outfuncoid, key_scalar);
+ }
+
+ /*
+ * SQL function to_jsonb(anyvalue)
+ */
+ Datum
+ to_jsonb(PG_FUNCTION_ARGS)
+ {
+ Datum val = PG_GETARG_DATUM(0);
+ Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
+ JsonbInState result;
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+
+ if (val_type == InvalidOid)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("could not determine input data type")));
+
+ jsonb_categorize_type(val_type,
+ &tcategory, &outfuncoid);
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ datum_to_jsonb(val, false, &result, tcategory, outfuncoid, false);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+ }
+
+ /*
+ * SQL function jsonb_build_object(variadic "any")
+ */
+ Datum
+ jsonb_build_object(PG_FUNCTION_ARGS)
+ {
+ int nargs = PG_NARGS();
+ int i;
+ Datum arg;
+ Oid val_type;
+ JsonbInState result;
+
+ if (nargs % 2 != 0)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("invalid number or arguments: object must be matched key value pairs")));
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+ for (i = 0; i < nargs; i += 2)
+ {
+
+ /* process key */
+
+ if (PG_ARGISNULL(i))
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("arg %d: key cannot be null", i + 1)));
+ val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+
+ /*
+ * turn a constant (more or less literal) value that's of unknown type
+ * into text. Unknowns come in as a cstring pointer.
+ */
+ if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+ {
+ val_type = TEXTOID;
+ if (PG_ARGISNULL(i))
+ arg = (Datum) 0;
+ else
+ arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+ }
+ else
+ {
+ arg = PG_GETARG_DATUM(i);
+ }
+ if (val_type == InvalidOid || val_type == UNKNOWNOID)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("arg %d: could not determine data type", i + 1)));
+
+ add_jsonb(arg, false, &result, val_type, true);
+
+ /* process value */
+
+ val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
+ /* see comments above */
+ if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1))
+ {
+ val_type = TEXTOID;
+ if (PG_ARGISNULL(i + 1))
+ arg = (Datum) 0;
+ else
+ arg = CStringGetTextDatum(PG_GETARG_POINTER(i + 1));
+ }
+ else
+ {
+ arg = PG_GETARG_DATUM(i + 1);
+ }
+ if (val_type == InvalidOid || val_type == UNKNOWNOID)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("arg %d: could not determine data type", i + 2)));
+ add_jsonb(arg, PG_ARGISNULL(i + 1), &result, val_type, false);
+
+ }
+
+ result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+ }
+
+ /*
+ * degenerate case of jsonb_build_object where it gets 0 arguments.
+ */
+ Datum
+ jsonb_build_object_noargs(PG_FUNCTION_ARGS)
+ {
+ JsonbInState result;
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+ result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+ }
+
+ /*
+ * SQL function jsonb_build_array(variadic "any")
+ */
+ Datum
+ jsonb_build_array(PG_FUNCTION_ARGS)
+ {
+ int nargs = PG_NARGS();
+ int i;
+ Datum arg;
+ Oid val_type;
+ JsonbInState result;
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+
+ for (i = 0; i < nargs; i++)
+ {
+ val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+ arg = PG_GETARG_DATUM(i + 1);
+ /* see comments in jsonb_build_object above */
+ if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+ {
+ val_type = TEXTOID;
+ if (PG_ARGISNULL(i))
+ arg = (Datum) 0;
+ else
+ arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+ }
+ else
+ {
+ arg = PG_GETARG_DATUM(i);
+ }
+ if (val_type == InvalidOid || val_type == UNKNOWNOID)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("arg %d: could not determine data type", i + 1)));
+ add_jsonb(arg, PG_ARGISNULL(i), &result, val_type, false);
+ }
+
+ result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+ }
+
+ /*
+ * degenerate case of jsonb_build_array where it gets 0 arguments.
+ */
+ Datum
+ jsonb_build_array_noargs(PG_FUNCTION_ARGS)
+ {
+ JsonbInState result;
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+ result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+ }
+
+
+ /*
+ * SQL function jsonb_object(text[])
+ *
+ * take a one or two dimensional array of text as name value pairs
+ * for a json object.
+ *
+ */
+ Datum
+ jsonb_object(PG_FUNCTION_ARGS)
+ {
+ ArrayType *in_array = PG_GETARG_ARRAYTYPE_P(0);
+ int ndims = ARR_NDIM(in_array);
+ Datum *in_datums;
+ bool *in_nulls;
+ int in_count,
+ count,
+ i;
+ JsonbInState result;
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+ switch (ndims)
+ {
+ case 0:
+ goto close_object;
+ break;
+
+ case 1:
+ if ((ARR_DIMS(in_array)[0]) % 2)
+ ereport(ERROR,
+ (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+ errmsg("array must have even number of elements")));
+ break;
+
+ case 2:
+ if ((ARR_DIMS(in_array)[1]) != 2)
+ ereport(ERROR,
+ (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+ errmsg("array must have two columns")));
+ break;
+
+ default:
+ ereport(ERROR,
+ (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+ errmsg("wrong number of array subscripts")));
+ }
+
+ deconstruct_array(in_array,
+ TEXTOID, -1, false, 'i',
+ &in_datums, &in_nulls, &in_count);
+
+ count = in_count / 2;
+
+ for (i = 0; i < count; ++i)
+ {
+ JsonbValue v;
+ char *str;
+ int len;
+
+ if (in_nulls[i * 2])
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("null value not allowed for object key")));
+
+ str = TextDatumGetCString(in_datums[i * 2]);
+ len = strlen(str);
+
+ v.type = jbvString;
+
+ v.val.string.len = len;
+ v.val.string.val = str;
+
+ result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+ if (in_nulls[i * 2 + 1])
+ {
+ v.type = jbvNull;
+ }
+ else
+ {
+ str = TextDatumGetCString(in_datums[i * 2 + 1]);
+ len = strlen(str);
+
+ v.type = jbvString;
+
+ v.val.string.len = len;
+ v.val.string.val = str;
+ }
+
+ result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+ }
+
+ pfree(in_datums);
+ pfree(in_nulls);
+
+ close_object:
+ result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+ }
+
+ /*
+ * SQL function jsonb_object(text[], text[])
+ *
+ * take separate name and value arrays of text to construct a json object
+ * pairwise.
+ */
+ Datum
+ jsonb_object_two_arg(PG_FUNCTION_ARGS)
+ {
+ ArrayType *key_array = PG_GETARG_ARRAYTYPE_P(0);
+ ArrayType *val_array = PG_GETARG_ARRAYTYPE_P(1);
+ int nkdims = ARR_NDIM(key_array);
+ int nvdims = ARR_NDIM(val_array);
+ Datum *key_datums,
+ *val_datums;
+ bool *key_nulls,
+ *val_nulls;
+ int key_count,
+ val_count,
+ i;
+ JsonbInState result;
+
+ memset(&result, 0, sizeof(JsonbInState));
+
+ result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+ if (nkdims > 1 || nkdims != nvdims)
+ ereport(ERROR,
+ (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+ errmsg("wrong number of array subscripts")));
+
+ if (nkdims == 0)
+ PG_RETURN_DATUM(CStringGetTextDatum("{}"));
+
+ deconstruct_array(key_array,
+ TEXTOID, -1, false, 'i',
+ &key_datums, &key_nulls, &key_count);
+
+ deconstruct_array(val_array,
+ TEXTOID, -1, false, 'i',
+ &val_datums, &val_nulls, &val_count);
+
+ if (key_count != val_count)
+ ereport(ERROR,
+ (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+ errmsg("mismatched array dimensions")));
+
+ for (i = 0; i < key_count; ++i)
+ {
+ JsonbValue v;
+ char *str;
+ int len;
+
+ if (key_nulls[i])
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("null value not allowed for object key")));
+
+ str = TextDatumGetCString(key_datums[i]);
+ len = strlen(str);
+
+ v.type = jbvString;
+
+ v.val.string.len = len;
+ v.val.string.val = str;
+
+ result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+ if (val_nulls[i])
+ {
+ v.type = jbvNull;
+ }
+ else
+ {
+ str = TextDatumGetCString(val_datums[i]);
+ len = strlen(str);
+
+ v.type = jbvString;
+
+ v.val.string.len = len;
+ v.val.string.val = str;
+ }
+
+ result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+ }
+
+ result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+ pfree(key_datums);
+ pfree(key_nulls);
+ pfree(val_datums);
+ pfree(val_nulls);
+
+ PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+ }
+
+
+ /*
+ * jsonb_agg aggregate function
+ */
+ Datum
+ jsonb_agg_transfn(PG_FUNCTION_ARGS)
+ {
+ Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+ MemoryContext oldcontext,
+ aggcontext;
+ JsonbInState elem;
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+ Datum val;
+ JsonbInState *result;
+ bool single_scalar = false;
+ JsonbIterator *it;
+ Jsonb *jbelem;
+ JsonbValue v;
+ int type;
+
+ if (val_type == InvalidOid)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("could not determine input data type")));
+
+ if (!AggCheckCallContext(fcinfo, &aggcontext))
+ {
+ /* cannot be called directly because of internal-type argument */
+ elog(ERROR, "jsonb_agg_transfn called in non-aggregate context");
+ }
+
+ /* turn the argument into jsonb in the normal function context */
+
+ val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+ jsonb_categorize_type(val_type,
+ &tcategory, &outfuncoid);
+
+ memset(&elem, 0, sizeof(JsonbInState));
+
+ datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, false);
+
+ jbelem = JsonbValueToJsonb(elem.res);
+
+ /* switch to the aggregate context for accumulation operations */
+
+ oldcontext = MemoryContextSwitchTo(aggcontext);
+
+ /* set up the accumulator on the first go round */
+
+ if (PG_ARGISNULL(0))
+ {
+ result = palloc0(sizeof(JsonbInState));
+ result->res = pushJsonbValue(&result->parseState,
+ WJB_BEGIN_ARRAY, NULL);
+
+ }
+ else
+ {
+ result = (JsonbInState *) PG_GETARG_POINTER(0);
+ }
+
+ it = JsonbIteratorInit(&jbelem->root);
+
+ while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+ {
+ switch (type)
+ {
+ case WJB_BEGIN_ARRAY:
+ if (v.val.array.rawScalar)
+ single_scalar = true;
+ else
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ break;
+ case WJB_END_ARRAY:
+ if (!single_scalar)
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ break;
+ case WJB_BEGIN_OBJECT:
+ case WJB_END_OBJECT:
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ break;
+ case WJB_ELEM:
+ case WJB_KEY:
+ case WJB_VALUE:
+ if (v.type == jbvString)
+ {
+ /* copy string values in the aggreagate context */
+ char *buf = palloc(v.val.string.len + 1);;
+ snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+ v.val.string.val = buf;
+ }
+ else if (v.type == jbvNumeric)
+ {
+ /* same for numeric */
+ v.val.numeric =
+ DatumGetNumeric(DirectFunctionCall1(numeric_uplus,
+ NumericGetDatum(v.val.numeric)));
+
+ }
+ result->res = pushJsonbValue(&result->parseState,
+ type, &v);
+ break;
+ }
+ }
+
+ MemoryContextSwitchTo(oldcontext);
+
+ PG_RETURN_POINTER(result);
+ }
+
+ Datum
+ jsonb_agg_finalfn(PG_FUNCTION_ARGS)
+ {
+ JsonbInState *result;
+ Jsonb *out;
+
+ /* cannot be called directly because of internal-type argument */
+ Assert(AggCheckCallContext(fcinfo, NULL));
+
+ if (PG_ARGISNULL(0))
+ PG_RETURN_NULL(); /* returns null iff no input values */
+
+ result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+ result->res = pushJsonbValue(&result->parseState,
+ WJB_END_ARRAY, NULL);
+
+
+ out = JsonbValueToJsonb(result->res);
+
+ PG_RETURN_POINTER(out);
+ }
+
+ /*
+ * jsonb_object_agg aggregate function
+ */
+ Datum
+ jsonb_object_agg_transfn(PG_FUNCTION_ARGS)
+ {
+ Oid val_type;
+ MemoryContext oldcontext,
+ aggcontext;
+ JsonbInState elem;
+ JsonbTypeCategory tcategory;
+ Oid outfuncoid;
+ Datum val;
+ JsonbInState *result;
+ bool single_scalar;
+ JsonbIterator *it;
+ Jsonb *jbkey,
+ *jbval;
+ JsonbValue v;
+ int type;
+
+ if (!AggCheckCallContext(fcinfo, &aggcontext))
+ {
+ /* cannot be called directly because of internal-type argument */
+ elog(ERROR, "jsonb_object_agg_transfn called in non-aggregate context");
+ }
+
+ /* turn the argument into jsonb in the normal function context */
+
+ val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+
+ if (val_type == InvalidOid)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("could not determine input data type")));
+
+ val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+ jsonb_categorize_type(val_type,
+ &tcategory, &outfuncoid);
+
+ memset(&elem, 0, sizeof(JsonbInState));
+
+ datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, true);
+
+ jbkey = JsonbValueToJsonb(elem.res);
+
+ val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
+
+ if (val_type == InvalidOid)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("could not determine input data type")));
+
+ val = PG_ARGISNULL(2) ? (Datum) 0 : PG_GETARG_DATUM(2);
+
+ jsonb_categorize_type(val_type,
+ &tcategory, &outfuncoid);
+
+ memset(&elem, 0, sizeof(JsonbInState));
+
+ datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, false);
+
+ jbval = JsonbValueToJsonb(elem.res);
+
+ /* switch to the aggregate context for accumulation operations */
+
+ oldcontext = MemoryContextSwitchTo(aggcontext);
+
+ /* set up the accumulator on the first go round */
+
+ if (PG_ARGISNULL(0))
+ {
+ result = palloc0(sizeof(JsonbInState));
+ result->res = pushJsonbValue(&result->parseState,
+ WJB_BEGIN_OBJECT, NULL);
+
+ }
+ else
+ {
+ result = (JsonbInState *) PG_GETARG_POINTER(0);
+ }
+
+ it = JsonbIteratorInit(&jbkey->root);
+
+ /*
+ * keys should be scalar, and we should have already checked for that
+ * above when calling datum_to_jsonb, so we only need to look for these
+ * things.
+ */
+
+ while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+ {
+ switch (type)
+ {
+ case WJB_BEGIN_ARRAY:
+ if (!v.val.array.rawScalar)
+ elog(ERROR, "unexpected structure for key");
+ break;
+ case WJB_ELEM:
+ if (v.type == jbvString)
+ {
+ /* copy string values in the aggreagate context */
+ char *buf = palloc(v.val.string.len + 1);;
+ snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+ v.val.string.val = buf;
+ }
+ else
+ {
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg("object keys must be strings")));
+ }
+ result->res = pushJsonbValue(&result->parseState,
+ WJB_KEY, &v);
+ break;
+ case WJB_END_ARRAY:
+ break;
+ default:
+ elog(ERROR, "unexpected structure for key");
+ break;
+ }
+ }
+
+ it = JsonbIteratorInit(&jbval->root);
+
+ single_scalar = false;
+
+ /*
+ * values can be anything, including structured and null, so we treate
+ * them as in json_agg_transfn, except that single scalars are always
+ * pushed as WJB_VALUE items.
+ */
+
+ while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+ {
+ switch (type)
+ {
+ case WJB_BEGIN_ARRAY:
+ if (v.val.array.rawScalar)
+ single_scalar = true;
+ else
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ break;
+ case WJB_END_ARRAY:
+ if (!single_scalar)
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ break;
+ case WJB_BEGIN_OBJECT:
+ case WJB_END_OBJECT:
+ result->res = pushJsonbValue(&result->parseState,
+ type, NULL);
+ break;
+ case WJB_ELEM:
+ case WJB_KEY:
+ case WJB_VALUE:
+ if (v.type == jbvString)
+ {
+ /* copy string values in the aggreagate context */
+ char *buf = palloc(v.val.string.len + 1);;
+ snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+ v.val.string.val = buf;
+ }
+ else if (v.type == jbvNumeric)
+ {
+ /* same for numeric */
+ v.val.numeric =
+ DatumGetNumeric(DirectFunctionCall1(numeric_uplus,
+ NumericGetDatum(v.val.numeric)));
+
+ }
+ result->res = pushJsonbValue(&result->parseState,
+ single_scalar ? WJB_VALUE : type,
+ &v);
+ break;
+ }
+ }
+
+ MemoryContextSwitchTo(oldcontext);
+
+ PG_RETURN_POINTER(result);
+ }
+
+ Datum
+ jsonb_object_agg_finalfn(PG_FUNCTION_ARGS)
+ {
+ JsonbInState *result;
+ Jsonb *out;
+
+ /* cannot be called directly because of internal-type argument */
+ Assert(AggCheckCallContext(fcinfo, NULL));
+
+ if (PG_ARGISNULL(0))
+ PG_RETURN_NULL(); /* returns null iff no input values */
+
+ result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+ result->res = pushJsonbValue(&result->parseState,
+ WJB_END_OBJECT, NULL);
+
+
+ out = JsonbValueToJsonb(result->res);
+
+ PG_RETURN_POINTER(out);
+ }
diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c
new file mode 100644
index 2ff8539..4cce30d
*** a/src/backend/utils/adt/jsonb_util.c
--- b/src/backend/utils/adt/jsonb_util.c
*************** convertJsonbValue(StringInfo buffer, JEn
*** 1427,1433 ****
else if (val->type == jbvObject)
convertJsonbObject(buffer, header, val, level);
else
! elog(ERROR, "unknown type of jsonb container");
}
static void
--- 1427,1433 ----
else if (val->type == jbvObject)
convertJsonbObject(buffer, header, val, level);
else
! elog(ERROR, "unknown type of jsonb container to convert");
}
static void
diff --git a/src/include/catalog/pg_aggregate.h b/src/include/catalog/pg_aggregate.h
new file mode 100644
index 3ba9e5e..8e0735b
*** a/src/include/catalog/pg_aggregate.h
--- b/src/include/catalog/pg_aggregate.h
*************** DATA(insert ( 3545 n 0 bytea_string_agg_
*** 286,291 ****
--- 286,295 ----
DATA(insert ( 3175 n 0 json_agg_transfn json_agg_finalfn - - - f f 0 2281 0 0 0 _null_ _null_ ));
DATA(insert ( 3197 n 0 json_object_agg_transfn json_object_agg_finalfn - - - f f 0 2281 0 0 0 _null_ _null_ ));
+ /* jsonb */
+ DATA(insert ( 3267 n 0 jsonb_agg_transfn jsonb_agg_finalfn - - - f f 0 2281 0 0 0 _null_ _null_ ));
+ DATA(insert ( 3270 n 0 jsonb_object_agg_transfn jsonb_object_agg_finalfn - - - f f 0 2281 0 0 0 _null_ _null_ ));
+
/* ordered-set and hypothetical-set aggregates */
DATA(insert ( 3972 o 1 ordered_set_transition percentile_disc_final - - - t f 0 2281 0 0 0 _null_ _null_ ));
DATA(insert ( 3974 o 1 ordered_set_transition percentile_cont_float8_final - - - f f 0 2281 0 0 0 _null_ _null_ ));
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
new file mode 100644
index 4736532..4609947
*** a/src/include/catalog/pg_proc.h
--- b/src/include/catalog/pg_proc.h
*************** DESCR("I/O");
*** 4599,4604 ****
--- 4599,4631 ----
DATA(insert OID = 3803 ( jsonb_send PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "3802" _null_ _null_ _null_ _null_ jsonb_send _null_ _null_ _null_ ));
DESCR("I/O");
+ DATA(insert OID = 3263 ( jsonb_object PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "1009" _null_ _null_ _null_ _null_ jsonb_object _null_ _null_ _null_ ));
+ DESCR("map text array of key value pairs to jsonb object");
+ DATA(insert OID = 3264 ( jsonb_object PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 3802 "1009 1009" _null_ _null_ _null_ _null_ jsonb_object_two_arg _null_ _null_ _null_ ));
+ DESCR("map text array of key value pairs to jsonb object");
+ DATA(insert OID = 3787 ( to_jsonb PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ to_jsonb _null_ _null_ _null_ ));
+ DESCR("map input to jsonb");
+ DATA(insert OID = 3265 ( jsonb_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ jsonb_agg_transfn _null_ _null_ _null_ ));
+ DESCR("jsonb aggregate transition function");
+ DATA(insert OID = 3266 ( jsonb_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_agg_finalfn _null_ _null_ _null_ ));
+ DESCR("jsonb aggregate final function");
+ DATA(insert OID = 3267 ( jsonb_agg PGNSP PGUID 12 1 0 0 0 t f f f f f i 1 0 3802 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+ DESCR("aggregate input into jsonb");
+ DATA(insert OID = 3268 ( jsonb_object_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ jsonb_object_agg_transfn _null_ _null_ _null_ ));
+ DESCR("jsonb object aggregate transition function");
+ DATA(insert OID = 3269 ( jsonb_object_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_object_agg_finalfn _null_ _null_ _null_ ));
+ DESCR("jsonb object aggregate final function");
+ DATA(insert OID = 3270 ( jsonb_object_agg PGNSP PGUID 12 1 0 0 0 t f f f f f i 2 0 3802 "2276 2276" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+ DESCR("aggregate inputs into jsonb object");
+ DATA(insert OID = 3259 ( jsonb_build_array PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_array _null_ _null_ _null_ ));
+ DESCR("build a jsonb array from any inputs");
+ DATA(insert OID = 3260 ( jsonb_build_array PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802 "" _null_ _null_ _null_ _null_ jsonb_build_array_noargs _null_ _null_ _null_ ));
+ DESCR("build an empty jsonb array");
+ DATA(insert OID = 3261 ( jsonb_build_object PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_object _null_ _null_ _null_ ));
+ DESCR("build a jsonb object from pairwise key/value inputs");
+ DATA(insert OID = 3262 ( jsonb_build_object PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802 "" _null_ _null_ _null_ _null_ jsonb_build_object_noargs _null_ _null_ _null_ ));
+ DESCR("build an empty jsonb object");
+
DATA(insert OID = 3478 ( jsonb_object_field PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field _null_ _null_ _null_ ));
DATA(insert OID = 3214 ( jsonb_object_field_text PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 25 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field_text _null_ _null_ _null_ ));
DATA(insert OID = 3215 ( jsonb_array_element PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 23" _null_ _null_ "{from_json, element_index}" _null_ jsonb_array_element _null_ _null_ _null_ ));
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
new file mode 100644
index b89e4cb..d261aaa
*** a/src/include/utils/jsonb.h
--- b/src/include/utils/jsonb.h
*************** extern Datum jsonb_recv(PG_FUNCTION_ARGS
*** 350,355 ****
--- 350,371 ----
extern Datum jsonb_send(PG_FUNCTION_ARGS);
extern Datum jsonb_typeof(PG_FUNCTION_ARGS);
+ /* generator routines */
+ extern Datum to_jsonb(PG_FUNCTION_ARGS);
+
+ extern Datum jsonb_build_object(PG_FUNCTION_ARGS);
+ extern Datum jsonb_build_object_noargs(PG_FUNCTION_ARGS);
+ extern Datum jsonb_build_array(PG_FUNCTION_ARGS);
+ extern Datum jsonb_build_array_noargs(PG_FUNCTION_ARGS);
+ extern Datum jsonb_object(PG_FUNCTION_ARGS);
+ extern Datum jsonb_object_two_arg(PG_FUNCTION_ARGS);
+
+ /* jsonb_agg, json_object_agg functions */
+ extern Datum jsonb_agg_transfn(PG_FUNCTION_ARGS);
+ extern Datum jsonb_agg_finalfn(PG_FUNCTION_ARGS);
+ extern Datum jsonb_object_agg_transfn(PG_FUNCTION_ARGS);
+ extern Datum jsonb_object_agg_finalfn(PG_FUNCTION_ARGS);
+
/* Indexing-related ops */
extern Datum jsonb_exists(PG_FUNCTION_ARGS);
extern Datum jsonb_exists_any(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
new file mode 100644
index 9146f59..df9302d
*** a/src/test/regress/expected/jsonb.out
--- b/src/test/regress/expected/jsonb.out
*************** SELECT array_to_json(ARRAY [jsonb '{"a":
*** 301,306 ****
--- 301,328 ----
[{"a": 1},{"b": [2, 3]}]
(1 row)
+ --jsonb_agg
+ CREATE TEMP TABLE rows AS
+ SELECT x, 'txt' || x as y
+ FROM generate_series(1,3) AS x;
+ SELECT jsonb_agg(q)
+ FROM ( SELECT $$a$$ || x AS b, y AS c,
+ ARRAY[ROW(x.*,ARRAY[1,2,3]),
+ ROW(y.*,ARRAY[4,5,6])] AS z
+ FROM generate_series(1,2) x,
+ generate_series(4,5) y) q;
+ jsonb_agg
+ --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+ (1 row)
+
+ SELECT jsonb_agg(q)
+ FROM rows q;
+ jsonb_agg
+ -----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+ (1 row)
+
-- jsonb extraction functions
CREATE TEMP TABLE test_jsonb (
json_type text,
*************** SELECT jsonb_typeof('"1.0"') AS string;
*** 1256,1261 ****
--- 1278,1397 ----
string
(1 row)
+ -- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+ SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+ jsonb_build_array
+ -------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+ (1 row)
+
+ SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+ jsonb_build_object
+ -------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+ (1 row)
+
+ SELECT jsonb_build_object(
+ 'a', jsonb_build_object('b',false,'c',99),
+ 'd', jsonb_build_object('e',array[9,8,7]::int[],
+ 'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+ jsonb_build_object
+ ------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+ (1 row)
+
+ -- empty objects/arrays
+ SELECT jsonb_build_array();
+ jsonb_build_array
+ -------------------
+ []
+ (1 row)
+
+ SELECT jsonb_build_object();
+ jsonb_build_object
+ --------------------
+ {}
+ (1 row)
+
+ -- make sure keys are quoted
+ SELECT jsonb_build_object(1,2);
+ jsonb_build_object
+ --------------------
+ {"1": 2}
+ (1 row)
+
+ -- keys must be scalar and not null
+ SELECT jsonb_build_object(null,2);
+ ERROR: arg 1: key cannot be null
+ SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ ERROR: key value must be scalar, not array, composite or json
+ SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ ERROR: key value must be scalar, not array, composite or json
+ SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ ERROR: key value must be scalar, not array, composite or json
+ CREATE TEMP TABLE foo (serial_num int, name text, type text);
+ INSERT INTO foo VALUES (847001,'t15','GE1043');
+ INSERT INTO foo VALUES (847002,'t16','GE1043');
+ INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+ SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+ FROM foo;
+ jsonb_build_object
+ -------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+ (1 row)
+
+ -- jsonb_object
+ -- one dimension
+ SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+ jsonb_object
+ ---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+ (1 row)
+
+ -- same but with two dimensions
+ SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ jsonb_object
+ ---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+ (1 row)
+
+ -- odd number error
+ SELECT jsonb_object('{a,b,c}');
+ ERROR: array must have even number of elements
+ -- one column error
+ SELECT jsonb_object('{{a},{b}}');
+ ERROR: array must have two columns
+ -- too many columns error
+ SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ ERROR: array must have two columns
+ -- too many dimensions error
+ SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ ERROR: wrong number of array subscripts
+ --two argument form of jsonb_object
+ select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+ jsonb_object
+ --------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+ (1 row)
+
+ -- too many dimensions
+ SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ ERROR: wrong number of array subscripts
+ -- mismatched dimensions
+ select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ ERROR: mismatched array dimensions
+ select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ ERROR: mismatched array dimensions
+ -- null key error
+ select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ ERROR: null value not allowed for object key
+ -- empty key is allowed
+ select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+ jsonb_object
+ -------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+ (1 row)
+
-- extract_path, extract_path_as_text
SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
jsonb_extract_path
diff --git a/src/test/regress/expected/jsonb_1.out b/src/test/regress/expected/jsonb_1.out
new file mode 100644
index 83d61f8..a4af230
*** a/src/test/regress/expected/jsonb_1.out
--- b/src/test/regress/expected/jsonb_1.out
*************** SELECT array_to_json(ARRAY [jsonb '{"a":
*** 301,306 ****
--- 301,328 ----
[{"a": 1},{"b": [2, 3]}]
(1 row)
+ --jsonb_agg
+ CREATE TEMP TABLE rows AS
+ SELECT x, 'txt' || x as y
+ FROM generate_series(1,3) AS x;
+ SELECT jsonb_agg(q)
+ FROM ( SELECT $$a$$ || x AS b, y AS c,
+ ARRAY[ROW(x.*,ARRAY[1,2,3]),
+ ROW(y.*,ARRAY[4,5,6])] AS z
+ FROM generate_series(1,2) x,
+ generate_series(4,5) y) q;
+ jsonb_agg
+ --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+ (1 row)
+
+ SELECT jsonb_agg(q)
+ FROM rows q;
+ jsonb_agg
+ -----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+ (1 row)
+
-- jsonb extraction functions
CREATE TEMP TABLE test_jsonb (
json_type text,
*************** SELECT jsonb_typeof('"1.0"') AS string;
*** 1256,1261 ****
--- 1278,1397 ----
string
(1 row)
+ -- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+ SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+ jsonb_build_array
+ -------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+ (1 row)
+
+ SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+ jsonb_build_object
+ -------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+ (1 row)
+
+ SELECT jsonb_build_object(
+ 'a', jsonb_build_object('b',false,'c',99),
+ 'd', jsonb_build_object('e',array[9,8,7]::int[],
+ 'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+ jsonb_build_object
+ ------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+ (1 row)
+
+ -- empty objects/arrays
+ SELECT jsonb_build_array();
+ jsonb_build_array
+ -------------------
+ []
+ (1 row)
+
+ SELECT jsonb_build_object();
+ jsonb_build_object
+ --------------------
+ {}
+ (1 row)
+
+ -- make sure keys are quoted
+ SELECT jsonb_build_object(1,2);
+ jsonb_build_object
+ --------------------
+ {"1": 2}
+ (1 row)
+
+ -- keys must be scalar and not null
+ SELECT jsonb_build_object(null,2);
+ ERROR: arg 1: key cannot be null
+ SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ ERROR: key value must be scalar, not array, composite or json
+ SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ ERROR: key value must be scalar, not array, composite or json
+ SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ ERROR: key value must be scalar, not array, composite or json
+ CREATE TEMP TABLE foo (serial_num int, name text, type text);
+ INSERT INTO foo VALUES (847001,'t15','GE1043');
+ INSERT INTO foo VALUES (847002,'t16','GE1043');
+ INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+ SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+ FROM foo;
+ jsonb_build_object
+ -------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+ (1 row)
+
+ -- jsonb_object
+ -- one dimension
+ SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+ jsonb_object
+ ---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+ (1 row)
+
+ -- same but with two dimensions
+ SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ jsonb_object
+ ---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+ (1 row)
+
+ -- odd number error
+ SELECT jsonb_object('{a,b,c}');
+ ERROR: array must have even number of elements
+ -- one column error
+ SELECT jsonb_object('{{a},{b}}');
+ ERROR: array must have two columns
+ -- too many columns error
+ SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ ERROR: array must have two columns
+ -- too many dimensions error
+ SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ ERROR: wrong number of array subscripts
+ --two argument form of jsonb_object
+ select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+ jsonb_object
+ --------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+ (1 row)
+
+ -- too many dimensions
+ SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ ERROR: wrong number of array subscripts
+ -- mismatched dimensions
+ select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ ERROR: mismatched array dimensions
+ select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ ERROR: mismatched array dimensions
+ -- null key error
+ select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ ERROR: null value not allowed for object key
+ -- empty key is allowed
+ select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+ jsonb_object
+ -------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+ (1 row)
+
-- extract_path, extract_path_as_text
SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
jsonb_extract_path
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
new file mode 100644
index f1ed021..a808ed4
*** a/src/test/regress/sql/jsonb.sql
--- b/src/test/regress/sql/jsonb.sql
*************** SELECT ' '::jsonb; -- ERROR, no val
*** 62,67 ****
--- 62,83 ----
-- make sure jsonb is passed through json generators without being escaped
SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
+ --jsonb_agg
+
+ CREATE TEMP TABLE rows AS
+ SELECT x, 'txt' || x as y
+ FROM generate_series(1,3) AS x;
+
+ SELECT jsonb_agg(q)
+ FROM ( SELECT $$a$$ || x AS b, y AS c,
+ ARRAY[ROW(x.*,ARRAY[1,2,3]),
+ ROW(y.*,ARRAY[4,5,6])] AS z
+ FROM generate_series(1,2) x,
+ generate_series(4,5) y) q;
+
+ SELECT jsonb_agg(q)
+ FROM rows q;
+
-- jsonb extraction functions
CREATE TEMP TABLE test_jsonb (
json_type text,
*************** SELECT jsonb_typeof('"hello"') AS string
*** 263,268 ****
--- 279,364 ----
SELECT jsonb_typeof('"true"') AS string;
SELECT jsonb_typeof('"1.0"') AS string;
+ -- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+
+ SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+ SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+ SELECT jsonb_build_object(
+ 'a', jsonb_build_object('b',false,'c',99),
+ 'd', jsonb_build_object('e',array[9,8,7]::int[],
+ 'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+
+
+ -- empty objects/arrays
+ SELECT jsonb_build_array();
+
+ SELECT jsonb_build_object();
+
+ -- make sure keys are quoted
+ SELECT jsonb_build_object(1,2);
+
+ -- keys must be scalar and not null
+ SELECT jsonb_build_object(null,2);
+
+ SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+
+ SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+
+ SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+
+ CREATE TEMP TABLE foo (serial_num int, name text, type text);
+ INSERT INTO foo VALUES (847001,'t15','GE1043');
+ INSERT INTO foo VALUES (847002,'t16','GE1043');
+ INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+
+ SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+ FROM foo;
+
+ -- jsonb_object
+
+ -- one dimension
+ SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+
+ -- same but with two dimensions
+ SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+ -- odd number error
+ SELECT jsonb_object('{a,b,c}');
+
+ -- one column error
+ SELECT jsonb_object('{{a},{b}}');
+
+ -- too many columns error
+ SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+
+ -- too many dimensions error
+ SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+
+ --two argument form of jsonb_object
+
+ select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+
+ -- too many dimensions
+ SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+ -- mismatched dimensions
+
+ select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+
+ select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+
+ -- null key error
+
+ select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+
+ -- empty key is allowed
+
+ select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+
+
+
-- extract_path, extract_path_as_text
SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
On 10/13/2014 10:39 AM, Pavel Stehule wrote:
Hi
The JsonSemAction struct "sem" is not fully initialized:
its array_element_start member is left uninitialized, which causes a segfault on my machine
*** ./utils/adt/jsonb.c.orig	2014-10-13 16:37:00.479708142 +0200
--- ./utils/adt/jsonb.c	2014-10-13 16:36:33.704650644 +0200
***************
*** 786,791 ****
--- 786,793 ----
  	sem.scalar = jsonb_in_scalar;
  	sem.object_field_start = jsonb_in_object_field_start;
+ 	sem.array_element_start = NULL;
+
  	pg_parse_json(lex, &sem);
  }
I am not sure, if this fix is valid, but all tests are passed now
Good find. I think what we should probably do is initialize the whole
thing with:
memset(&sem, 0, sizeof(JsonSemAction));
before assigning anything to its fields. That would be consistent with
what we do elsewhere.
I'll make that change and submit a new patch.
Please stop using this thread, however. It's inappropriate for reviewing
this patch.
cheers
andrew
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers