jsonb generator functions

Started by Andrew Dunstan over 11 years ago (22 messages)
#1 Andrew Dunstan
andrew@dunslane.net
1 attachment(s)

Here is a patch for the generator and aggregate functions for jsonb that
we didn't manage to get done in time for 9.4. They are all equivalents
of the similarly named json functions. Included are

to_jsonb
jsonb_build_object
jsonb_build_array
jsonb_object
jsonb_agg
jsonb_object_agg

Still to come: documentation.
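
In the meantime, here's a rough sketch of the intended usage, assuming
the same behaviour as the existing json equivalents (expected results
shown as comments):

   select to_jsonb(row(42, 'foo'));                -- {"f1": 42, "f2": "foo"}
   select jsonb_build_object('a', 1, 'b', true);   -- {"a": 1, "b": true}
   select jsonb_object('{a,1,b,2}');               -- {"a": "1", "b": "2"}
   select jsonb_agg(x) from generate_series(1,3) x;        -- [1, 2, 3]
   select jsonb_object_agg(k, v)
     from (values ('a',1),('b',2)) t(k,v);                 -- {"a": 1, "b": 2}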

Adding to the next commitfest.

cheers

andrew

Attachments:

jsonbmissingfuncs2.patch (text/x-patch)
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
index 2fd87fc..6c23b75 100644
--- a/src/backend/utils/adt/jsonb.c
+++ b/src/backend/utils/adt/jsonb.c
@@ -12,11 +12,20 @@
  */
 #include "postgres.h"
 
+#include "miscadmin.h"
+#include "access/htup_details.h"
+#include "access/transam.h"
+#include "catalog/pg_cast.h"
+#include "catalog/pg_type.h"
 #include "libpq/pqformat.h"
 #include "utils/builtins.h"
+#include "utils/datetime.h"
+#include "utils/lsyscache.h"
 #include "utils/json.h"
 #include "utils/jsonapi.h"
 #include "utils/jsonb.h"
+#include "utils/syscache.h"
+#include "utils/typcache.h"
 
 typedef struct JsonbInState
 {
@@ -24,6 +33,23 @@ typedef struct JsonbInState
 	JsonbValue *res;
 } JsonbInState;
 
+/* unlike with json categories, we need to treat json and jsonb differently */
+typedef enum					/* type categories for datum_to_jsonb */
+{
+	JSONBTYPE_NULL,				/* null, so we didn't bother to identify a type */
+	JSONBTYPE_BOOL,				/* boolean (built-in types only) */
+	JSONBTYPE_NUMERIC,			/* numeric (ditto) */
+	JSONBTYPE_TIMESTAMP,		/* we use special formatting for timestamp */
+	JSONBTYPE_TIMESTAMPTZ,		/* ... and timestamptz */
+	JSONBTYPE_JSON,				/* JSON */
+	JSONBTYPE_JSONB,			/* JSONB */
+	JSONBTYPE_ARRAY,			/* array */
+	JSONBTYPE_COMPOSITE,		/* composite */
+	JSONBTYPE_JSONCAST,			/* something with an explicit cast to JSON */
+	JSONBTYPE_JSONBCAST,		/* something with an explicit cast to JSONB */
+	JSONBTYPE_OTHER				/* all else */
+}	JsonbTypeCategory;
+
 static inline Datum jsonb_from_cstring(char *json, int len);
 static size_t checkStringLen(size_t len);
 static void jsonb_in_object_start(void *pstate);
@@ -33,6 +59,22 @@ static void jsonb_in_array_end(void *pstate);
 static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull);
 static void jsonb_put_escaped_value(StringInfo out, JsonbValue *scalarVal);
 static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void composite_to_jsonb(Datum composite, JsonbInState *result);
+static void array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims,
+				   Datum *vals, bool *nulls, int *valcount,
+				   JsonbTypeCategory tcategory, Oid outfuncoid);
+static void array_to_jsonb_internal(Datum array, JsonbInState *result);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar);
+static void add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar);
 
 /*
  * jsonb type input function
@@ -462,3 +504,1278 @@ JsonbToCString(StringInfo out, JsonbContainer *in, int estimated_len)
 
 	return out->data;
 }
+
+
+/*
+ * Determine how we want to render values of a given type in datum_to_jsonb.
+ *
+ * Given the datatype OID, return its JsonbTypeCategory, as well as the type's
+ * output function OID.  If the returned category is JSONBTYPE_JSONCAST or
+ * JSONBTYPE_JSONBCAST, we return the OID of the relevant cast function instead.
+ */
+static void
+jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid)
+{
+	bool		typisvarlena;
+
+	/* Look through any domain */
+	typoid = getBaseType(typoid);
+
+	/* We'll usually need to return the type output function */
+	getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+
+	/* Check for known types */
+	switch (typoid)
+	{
+		case BOOLOID:
+			*tcategory = JSONBTYPE_BOOL;
+			break;
+
+		case INT2OID:
+		case INT4OID:
+		case INT8OID:
+		case FLOAT4OID:
+		case FLOAT8OID:
+		case NUMERICOID:
+			*tcategory = JSONBTYPE_NUMERIC;
+			break;
+
+		case TIMESTAMPOID:
+			*tcategory = JSONBTYPE_TIMESTAMP;
+			break;
+
+		case TIMESTAMPTZOID:
+			*tcategory = JSONBTYPE_TIMESTAMPTZ;
+			break;
+
+		case JSONBOID:
+			*tcategory = JSONBTYPE_JSONB;
+			break;
+
+		case JSONOID:
+			*tcategory = JSONBTYPE_JSON;
+			break;
+
+		default:
+			/* Check for arrays and composites */
+			if (OidIsValid(get_element_type(typoid)))
+				*tcategory = JSONBTYPE_ARRAY;
+			else if (type_is_rowtype(typoid))
+				*tcategory = JSONBTYPE_COMPOSITE;
+			else
+			{
+				/* It's probably the general case ... */
+				*tcategory = JSONBTYPE_OTHER;
+
+				/*
+				 * but let's look for a cast to json or jsonb, if it's not
+				 * built-in
+				 */
+				if (typoid >= FirstNormalObjectId)
+				{
+					HeapTuple	tuple;
+
+					tuple = SearchSysCache2(CASTSOURCETARGET,
+											ObjectIdGetDatum(typoid),
+											ObjectIdGetDatum(JSONBOID));
+					if (HeapTupleIsValid(tuple))
+					{
+						Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+						if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+						{
+							*tcategory = JSONBTYPE_JSONBCAST;
+							*outfuncoid = castForm->castfunc;
+						}
+
+						ReleaseSysCache(tuple);
+					}
+					else
+					{
+						tuple = SearchSysCache2(CASTSOURCETARGET,
+												ObjectIdGetDatum(typoid),
+												ObjectIdGetDatum(JSONOID));
+						if (HeapTupleIsValid(tuple))
+						{
+							Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+							if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+							{
+								*tcategory = JSONBTYPE_JSONCAST;
+								*outfuncoid = castForm->castfunc;
+							}
+
+							ReleaseSysCache(tuple);
+						}
+					}
+				}
+				break;
+			}
+	}
+}
+
+/*
+ * Turn a Datum into jsonb, adding it to the result JsonbInState.
+ *
+ * tcategory and outfuncoid are from a previous call to jsonb_categorize_type,
+ * except that if is_null is true then they can be invalid.
+ *
+ * If key_scalar is true, the value is stored as a key, so insist
+ * it's of an acceptable type, and force it to be a jbvString.
+ */
+static void
+datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar)
+{
+	char	   *outputstr;
+	bool		numeric_error;
+	JsonbValue	jb;
+	bool		scalar_jsonb = false;
+
+	if (is_null)
+	{
+		jb.type = jbvNull;
+	}
+	else if (key_scalar &&
+			 (tcategory == JSONBTYPE_ARRAY ||
+			  tcategory == JSONBTYPE_COMPOSITE ||
+			  tcategory == JSONBTYPE_JSON ||
+			  tcategory == JSONBTYPE_JSONB ||
+			  tcategory == JSONBTYPE_JSONCAST ||
+			  tcategory == JSONBTYPE_JSONBCAST))
+	{
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+		  errmsg("key value must be scalar, not array, composite or json")));
+	}
+	else
+	{
+		if (tcategory == JSONBTYPE_JSONCAST || tcategory == JSONBTYPE_JSONBCAST)
+			val = OidFunctionCall1(outfuncoid, val);
+
+		switch (tcategory)
+		{
+			case JSONBTYPE_ARRAY:
+				array_to_jsonb_internal(val, result);
+				break;
+			case JSONBTYPE_COMPOSITE:
+				composite_to_jsonb(val, result);
+				break;
+			case JSONBTYPE_BOOL:
+				if (key_scalar)
+				{
+					outputstr = DatumGetBool(val) ? "true" : "false";
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					jb.type = jbvBool;
+					jb.val.boolean = DatumGetBool(val);
+				}
+				break;
+			case JSONBTYPE_NUMERIC:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				if (key_scalar)
+				{
+					/* always quote keys */
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					/*
+					 * Make it numeric if it's a valid JSON number, otherwise
+					 * a string. Numeric output that isn't a valid JSON number
+					 * (i.e. NaN) will always have an 'N' or 'n' in it.
+					 */
+					numeric_error = (strchr(outputstr, 'N') != NULL ||
+									 strchr(outputstr, 'n') != NULL);
+					if (!numeric_error)
+					{
+						jb.type = jbvNumeric;
+						jb.val.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(outputstr), 0, -1));
+
+						pfree(outputstr);
+					}
+					else
+					{
+						jb.type = jbvString;
+						jb.val.string.len = strlen(outputstr);
+						jb.val.string.val = outputstr;
+					}
+				}
+				break;
+			case JSONBTYPE_TIMESTAMP:
+				{
+					Timestamp	timestamp;
+					struct pg_tm tm;
+					fsec_t		fsec;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestamp(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
+						EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_TIMESTAMPTZ:
+				{
+					TimestampTz timestamp;
+					struct pg_tm tm;
+					int			tz;
+					fsec_t		fsec;
+					const char *tzn = NULL;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestampTz(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0)
+						EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_JSONCAST:
+			case JSONBTYPE_JSON:
+				{
+					/* parse the json right into the existing result object */
+					JsonLexContext *lex;
+					JsonSemAction sem;
+					text	   *json = DatumGetTextP(val);
+
+					lex = makeJsonLexContext(json, true);
+
+					sem.semstate = (void *) result;
+
+					sem.object_start = jsonb_in_object_start;
+					sem.array_start = jsonb_in_array_start;
+					sem.object_end = jsonb_in_object_end;
+					sem.array_end = jsonb_in_array_end;
+					sem.scalar = jsonb_in_scalar;
+					sem.object_field_start = jsonb_in_object_field_start;
+
+					pg_parse_json(lex, &sem);
+
+				}
+				break;
+			case JSONBTYPE_JSONBCAST:
+			case JSONBTYPE_JSONB:
+				{
+					Jsonb	   *jsonb = DatumGetJsonb(val);
+					int			type;
+					JsonbIterator *it;
+
+					it = JsonbIteratorInit(&jsonb->root);
+
+					if (JB_ROOT_IS_SCALAR(jsonb))
+					{
+						(void) JsonbIteratorNext(&it, &jb, true);
+						Assert(jb.type == jbvArray);
+						(void) JsonbIteratorNext(&it, &jb, true);
+						scalar_jsonb = true;
+					}
+					else
+					{
+						while ((type = JsonbIteratorNext(&it, &jb, false))
+							   != WJB_DONE)
+						{
+							if (type == WJB_END_ARRAY || type == WJB_END_OBJECT ||
+								type == WJB_BEGIN_ARRAY || type == WJB_BEGIN_OBJECT)
+								result->res = pushJsonbValue(&result->parseState,
+															 type, NULL);
+							else
+								result->res = pushJsonbValue(&result->parseState,
+															 type, &jb);
+						}
+					}
+				}
+				break;
+			default:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				jb.type = jbvString;
+				jb.val.string.len = checkStringLen(strlen(outputstr));
+				jb.val.string.val = outputstr;
+				break;
+		}
+	}
+	if (tcategory >= JSONBTYPE_JSON && tcategory <= JSONBTYPE_JSONBCAST &&
+		!scalar_jsonb)
+	{
+		/* work has been done recursively */
+		return;
+	}
+	else if (result->parseState == NULL)
+	{
+		/* single root scalar */
+		JsonbValue	va;
+
+		va.type = jbvArray;
+		va.val.array.rawScalar = true;
+		va.val.array.nElems = 1;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, &va);
+		result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+	}
+	else
+	{
+		JsonbValue *o = &result->parseState->contVal;
+
+		switch (o->type)
+		{
+			case jbvArray:
+				result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+				break;
+			case jbvObject:
+				result->res = pushJsonbValue(&result->parseState,
+											 key_scalar ? WJB_KEY : WJB_VALUE,
+											 &jb);
+				break;
+			default:
+				elog(ERROR, "unexpected parent of nested structure");
+		}
+	}
+}
+
+/*
+ * Process a single dimension of an array.
+ * If it's the innermost dimension, output the values, otherwise call
+ * ourselves recursively to process the next dimension.
+ */
+static void
+array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims, Datum *vals,
+				   bool *nulls, int *valcount, JsonbTypeCategory tcategory,
+				   Oid outfuncoid)
+{
+	int			i;
+
+	Assert(dim < ndims);
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 1; i <= dims[dim]; i++)
+	{
+		if (dim + 1 == ndims)
+		{
+			datum_to_jsonb(vals[*valcount], nulls[*valcount], result, tcategory,
+						   outfuncoid, false);
+			(*valcount)++;
+		}
+		else
+		{
+			array_dim_to_jsonb(result, dim + 1, ndims, dims, vals, nulls,
+							   valcount, tcategory, outfuncoid);
+		}
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+}
+
+/*
+ * Turn an array into jsonb.
+ */
+static void
+array_to_jsonb_internal(Datum array, JsonbInState *result)
+{
+	ArrayType  *v = DatumGetArrayTypeP(array);
+	Oid			element_type = ARR_ELEMTYPE(v);
+	int		   *dim;
+	int			ndim;
+	int			nitems;
+	int			count = 0;
+	Datum	   *elements;
+	bool	   *nulls;
+	int16		typlen;
+	bool		typbyval;
+	char		typalign;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	ndim = ARR_NDIM(v);
+	dim = ARR_DIMS(v);
+	nitems = ArrayGetNItems(ndim, dim);
+
+	if (nitems <= 0)
+	{
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+		return;
+	}
+
+	get_typlenbyvalalign(element_type,
+						 &typlen, &typbyval, &typalign);
+
+	jsonb_categorize_type(element_type,
+						  &tcategory, &outfuncoid);
+
+	deconstruct_array(v, element_type, typlen, typbyval,
+					  typalign, &elements, &nulls,
+					  &nitems);
+
+	array_dim_to_jsonb(result, 0, ndim, dim, elements, nulls, &count, tcategory,
+					   outfuncoid);
+
+	pfree(elements);
+	pfree(nulls);
+}
+
+/*
+ * Turn a composite / record into jsonb.
+ */
+static void
+composite_to_jsonb(Datum composite, JsonbInState *result)
+{
+	HeapTupleHeader td;
+	Oid			tupType;
+	int32		tupTypmod;
+	TupleDesc	tupdesc;
+	HeapTupleData tmptup,
+			   *tuple;
+	int			i;
+
+	td = DatumGetHeapTupleHeader(composite);
+
+	/* Extract rowtype info and find a tupdesc */
+	tupType = HeapTupleHeaderGetTypeId(td);
+	tupTypmod = HeapTupleHeaderGetTypMod(td);
+	tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+
+	/* Build a temporary HeapTuple control structure */
+	tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
+	tmptup.t_data = td;
+	tuple = &tmptup;
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < tupdesc->natts; i++)
+	{
+		Datum		val;
+		bool		isnull;
+		char	   *attname;
+		JsonbTypeCategory tcategory;
+		Oid			outfuncoid;
+		JsonbValue	v;
+
+		if (tupdesc->attrs[i]->attisdropped)
+			continue;
+
+		attname = NameStr(tupdesc->attrs[i]->attname);
+
+		v.type = jbvString;
+		/* don't need checkStringLen here - can't exceed maximum name length */
+		v.val.string.len = strlen(attname);
+		v.val.string.val = attname;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_KEY, &v);
+
+		val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+		if (isnull)
+		{
+			tcategory = JSONBTYPE_NULL;
+			outfuncoid = InvalidOid;
+		}
+		else
+			jsonb_categorize_type(tupdesc->attrs[i]->atttypid,
+								  &tcategory, &outfuncoid);
+
+		datum_to_jsonb(val, isnull, result, tcategory, outfuncoid, false);
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_OBJECT, NULL);
+	ReleaseTupleDesc(tupdesc);
+}
+
+/*
+ * Add the value of "val" to "result" as jsonb.
+ *
+ * This is just a thin wrapper around datum_to_jsonb.  If the same type will be
+ * converted many times, avoid using this; better to do the jsonb_categorize_type
+ * lookups only once.
+ */
+
+static void
+add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar)
+{
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (is_null)
+	{
+		tcategory = JSONBTYPE_NULL;
+		outfuncoid = InvalidOid;
+	}
+	else
+		jsonb_categorize_type(val_type,
+							  &tcategory, &outfuncoid);
+
+	datum_to_jsonb(val, is_null, result, tcategory, outfuncoid, key_scalar);
+}
+
+/*
+ * SQL function to_jsonb(anyvalue)
+ */
+Datum
+to_jsonb(PG_FUNCTION_ARGS)
+{
+	Datum		val = PG_GETARG_DATUM(0);
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
+	JsonbInState result;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &result, tcategory, outfuncoid, false);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_object(variadic "any")
+ */
+Datum
+jsonb_build_object(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	if (nargs % 2 != 0)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("invalid number of arguments: object must be matched key value pairs")));
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < nargs; i += 2)
+	{
+
+		/* process key */
+
+		if (PG_ARGISNULL(i))
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: key cannot be null", i + 1)));
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+
+		/*
+		 * turn a constant (more or less literal) value that's of unknown type
+		 * into text. Unknowns come in as a cstring pointer.
+		 */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+
+		add_jsonb(arg, false, &result, val_type, true);
+
+		/* process value */
+
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
+		/* see comments above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i + 1))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i + 1));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i + 1);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 2)));
+		add_jsonb(arg, PG_ARGISNULL(i + 1), &result, val_type, false);
+
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_object where it gets 0 arguments.
+ */
+Datum
+jsonb_build_object_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_array(variadic "any")
+ */
+Datum
+jsonb_build_array(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 0; i < nargs; i++)
+	{
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+		arg = PG_GETARG_DATUM(i);
+		/* see comments in jsonb_build_object above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+		add_jsonb(arg, PG_ARGISNULL(i), &result, val_type, false);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_array where it gets 0 arguments.
+ */
+Datum
+jsonb_build_array_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * SQL function jsonb_object(text[])
+ *
+ * take a one or two dimensional array of text as name value pairs
+ * for a jsonb object.
+ *
+ */
+Datum
+jsonb_object(PG_FUNCTION_ARGS)
+{
+	ArrayType  *in_array = PG_GETARG_ARRAYTYPE_P(0);
+	int			ndims = ARR_NDIM(in_array);
+	Datum	   *in_datums;
+	bool	   *in_nulls;
+	int			in_count,
+				count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	switch (ndims)
+	{
+		case 0:
+			goto close_object;
+			break;
+
+		case 1:
+			if ((ARR_DIMS(in_array)[0]) % 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have even number of elements")));
+			break;
+
+		case 2:
+			if ((ARR_DIMS(in_array)[1]) != 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have two columns")));
+			break;
+
+		default:
+			ereport(ERROR,
+					(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+					 errmsg("wrong number of array subscripts")));
+	}
+
+	deconstruct_array(in_array,
+					  TEXTOID, -1, false, 'i',
+					  &in_datums, &in_nulls, &in_count);
+
+	count = in_count / 2;
+
+	for (i = 0; i < count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (in_nulls[i * 2])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(in_datums[i * 2]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (in_nulls[i * 2 + 1])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(in_datums[i * 2 + 1]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	pfree(in_datums);
+	pfree(in_nulls);
+
+close_object:
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_object(text[], text[])
+ *
+ * take separate name and value arrays of text to construct a json object
+ * pairwise.
+ */
+Datum
+jsonb_object_two_arg(PG_FUNCTION_ARGS)
+{
+	ArrayType  *key_array = PG_GETARG_ARRAYTYPE_P(0);
+	ArrayType  *val_array = PG_GETARG_ARRAYTYPE_P(1);
+	int			nkdims = ARR_NDIM(key_array);
+	int			nvdims = ARR_NDIM(val_array);
+	Datum	   *key_datums,
+			   *val_datums;
+	bool	   *key_nulls,
+			   *val_nulls;
+	int			key_count,
+				val_count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	if (nkdims > 1 || nkdims != nvdims)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("wrong number of array subscripts")));
+
+	if (nkdims == 0)
+		PG_RETURN_POINTER(JsonbValueToJsonb(pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL)));
+
+	deconstruct_array(key_array,
+					  TEXTOID, -1, false, 'i',
+					  &key_datums, &key_nulls, &key_count);
+
+	deconstruct_array(val_array,
+					  TEXTOID, -1, false, 'i',
+					  &val_datums, &val_nulls, &val_count);
+
+	if (key_count != val_count)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("mismatched array dimensions")));
+
+	for (i = 0; i < key_count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (key_nulls[i])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(key_datums[i]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (val_nulls[i])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(val_datums[i]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	pfree(key_datums);
+	pfree(key_nulls);
+	pfree(val_datums);
+	pfree(val_nulls);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * jsonb_agg aggregate function
+ */
+Datum
+jsonb_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar = false;
+	JsonbIterator *it;
+	Jsonb	   *jbelem;
+	JsonbValue	v;
+	int			type;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, false);
+
+	jbelem = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_ARRAY, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbelem->root);
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+					v.val.numeric = DirectFunctionCall1(numeric_uplus, NumericGetDatum(v.val.numeric));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 type, &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	result->res = pushJsonbValue(&result->parseState,
+								 WJB_END_ARRAY, NULL);
+
+
+	out = JsonbValueToJsonb(result->res);
+
+	PG_RETURN_POINTER(out);
+}
+
+/*
+ * jsonb_object_agg aggregate function
+ */
+Datum
+jsonb_object_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type;
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar;
+	JsonbIterator *it;
+	Jsonb	   *jbkey,
+			   *jbval;
+	JsonbValue	v;
+	int			type;
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_object_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, true);
+
+	jbkey = JsonbValueToJsonb(elem.res);
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(2) ? (Datum) 0 : PG_GETARG_DATUM(2);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, false);
+
+	jbval = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_OBJECT, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbkey->root);
+
+	/*
+	 * keys should be scalar, and we should have already checked for that
+	 * above when calling datum_to_jsonb, so we only need to look for these
+	 * things.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (!v.val.array.rawScalar)
+					elog(ERROR, "unexpected structure for key");
+				break;
+			case WJB_ELEM:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else
+				{
+					ereport(ERROR,
+							(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+							 errmsg("object keys must be strings")));
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 WJB_KEY, &v);
+				break;
+			case WJB_END_ARRAY:
+				break;
+			default:
+				elog(ERROR, "unexpected structure for key");
+				break;
+		}
+	}
+
+	it = JsonbIteratorInit(&jbval->root);
+
+	single_scalar = false;
+
+	/*
+	 * values can be anything, including structured and null, so we treat
+	 * them as in json_agg_transfn, except that single scalars are always
+	 * pushed as WJB_VALUE items.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+					v.val.numeric = DirectFunctionCall1(numeric_uplus, NumericGetDatum(v.val.numeric));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 single_scalar ? WJB_VALUE : type,
+											 &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_object_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	result->res = pushJsonbValue(&result->parseState,
+								 WJB_END_OBJECT, NULL);
+
+
+	out = JsonbValueToJsonb(result->res);
+
+	PG_RETURN_POINTER(out);
+}
diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c
index 04f35bf..d546fd1 100644
--- a/src/backend/utils/adt/jsonb_util.c
+++ b/src/backend/utils/adt/jsonb_util.c
@@ -1328,7 +1328,7 @@ convertJsonbValue(StringInfo buffer, JEntry *header, JsonbValue *val, int level)
 	else if (val->type == jbvObject)
 		convertJsonbObject(buffer, header, val, level);
 	else
-		elog(ERROR, "unknown type of jsonb container");
+		elog(ERROR, "unknown type of jsonb container to convert");
 }
 
 static void
diff --git a/src/include/catalog/pg_aggregate.h b/src/include/catalog/pg_aggregate.h
index 3ba9e5e..8e0735b 100644
--- a/src/include/catalog/pg_aggregate.h
+++ b/src/include/catalog/pg_aggregate.h
@@ -286,6 +286,10 @@ DATA(insert ( 3545	n 0 bytea_string_agg_transfn	bytea_string_agg_finalfn	-				-
 DATA(insert ( 3175	n 0 json_agg_transfn	json_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3197	n 0 json_object_agg_transfn json_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 
+/* jsonb */
+DATA(insert ( 3267	n 0 jsonb_agg_transfn	jsonb_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+DATA(insert ( 3270	n 0 jsonb_object_agg_transfn jsonb_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+
 /* ordered-set and hypothetical-set aggregates */
 DATA(insert ( 3972	o 1 ordered_set_transition			percentile_disc_final					-		-		-		t f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3974	o 1 ordered_set_transition			percentile_cont_float8_final			-		-		-		f f 0	2281	0	0		0	_null_ _null_ ));
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index 3ce9849..d2c9032 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4595,6 +4595,33 @@ DESCR("I/O");
 DATA(insert OID =  3803 (  jsonb_send		PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "3802" _null_ _null_ _null_ _null_	jsonb_send _null_ _null_ _null_ ));
 DESCR("I/O");
 
+DATA(insert OID = 3263 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "1009" _null_ _null_ _null_ _null_ jsonb_object _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3264 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 3802 "1009 1009" _null_ _null_ _null_ _null_ jsonb_object_two_arg _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3787 (  to_jsonb	   PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ to_jsonb _null_ _null_ _null_ ));
+DESCR("map input to jsonb");
+DATA(insert OID = 3265 (  jsonb_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ jsonb_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate transition function");
+DATA(insert OID = 3266 (  jsonb_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate final function");
+DATA(insert OID = 3267 (  jsonb_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 1 0 3802 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate input into jsonb");
+DATA(insert OID = 3268 (  jsonb_object_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ jsonb_object_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate transition function");
+DATA(insert OID = 3269 (  jsonb_object_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_object_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate final function");
+DATA(insert OID = 3270 (  jsonb_object_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 2 0 3802 "2276 2276" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate inputs into jsonb object");
+DATA(insert OID = 3259 (  jsonb_build_array	   PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_array _null_ _null_ _null_ ));
+DESCR("build a jsonb array from any inputs");
+DATA(insert OID = 3260 (  jsonb_build_array	   PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_array_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb array");
+DATA(insert OID = 3261 (  jsonb_build_object    PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_object _null_ _null_ _null_ ));
+DESCR("build a jsonb object from pairwise key/value inputs");
+DATA(insert OID = 3262 (  jsonb_build_object    PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_object_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb object");
+
 DATA(insert OID = 3478 (  jsonb_object_field			PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field _null_ _null_ _null_ ));
 DATA(insert OID = 3214 (  jsonb_object_field_text	PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 25  "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field_text _null_ _null_ _null_ ));
 DATA(insert OID = 3215 (  jsonb_array_element		PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 23" _null_ _null_ "{from_json, element_index}" _null_ jsonb_array_element _null_ _null_ _null_ ));
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index 91e3e14..7fe12b8 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -315,6 +315,22 @@ extern Datum jsonb_recv(PG_FUNCTION_ARGS);
 extern Datum jsonb_send(PG_FUNCTION_ARGS);
 extern Datum jsonb_typeof(PG_FUNCTION_ARGS);
 
+/* generator routines */
+extern Datum to_jsonb(PG_FUNCTION_ARGS);
+
+extern Datum jsonb_build_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_object_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_two_arg(PG_FUNCTION_ARGS);
+
+/* jsonb_agg, json_object_agg functions */
+extern Datum jsonb_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_agg_finalfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_finalfn(PG_FUNCTION_ARGS);
+
 /* Indexing-related ops */
 extern Datum jsonb_exists(PG_FUNCTION_ARGS);
 extern Datum jsonb_exists_any(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index eb37da7..d449757 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -301,6 +301,28 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1220,6 +1242,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/expected/jsonb_1.out b/src/test/regress/expected/jsonb_1.out
index f3bfc7b..a64158c 100644
--- a/src/test/regress/expected/jsonb_1.out
+++ b/src/test/regress/expected/jsonb_1.out
@@ -301,6 +301,28 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1220,6 +1242,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index ed266d5..cf79cbf 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -62,6 +62,22 @@ SELECT '    '::jsonb;			-- ERROR, no value
 -- make sure jsonb is passed through json generators without being escaped
 SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
 
+--jsonb_agg
+
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -256,6 +272,86 @@ SELECT jsonb_typeof('"hello"') AS string;
 SELECT jsonb_typeof('"true"') AS string;
 SELECT jsonb_typeof('"1.0"') AS string;
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+
+SELECT jsonb_build_object();
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+
+-- jsonb_object
+
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+
+--two argument form of jsonb_object
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- mismatched dimensions
+
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+
+-- null key error
+
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+
+-- empty key is allowed
+
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+
+
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
#2Peter Geoghegan
pg@heroku.com
In reply to: Andrew Dunstan (#1)
Re: jsonb generator functions

On Fri, Sep 26, 2014 at 1:54 PM, Andrew Dunstan <andrew@dunslane.net> wrote:

Here is a patch for the generator and aggregate functions for jsonb that we
didn't manage to get done in time for 9.4.

That's cool, but I hope someone revisits adding a concatenate
operator. That's the biggest omission IMHO. I'm not going to have time
for that.

--
Peter Geoghegan


#3Andrew Dunstan
andrew@dunslane.net
In reply to: Peter Geoghegan (#2)
Re: jsonb generator functions

On 09/26/2014 05:00 PM, Peter Geoghegan wrote:

On Fri, Sep 26, 2014 at 1:54 PM, Andrew Dunstan <andrew@dunslane.net> wrote:

Here is a patch for the generator and aggregate functions for jsonb that we
didn't manage to get done in time for 9.4.

That's cool, but I hope someone revisits adding a concatenate
operator. That's the biggest omission IMHO. I'm not going to have time
for that.

This patch is the work that I have publicly promised to do.

Dmitry Dolgov is in fact working on jsonb_concat(), and several other
utility functions.

cheers

andrew


#4Andrew Dunstan
andrew@dunslane.net
In reply to: Andrew Dunstan (#1)
1 attachment(s)
Re: jsonb generator functions

On 09/26/2014 04:54 PM, Andrew Dunstan wrote:

Here is a patch for the generator and aggregate functions for jsonb
that we didn't manage to get done in time for 9.4. They are all
equivalents of the similarly named json functions. Included are

to_jsonb
jsonb_build_object
jsonb_build_array
jsonb_object
jsonb_agg
jsonb_object_agg

Still to come: documentation.

Adding to the next commitfest.

Revised patch to fix compiler warnings.
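
For anyone who wants to try this out before the documentation lands, here
is a quick sketch of the intended behaviour, along the lines of the
regression tests included in the patch (illustrative only; the aggregate
examples in particular are not taken verbatim from the tests):

  select jsonb_build_object('a', 1, 'b', true);
  -- {"a": 1, "b": true}
  select jsonb_build_array('x', 42, null);
  -- ["x", 42, null]
  select jsonb_object('{a,1,b,2}');
  -- {"a": "1", "b": "2"}
  select jsonb_agg(x) from generate_series(1,3) x;
  -- [1, 2, 3]
  select jsonb_object_agg(k, v) from (values ('a',1),('b',2)) t(k,v);
  -- {"a": 1, "b": 2}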

cheers

andrew

Attachments:

jsonbmissingfuncs3.patchtext/x-patch; name=jsonbmissingfuncs3.patchDownload
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
index 2fd87fc..2712761 100644
--- a/src/backend/utils/adt/jsonb.c
+++ b/src/backend/utils/adt/jsonb.c
@@ -12,11 +12,20 @@
  */
 #include "postgres.h"
 
+#include "miscadmin.h"
+#include "access/htup_details.h"
+#include "access/transam.h"
+#include "catalog/pg_cast.h"
+#include "catalog/pg_type.h"
 #include "libpq/pqformat.h"
 #include "utils/builtins.h"
+#include "utils/datetime.h"
+#include "utils/lsyscache.h"
 #include "utils/json.h"
 #include "utils/jsonapi.h"
 #include "utils/jsonb.h"
+#include "utils/syscache.h"
+#include "utils/typcache.h"
 
 typedef struct JsonbInState
 {
@@ -24,6 +33,23 @@ typedef struct JsonbInState
 	JsonbValue *res;
 } JsonbInState;
 
+/* unlike with json categories, we need to treat json and jsonb differently */
+typedef enum					/* type categories for datum_to_jsonb */
+{
+	JSONBTYPE_NULL,				/* null, so we didn't bother to identify */
+	JSONBTYPE_BOOL,				/* boolean (built-in types only) */
+	JSONBTYPE_NUMERIC,			/* numeric (ditto) */
+	JSONBTYPE_TIMESTAMP,		/* we use special formatting for timestamp */
+	JSONBTYPE_TIMESTAMPTZ,		/* ... and timestamptz */
+	JSONBTYPE_JSON,				/* JSON */
+	JSONBTYPE_JSONB,			/* JSONB */
+	JSONBTYPE_ARRAY,			/* array */
+	JSONBTYPE_COMPOSITE,		/* composite */
+	JSONBTYPE_JSONCAST,			/* something with an explicit cast to JSON */
+	JSONBTYPE_JSONBCAST,		/* something with an explicit cast to JSONB */
+	JSONBTYPE_OTHER				/* all else */
+}	JsonbTypeCategory;
+
 static inline Datum jsonb_from_cstring(char *json, int len);
 static size_t checkStringLen(size_t len);
 static void jsonb_in_object_start(void *pstate);
@@ -33,6 +59,22 @@ static void jsonb_in_array_end(void *pstate);
 static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull);
 static void jsonb_put_escaped_value(StringInfo out, JsonbValue *scalarVal);
 static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void composite_to_jsonb(Datum composite, JsonbInState *result);
+static void array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims,
+				   Datum *vals, bool *nulls, int *valcount,
+				   JsonbTypeCategory tcategory, Oid outfuncoid);
+static void array_to_jsonb_internal(Datum array, JsonbInState *result);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar);
+static void add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar);
 
 /*
  * jsonb type input function
@@ -462,3 +504,1282 @@ JsonbToCString(StringInfo out, JsonbContainer *in, int estimated_len)
 
 	return out->data;
 }
+
+
+/*
+ * Determine how we want to render values of a given type in datum_to_jsonb.
+ *
+ * Given the datatype OID, return its JsonbTypeCategory, as well as the type's
+ * output function OID.  If the returned category is JSONBTYPE_JSONCAST or
+ * JSONBTYPE_JSONBCAST, we return the OID of the relevant cast function instead.
+ */
+static void
+jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid)
+{
+	bool		typisvarlena;
+
+	/* Look through any domain */
+	typoid = getBaseType(typoid);
+
+	/* We'll usually need to return the type output function */
+	getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+
+	/* Check for known types */
+	switch (typoid)
+	{
+		case BOOLOID:
+			*tcategory = JSONBTYPE_BOOL;
+			break;
+
+		case INT2OID:
+		case INT4OID:
+		case INT8OID:
+		case FLOAT4OID:
+		case FLOAT8OID:
+		case NUMERICOID:
+			*tcategory = JSONBTYPE_NUMERIC;
+			break;
+
+		case TIMESTAMPOID:
+			*tcategory = JSONBTYPE_TIMESTAMP;
+			break;
+
+		case TIMESTAMPTZOID:
+			*tcategory = JSONBTYPE_TIMESTAMPTZ;
+			break;
+
+		case JSONBOID:
+			*tcategory = JSONBTYPE_JSONB;
+			break;
+
+		case JSONOID:
+			*tcategory = JSONBTYPE_JSON;
+			break;
+
+		default:
+			/* Check for arrays and composites */
+			if (OidIsValid(get_element_type(typoid)))
+				*tcategory = JSONBTYPE_ARRAY;
+			else if (type_is_rowtype(typoid))
+				*tcategory = JSONBTYPE_COMPOSITE;
+			else
+			{
+				/* It's probably the general case ... */
+				*tcategory = JSONBTYPE_OTHER;
+
+				/*
+				 * but let's look for a cast to json or jsonb, if it's not
+				 * built-in
+				 */
+				if (typoid >= FirstNormalObjectId)
+				{
+					HeapTuple	tuple;
+
+					tuple = SearchSysCache2(CASTSOURCETARGET,
+											ObjectIdGetDatum(typoid),
+											ObjectIdGetDatum(JSONBOID));
+					if (HeapTupleIsValid(tuple))
+					{
+						Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+						if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+						{
+							*tcategory = JSONBTYPE_JSONBCAST;
+							*outfuncoid = castForm->castfunc;
+						}
+
+						ReleaseSysCache(tuple);
+					}
+					else
+					{
+						tuple = SearchSysCache2(CASTSOURCETARGET,
+												ObjectIdGetDatum(typoid),
+												ObjectIdGetDatum(JSONOID));
+						if (HeapTupleIsValid(tuple))
+						{
+							Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+							if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+							{
+								*tcategory = JSONBTYPE_JSONCAST;
+								*outfuncoid = castForm->castfunc;
+							}
+
+							ReleaseSysCache(tuple);
+						}
+					}
+				}
+				break;
+			}
+	}
+}
+
+/*
+ * Turn a Datum into jsonb, adding it to the result JsonbInState.
+ *
+ * tcategory and outfuncoid are from a previous call to jsonb_categorize_type,
+ * except that if is_null is true then they can be invalid.
+ *
+ * If key_scalar is true, the value is stored as a key, so insist
+ * it's of an acceptable type, and force it to be a jbvString.
+ */
+static void
+datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar)
+{
+	char	   *outputstr;
+	bool		numeric_error;
+	JsonbValue	jb;
+	bool		scalar_jsonb = false;
+
+	if (is_null)
+	{
+		jb.type = jbvNull;
+	}
+	else if (key_scalar &&
+			 (tcategory == JSONBTYPE_ARRAY ||
+			  tcategory == JSONBTYPE_COMPOSITE ||
+			  tcategory == JSONBTYPE_JSON ||
+			  tcategory == JSONBTYPE_JSONB ||
+			  tcategory == JSONBTYPE_JSONCAST ||
+			  tcategory == JSONBTYPE_JSONBCAST))
+	{
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+		  errmsg("key value must be scalar, not array, composite or json")));
+	}
+	else
+	{
+		if (tcategory == JSONBTYPE_JSONCAST || tcategory == JSONBTYPE_JSONBCAST)
+			val = OidFunctionCall1(outfuncoid, val);
+
+		switch (tcategory)
+		{
+			case JSONBTYPE_ARRAY:
+				array_to_jsonb_internal(val, result);
+				break;
+			case JSONBTYPE_COMPOSITE:
+				composite_to_jsonb(val, result);
+				break;
+			case JSONBTYPE_BOOL:
+				if (key_scalar)
+				{
+					outputstr = DatumGetBool(val) ? "true" : "false";
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					jb.type = jbvBool;
+					jb.val.boolean = DatumGetBool(val);
+				}
+				break;
+			case JSONBTYPE_NUMERIC:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				if (key_scalar)
+				{
+					/* always quote keys */
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					/*
+					 * Make it numeric if it's a valid JSON number, otherwise
+					 * a string. Invalid numeric output will always have an
+					 * 'N' or 'n' in it (I think).
+					 */
+					numeric_error = (strchr(outputstr, 'N') != NULL ||
+									 strchr(outputstr, 'n') != NULL);
+					if (!numeric_error)
+					{
+						jb.type = jbvNumeric;
+						jb.val.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(outputstr), 0, -1));
+
+						pfree(outputstr);
+					}
+					else
+					{
+						jb.type = jbvString;
+						jb.val.string.len = strlen(outputstr);
+						jb.val.string.val = outputstr;
+					}
+				}
+				break;
+			case JSONBTYPE_TIMESTAMP:
+				{
+					Timestamp	timestamp;
+					struct pg_tm tm;
+					fsec_t		fsec;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestamp(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
+						EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_TIMESTAMPTZ:
+				{
+					TimestampTz timestamp;
+					struct pg_tm tm;
+					int			tz;
+					fsec_t		fsec;
+					const char *tzn = NULL;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestamp(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0)
+						EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_JSONCAST:
+			case JSONBTYPE_JSON:
+				{
+					/* parse the json right into the existing result object */
+					JsonLexContext *lex;
+					JsonSemAction sem;
+					text	   *json = DatumGetTextP(val);
+
+					lex = makeJsonLexContext(json, true);
+
+					memset(&sem, 0, sizeof(sem));
+
+					sem.semstate = (void *) result;
+
+					sem.object_start = jsonb_in_object_start;
+					sem.array_start = jsonb_in_array_start;
+					sem.object_end = jsonb_in_object_end;
+					sem.array_end = jsonb_in_array_end;
+					sem.scalar = jsonb_in_scalar;
+					sem.object_field_start = jsonb_in_object_field_start;
+
+					pg_parse_json(lex, &sem);
+
+				}
+				break;
+			case JSONBTYPE_JSONBCAST:
+			case JSONBTYPE_JSONB:
+				{
+					Jsonb	   *jsonb = DatumGetJsonb(val);
+					int			type;
+					JsonbIterator *it;
+
+					it = JsonbIteratorInit(&jsonb->root);
+
+					if (JB_ROOT_IS_SCALAR(jsonb))
+					{
+						(void) JsonbIteratorNext(&it, &jb, true);
+						Assert(jb.type == jbvArray);
+						(void) JsonbIteratorNext(&it, &jb, true);
+						scalar_jsonb = true;
+					}
+					else
+					{
+						while ((type = JsonbIteratorNext(&it, &jb, false))
+							   != WJB_DONE)
+						{
+							if (type == WJB_END_ARRAY || type == WJB_END_OBJECT ||
+								type == WJB_BEGIN_ARRAY || type == WJB_BEGIN_OBJECT)
+								result->res = pushJsonbValue(&result->parseState,
+															 type, NULL);
+							else
+								result->res = pushJsonbValue(&result->parseState,
+															 type, &jb);
+						}
+					}
+				}
+				break;
+			default:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				jb.type = jbvString;
+				jb.val.string.len = checkStringLen(strlen(outputstr));
+				jb.val.string.val = outputstr;
+				break;
+		}
+	}
+	if (tcategory >= JSONBTYPE_JSON && tcategory <= JSONBTYPE_JSONBCAST &&
+		!scalar_jsonb)
+	{
+		/* work has been done recursively */
+		return;
+	}
+	else if (result->parseState == NULL)
+	{
+		/* single root scalar */
+		JsonbValue	va;
+
+		va.type = jbvArray;
+		va.val.array.rawScalar = true;
+		va.val.array.nElems = 1;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, &va);
+		result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+	}
+	else
+	{
+		JsonbValue *o = &result->parseState->contVal;
+
+		switch (o->type)
+		{
+			case jbvArray:
+				result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+				break;
+			case jbvObject:
+				result->res = pushJsonbValue(&result->parseState,
+											 key_scalar ? WJB_KEY : WJB_VALUE,
+											 &jb);
+				break;
+			default:
+				elog(ERROR, "unexpected parent of nested structure");
+		}
+	}
+}
+
+/*
+ * Process a single dimension of an array.
+ * If it's the innermost dimension, output the values, otherwise call
+ * ourselves recursively to process the next dimension.
+ */
+static void
+array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims, Datum *vals,
+				   bool *nulls, int *valcount, JsonbTypeCategory tcategory,
+				   Oid outfuncoid)
+{
+	int			i;
+
+	Assert(dim < ndims);
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 1; i <= dims[dim]; i++)
+	{
+		if (dim + 1 == ndims)
+		{
+			datum_to_jsonb(vals[*valcount], nulls[*valcount], result, tcategory,
+						   outfuncoid, false);
+			(*valcount)++;
+		}
+		else
+		{
+			array_dim_to_jsonb(result, dim + 1, ndims, dims, vals, nulls,
+							   valcount, tcategory, outfuncoid);
+		}
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+}
+
+/*
+ * Turn an array into jsonb.
+ */
+static void
+array_to_jsonb_internal(Datum array, JsonbInState *result)
+{
+	ArrayType  *v = DatumGetArrayTypeP(array);
+	Oid			element_type = ARR_ELEMTYPE(v);
+	int		   *dim;
+	int			ndim;
+	int			nitems;
+	int			count = 0;
+	Datum	   *elements;
+	bool	   *nulls;
+	int16		typlen;
+	bool		typbyval;
+	char		typalign;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	ndim = ARR_NDIM(v);
+	dim = ARR_DIMS(v);
+	nitems = ArrayGetNItems(ndim, dim);
+
+	if (nitems <= 0)
+	{
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+		return;
+	}
+
+	get_typlenbyvalalign(element_type,
+						 &typlen, &typbyval, &typalign);
+
+	jsonb_categorize_type(element_type,
+						  &tcategory, &outfuncoid);
+
+	deconstruct_array(v, element_type, typlen, typbyval,
+					  typalign, &elements, &nulls,
+					  &nitems);
+
+	array_dim_to_jsonb(result, 0, ndim, dim, elements, nulls, &count, tcategory,
+					   outfuncoid);
+
+	pfree(elements);
+	pfree(nulls);
+}
+
+/*
+ * Turn a composite / record into jsonb.
+ */
+static void
+composite_to_jsonb(Datum composite, JsonbInState *result)
+{
+	HeapTupleHeader td;
+	Oid			tupType;
+	int32		tupTypmod;
+	TupleDesc	tupdesc;
+	HeapTupleData tmptup,
+			   *tuple;
+	int			i;
+
+	td = DatumGetHeapTupleHeader(composite);
+
+	/* Extract rowtype info and find a tupdesc */
+	tupType = HeapTupleHeaderGetTypeId(td);
+	tupTypmod = HeapTupleHeaderGetTypMod(td);
+	tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+
+	/* Build a temporary HeapTuple control structure */
+	tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
+	tmptup.t_data = td;
+	tuple = &tmptup;
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < tupdesc->natts; i++)
+	{
+		Datum		val;
+		bool		isnull;
+		char	   *attname;
+		JsonbTypeCategory tcategory;
+		Oid			outfuncoid;
+		JsonbValue	v;
+
+		if (tupdesc->attrs[i]->attisdropped)
+			continue;
+
+		attname = NameStr(tupdesc->attrs[i]->attname);
+
+		v.type = jbvString;
+		/* don't need checkStringLen here - can't exceed maximum name length */
+		v.val.string.len = strlen(attname);
+		v.val.string.val = attname;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_KEY, &v);
+
+		val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+		if (isnull)
+		{
+			tcategory = JSONBTYPE_NULL;
+			outfuncoid = InvalidOid;
+		}
+		else
+			jsonb_categorize_type(tupdesc->attrs[i]->atttypid,
+								  &tcategory, &outfuncoid);
+
+		datum_to_jsonb(val, isnull, result, tcategory, outfuncoid, false);
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_OBJECT, NULL);
+	ReleaseTupleDesc(tupdesc);
+}
+
+/*
+ * Add the value "val" to the jsonb "result".
+ *
+ * This is just a thin wrapper around datum_to_jsonb.  If the same type will be
+ * output many times, avoid using this; better to do the jsonb_categorize_type
+ * lookups only once.
+ */
+
+static void
+add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar)
+{
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (is_null)
+	{
+		tcategory = JSONBTYPE_NULL;
+		outfuncoid = InvalidOid;
+	}
+	else
+		jsonb_categorize_type(val_type,
+							  &tcategory, &outfuncoid);
+
+	datum_to_jsonb(val, is_null, result, tcategory, outfuncoid, key_scalar);
+}
+
+/*
+ * SQL function to_jsonb(anyvalue)
+ */
+Datum
+to_jsonb(PG_FUNCTION_ARGS)
+{
+	Datum		val = PG_GETARG_DATUM(0);
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
+	JsonbInState result;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &result, tcategory, outfuncoid, false);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_object(variadic "any")
+ */
+Datum
+jsonb_build_object(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	if (nargs % 2 != 0)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("invalid number of arguments: object must be matched key value pairs")));
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < nargs; i += 2)
+	{
+
+		/* process key */
+
+		if (PG_ARGISNULL(i))
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: key cannot be null", i + 1)));
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+
+		/*
+		 * turn a constant (more or less literal) value that's of unknown type
+		 * into text. Unknowns come in as a cstring pointer.
+		 */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+
+		add_jsonb(arg, false, &result, val_type, true);
+
+		/* process value */
+
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
+		/* see comments above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i + 1))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i + 1));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i + 1);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 2)));
+		add_jsonb(arg, PG_ARGISNULL(i + 1), &result, val_type, false);
+
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_object where it gets 0 arguments.
+ */
+Datum
+jsonb_build_object_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_array(variadic "any")
+ */
+Datum
+jsonb_build_array(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 0; i < nargs; i++)
+	{
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+		/* see comments in jsonb_build_object above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+		add_jsonb(arg, PG_ARGISNULL(i), &result, val_type, false);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_array where it gets 0 arguments.
+ */
+Datum
+jsonb_build_array_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * SQL function jsonb_object(text[])
+ *
+ * take a one- or two-dimensional text array of name/value pairs
+ * for a jsonb object.
+ *
+ */
+Datum
+jsonb_object(PG_FUNCTION_ARGS)
+{
+	ArrayType  *in_array = PG_GETARG_ARRAYTYPE_P(0);
+	int			ndims = ARR_NDIM(in_array);
+	Datum	   *in_datums;
+	bool	   *in_nulls;
+	int			in_count,
+				count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	switch (ndims)
+	{
+		case 0:
+			goto close_object;
+			break;
+
+		case 1:
+			if ((ARR_DIMS(in_array)[0]) % 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have even number of elements")));
+			break;
+
+		case 2:
+			if ((ARR_DIMS(in_array)[1]) != 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have two columns")));
+			break;
+
+		default:
+			ereport(ERROR,
+					(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+					 errmsg("wrong number of array subscripts")));
+	}
+
+	deconstruct_array(in_array,
+					  TEXTOID, -1, false, 'i',
+					  &in_datums, &in_nulls, &in_count);
+
+	count = in_count / 2;
+
+	for (i = 0; i < count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (in_nulls[i * 2])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(in_datums[i * 2]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (in_nulls[i * 2 + 1])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(in_datums[i * 2 + 1]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	pfree(in_datums);
+	pfree(in_nulls);
+
+close_object:
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_object(text[], text[])
+ *
+ * take separate name and value arrays of text to construct a jsonb object
+ * pairwise.
+ */
+Datum
+jsonb_object_two_arg(PG_FUNCTION_ARGS)
+{
+	ArrayType  *key_array = PG_GETARG_ARRAYTYPE_P(0);
+	ArrayType  *val_array = PG_GETARG_ARRAYTYPE_P(1);
+	int			nkdims = ARR_NDIM(key_array);
+	int			nvdims = ARR_NDIM(val_array);
+	Datum	   *key_datums,
+			   *val_datums;
+	bool	   *key_nulls,
+			   *val_nulls;
+	int			key_count,
+				val_count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	if (nkdims > 1 || nkdims != nvdims)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("wrong number of array subscripts")));
+
+	if (nkdims == 0)
+	{
+		/* return an empty jsonb object rather than a text datum */
+		result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+		PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+	}
+
+	deconstruct_array(key_array,
+					  TEXTOID, -1, false, 'i',
+					  &key_datums, &key_nulls, &key_count);
+
+	deconstruct_array(val_array,
+					  TEXTOID, -1, false, 'i',
+					  &val_datums, &val_nulls, &val_count);
+
+	if (key_count != val_count)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("mismatched array dimensions")));
+
+	for (i = 0; i < key_count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (key_nulls[i])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(key_datums[i]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (val_nulls[i])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(val_datums[i]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	pfree(key_datums);
+	pfree(key_nulls);
+	pfree(val_datums);
+	pfree(val_nulls);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * jsonb_agg aggregate function
+ */
+Datum
+jsonb_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar = false;
+	JsonbIterator *it;
+	Jsonb	   *jbelem;
+	JsonbValue	v;
+	int			type;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, false);
+
+	jbelem = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_ARRAY, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbelem->root);
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+				  v.val.numeric = 
+					DatumGetNumeric(DirectFunctionCall1(numeric_uplus, 
+														NumericGetDatum(v.val.numeric)));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 type, &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	result->res = pushJsonbValue(&result->parseState,
+								 WJB_END_ARRAY, NULL);
+
+
+	out = JsonbValueToJsonb(result->res);
+
+	PG_RETURN_POINTER(out);
+}
+
+/*
+ * jsonb_object_agg aggregate function
+ */
+Datum
+jsonb_object_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type;
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar;
+	JsonbIterator *it;
+	Jsonb	   *jbkey,
+			   *jbval;
+	JsonbValue	v;
+	int			type;
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_object_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, true);
+
+	jbkey = JsonbValueToJsonb(elem.res);
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(2) ? (Datum) 0 : PG_GETARG_DATUM(2);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, false);
+
+	jbval = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_OBJECT, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbkey->root);
+
+	/*
+	 * keys should be scalar, and we should have already checked for that
+	 * above when calling datum_to_jsonb, so all we expect to see here is
+	 * a raw-scalar array wrapping a single string element.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (!v.val.array.rawScalar)
+					elog(ERROR, "unexpected structure for key");
+				break;
+			case WJB_ELEM:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else
+				{
+					ereport(ERROR,
+							(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+							 errmsg("object keys must be strings")));
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 WJB_KEY, &v);
+				break;
+			case WJB_END_ARRAY:
+				break;
+			default:
+				elog(ERROR, "unexpected structure for key");
+				break;
+		}
+	}
+
+	it = JsonbIteratorInit(&jbval->root);
+
+	single_scalar = false;
+
+	/*
+	 * values can be anything, including structured and null, so we treat
+	 * them as in jsonb_agg_transfn, except that single scalars are always
+	 * pushed as WJB_VALUE items.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+					v.val.numeric = 
+					  DatumGetNumeric(DirectFunctionCall1(numeric_uplus, 
+														  NumericGetDatum(v.val.numeric)));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 single_scalar ? WJB_VALUE : type,
+											 &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_object_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	result->res = pushJsonbValue(&result->parseState,
+								 WJB_END_OBJECT, NULL);
+
+
+	out = JsonbValueToJsonb(result->res);
+
+	PG_RETURN_POINTER(out);
+}
diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c
index 04f35bf..d546fd1 100644
--- a/src/backend/utils/adt/jsonb_util.c
+++ b/src/backend/utils/adt/jsonb_util.c
@@ -1328,7 +1328,7 @@ convertJsonbValue(StringInfo buffer, JEntry *header, JsonbValue *val, int level)
 	else if (val->type == jbvObject)
 		convertJsonbObject(buffer, header, val, level);
 	else
-		elog(ERROR, "unknown type of jsonb container");
+		elog(ERROR, "unknown type of jsonb container to convert");
 }
 
 static void
diff --git a/src/include/catalog/pg_aggregate.h b/src/include/catalog/pg_aggregate.h
index 3ba9e5e..8e0735b 100644
--- a/src/include/catalog/pg_aggregate.h
+++ b/src/include/catalog/pg_aggregate.h
@@ -286,6 +286,10 @@ DATA(insert ( 3545	n 0 bytea_string_agg_transfn	bytea_string_agg_finalfn	-				-
 DATA(insert ( 3175	n 0 json_agg_transfn	json_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3197	n 0 json_object_agg_transfn json_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 
+/* jsonb */
+DATA(insert ( 3267	n 0 jsonb_agg_transfn	jsonb_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+DATA(insert ( 3270	n 0 jsonb_object_agg_transfn jsonb_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+
 /* ordered-set and hypothetical-set aggregates */
 DATA(insert ( 3972	o 1 ordered_set_transition			percentile_disc_final					-		-		-		t f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3974	o 1 ordered_set_transition			percentile_cont_float8_final			-		-		-		f f 0	2281	0	0		0	_null_ _null_ ));
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index 3ce9849..d2c9032 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4595,6 +4595,33 @@ DESCR("I/O");
 DATA(insert OID =  3803 (  jsonb_send		PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "3802" _null_ _null_ _null_ _null_	jsonb_send _null_ _null_ _null_ ));
 DESCR("I/O");
 
+DATA(insert OID = 3263 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "1009" _null_ _null_ _null_ _null_ jsonb_object _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3264 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 3802 "1009 1009" _null_ _null_ _null_ _null_ jsonb_object_two_arg _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3787 (  to_jsonb	   PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ to_jsonb _null_ _null_ _null_ ));
+DESCR("map input to jsonb");
+DATA(insert OID = 3265 (  jsonb_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ jsonb_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate transition function");
+DATA(insert OID = 3266 (  jsonb_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate final function");
+DATA(insert OID = 3267 (  jsonb_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 1 0 3802 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate input into jsonb");
+DATA(insert OID = 3268 (  jsonb_object_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ jsonb_object_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate transition function");
+DATA(insert OID = 3269 (  jsonb_object_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_object_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate final function");
+DATA(insert OID = 3270 (  jsonb_object_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 2 0 3802 "2276 2276" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate inputs into jsonb object");
+DATA(insert OID = 3259 (  jsonb_build_array	   PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_array _null_ _null_ _null_ ));
+DESCR("build a jsonb array from any inputs");
+DATA(insert OID = 3260 (  jsonb_build_array	   PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_array_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb array");
+DATA(insert OID = 3261 (  jsonb_build_object    PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_object _null_ _null_ _null_ ));
+DESCR("build a jsonb object from pairwise key/value inputs");
+DATA(insert OID = 3262 (  jsonb_build_object    PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_object_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb object");
+
 DATA(insert OID = 3478 (  jsonb_object_field			PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field _null_ _null_ _null_ ));
 DATA(insert OID = 3214 (  jsonb_object_field_text	PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 25  "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field_text _null_ _null_ _null_ ));
 DATA(insert OID = 3215 (  jsonb_array_element		PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 23" _null_ _null_ "{from_json, element_index}" _null_ jsonb_array_element _null_ _null_ _null_ ));
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index 91e3e14..7fe12b8 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -315,6 +315,22 @@ extern Datum jsonb_recv(PG_FUNCTION_ARGS);
 extern Datum jsonb_send(PG_FUNCTION_ARGS);
 extern Datum jsonb_typeof(PG_FUNCTION_ARGS);
 
+/* generator routines */
+extern Datum to_jsonb(PG_FUNCTION_ARGS);
+
+extern Datum jsonb_build_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_object_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_two_arg(PG_FUNCTION_ARGS);
+
+/* jsonb_agg, json_object_agg functions */
+extern Datum jsonb_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_agg_finalfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_finalfn(PG_FUNCTION_ARGS);
+
 /* Indexing-related ops */
 extern Datum jsonb_exists(PG_FUNCTION_ARGS);
 extern Datum jsonb_exists_any(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index eb37da7..d449757 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -301,6 +301,28 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1220,6 +1242,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/expected/jsonb_1.out b/src/test/regress/expected/jsonb_1.out
index f3bfc7b..a64158c 100644
--- a/src/test/regress/expected/jsonb_1.out
+++ b/src/test/regress/expected/jsonb_1.out
@@ -301,6 +301,28 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1220,6 +1242,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index ed266d5..cf79cbf 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -62,6 +62,22 @@ SELECT '    '::jsonb;			-- ERROR, no value
 -- make sure jsonb is passed through json generators without being escaped
 SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
 
+--jsonb_agg
+
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -256,6 +272,86 @@ SELECT jsonb_typeof('"hello"') AS string;
 SELECT jsonb_typeof('"true"') AS string;
 SELECT jsonb_typeof('"1.0"') AS string;
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+
+SELECT jsonb_build_object();
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+
+-- jsonb_object
+
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+
+--two argument form of jsonb_object
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- mismatched dimensions
+
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+
+-- null key error
+
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+
+-- empty key is allowed
+
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+
+
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
#5Andrew Dunstan
andrew@dunslane.net
In reply to: Andrew Dunstan (#4)
1 attachment(s)
Re: jsonb generator functions

On 10/13/2014 09:37 AM, Andrew Dunstan wrote:

On 09/26/2014 04:54 PM, Andrew Dunstan wrote:

Here is a patch for the generator and aggregate functions for jsonb
that we didn't manage to get done in time for 9.4. They are all
equivalents of the similarly names json functions. Included are

to_jsonb
jsonb_build_object
jsonb_build_array
jsonb_object
jsonb_agg
jsonb_object_agg

Still to come: documentation.

Adding to the next commitfest.

Revised patch to fix compiler warnings.

And again, this time fixing a variable that was left incompletely
initialized, as found by Pavel Stehule.
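
For reference, here is a condensed sketch of how the new functions are used,
along the lines of the added regression tests (the exact queries and expected
output are in the patch; "foo" below is the temp table the tests create):

   SELECT jsonb_build_array('a', 1, 'b', 1.2, 'c', true, 'd', null);
   -- ["a", 1, "b", 1.2, "c", true, "d", null]

   SELECT jsonb_build_object('a', 1, 'b', 1.2, 'c', true, 'd', null);
   -- {"a": 1, "b": 1.2, "c": true, "d": null}

   SELECT jsonb_object('{a,1,b,2}');
   -- {"a": "1", "b": "2"}

   SELECT jsonb_agg(q)
     FROM (SELECT x, 'txt' || x AS y FROM generate_series(1,3) AS x) q;
   -- [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]

   SELECT jsonb_object_agg(serial_num,
                           jsonb_build_object('name', name, 'type', type))
     FROM foo;
   -- {"847001": {"name": "t15", ...}, ...}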

cheers

andrew

Attachments:

jsonbmissingfuncs4.patchtext/x-patch; name=jsonbmissingfuncs4.patchDownload
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
index 2fd87fc..33a19be 100644
--- a/src/backend/utils/adt/jsonb.c
+++ b/src/backend/utils/adt/jsonb.c
@@ -12,11 +12,20 @@
  */
 #include "postgres.h"
 
+#include "miscadmin.h"
+#include "access/htup_details.h"
+#include "access/transam.h"
+#include "catalog/pg_cast.h"
+#include "catalog/pg_type.h"
 #include "libpq/pqformat.h"
 #include "utils/builtins.h"
+#include "utils/datetime.h"
+#include "utils/lsyscache.h"
 #include "utils/json.h"
 #include "utils/jsonapi.h"
 #include "utils/jsonb.h"
+#include "utils/syscache.h"
+#include "utils/typcache.h"
 
 typedef struct JsonbInState
 {
@@ -24,6 +33,23 @@ typedef struct JsonbInState
 	JsonbValue *res;
 } JsonbInState;
 
+/* unlike with json categories, we need to treat json and jsonb differently */
+typedef enum					/* type categories for datum_to_jsonb */
+{
+	JSONBTYPE_NULL,				/* null, so we didn't bother to identify */
+	JSONBTYPE_BOOL,				/* boolean (built-in types only) */
+	JSONBTYPE_NUMERIC,			/* numeric (ditto) */
+	JSONBTYPE_TIMESTAMP,		/* we use special formatting for timestamp */
+	JSONBTYPE_TIMESTAMPTZ,		/* ... and timestamptz */
+	JSONBTYPE_JSON,				/* JSON */
+	JSONBTYPE_JSONB,			/* JSONB */
+	JSONBTYPE_ARRAY,			/* array */
+	JSONBTYPE_COMPOSITE,		/* composite */
+	JSONBTYPE_JSONCAST,			/* something with an explicit cast to JSON */
+	JSONBTYPE_JSONBCAST,		/* something with an explicit cast to JSONB */
+	JSONBTYPE_OTHER				/* all else */
+}	JsonbTypeCategory;
+
 static inline Datum jsonb_from_cstring(char *json, int len);
 static size_t checkStringLen(size_t len);
 static void jsonb_in_object_start(void *pstate);
@@ -33,6 +59,22 @@ static void jsonb_in_array_end(void *pstate);
 static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull);
 static void jsonb_put_escaped_value(StringInfo out, JsonbValue *scalarVal);
 static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void composite_to_jsonb(Datum composite, JsonbInState *result);
+static void array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims,
+				   Datum *vals, bool *nulls, int *valcount,
+				   JsonbTypeCategory tcategory, Oid outfuncoid);
+static void array_to_jsonb_internal(Datum array, JsonbInState *result);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar);
+static void add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar);
 
 /*
  * jsonb type input function
@@ -462,3 +504,1284 @@ JsonbToCString(StringInfo out, JsonbContainer *in, int estimated_len)
 
 	return out->data;
 }
+
+
+/*
+ * Determine how we want to render values of a given type in datum_to_jsonb.
+ *
+ * Given the datatype OID, return its JsonbTypeCategory, as well as the type's
+ * output function OID.  If the returned category is JSONBTYPE_JSONCAST or
+ * JSONBTYPE_JSONBCAST, we return the OID of the relevant cast function instead.
+ */
+static void
+jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid)
+{
+	bool		typisvarlena;
+
+	/* Look through any domain */
+	typoid = getBaseType(typoid);
+
+	/* We'll usually need to return the type output function */
+	getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+
+	/* Check for known types */
+	switch (typoid)
+	{
+		case BOOLOID:
+			*tcategory = JSONBTYPE_BOOL;
+			break;
+
+		case INT2OID:
+		case INT4OID:
+		case INT8OID:
+		case FLOAT4OID:
+		case FLOAT8OID:
+		case NUMERICOID:
+			*tcategory = JSONBTYPE_NUMERIC;
+			break;
+
+		case TIMESTAMPOID:
+			*tcategory = JSONBTYPE_TIMESTAMP;
+			break;
+
+		case TIMESTAMPTZOID:
+			*tcategory = JSONBTYPE_TIMESTAMPTZ;
+			break;
+
+		case JSONBOID:
+			*tcategory = JSONBTYPE_JSONB;
+			break;
+
+		case JSONOID:
+			*tcategory = JSONBTYPE_JSON;
+			break;
+
+		default:
+			/* Check for arrays and composites */
+			if (OidIsValid(get_element_type(typoid)))
+				*tcategory = JSONBTYPE_ARRAY;
+			else if (type_is_rowtype(typoid))
+				*tcategory = JSONBTYPE_COMPOSITE;
+			else
+			{
+				/* It's probably the general case ... */
+				*tcategory = JSONBTYPE_OTHER;
+
+				/*
+				 * but let's look for a cast to json or jsonb, if it's not
+				 * built-in
+				 */
+				if (typoid >= FirstNormalObjectId)
+				{
+					HeapTuple	tuple;
+
+					tuple = SearchSysCache2(CASTSOURCETARGET,
+											ObjectIdGetDatum(typoid),
+											ObjectIdGetDatum(JSONBOID));
+					if (HeapTupleIsValid(tuple))
+					{
+						Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+						if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+						{
+							*tcategory = JSONBTYPE_JSONBCAST;
+							*outfuncoid = castForm->castfunc;
+						}
+
+						ReleaseSysCache(tuple);
+					}
+					else
+					{
+						tuple = SearchSysCache2(CASTSOURCETARGET,
+												ObjectIdGetDatum(typoid),
+												ObjectIdGetDatum(JSONOID));
+						if (HeapTupleIsValid(tuple))
+						{
+							Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+							if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+							{
+								*tcategory = JSONBTYPE_JSONCAST;
+								*outfuncoid = castForm->castfunc;
+							}
+
+							ReleaseSysCache(tuple);
+						}
+					}
+				}
+				break;
+			}
+	}
+}
+
+/*
+ * Turn a Datum into jsonb, adding it to the result JsonbInState.
+ *
+ * tcategory and outfuncoid are from a previous call to jsonb_categorize_type,
+ * except that if is_null is true then they can be invalid.
+ *
+ * If key_scalar is true, the value is stored as a key, so insist
+ * it's of an acceptable type, and force it to be a jbvString.
+ */
+static void
+datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar)
+{
+	char	   *outputstr;
+	bool		numeric_error;
+	JsonbValue	jb;
+	bool		scalar_jsonb = false;
+
+	if (is_null)
+	{
+		jb.type = jbvNull;
+	}
+	else if (key_scalar &&
+			 (tcategory == JSONBTYPE_ARRAY ||
+			  tcategory == JSONBTYPE_COMPOSITE ||
+			  tcategory == JSONBTYPE_JSON ||
+			  tcategory == JSONBTYPE_JSONB ||
+			  tcategory == JSONBTYPE_JSONCAST ||
+			  tcategory == JSONBTYPE_JSONBCAST))
+	{
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+		  errmsg("key value must be scalar, not array, composite or json")));
+	}
+	else
+	{
+		if (tcategory == JSONBTYPE_JSONCAST || tcategory == JSONBTYPE_JSONBCAST)
+			val = OidFunctionCall1(outfuncoid, val);
+
+		switch (tcategory)
+		{
+			case JSONBTYPE_ARRAY:
+				array_to_jsonb_internal(val, result);
+				break;
+			case JSONBTYPE_COMPOSITE:
+				composite_to_jsonb(val, result);
+				break;
+			case JSONBTYPE_BOOL:
+				if (key_scalar)
+				{
+					outputstr = DatumGetBool(val) ? "true" : "false";
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					jb.type = jbvBool;
+					jb.val.boolean = DatumGetBool(val);
+				}
+				break;
+			case JSONBTYPE_NUMERIC:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				if (key_scalar)
+				{
+					/* always quote keys */
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					/*
+					 * Make it numeric if it's a valid JSON number, otherwise
+					 * a string. Invalid numeric output will always have an
+					 * 'N' or 'n' in it (I think).
+					 */
+					numeric_error = (strchr(outputstr, 'N') != NULL ||
+									 strchr(outputstr, 'n') != NULL);
+					if (!numeric_error)
+					{
+						jb.type = jbvNumeric;
+						jb.val.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(outputstr), 0, -1));
+
+						pfree(outputstr);
+					}
+					else
+					{
+						jb.type = jbvString;
+						jb.val.string.len = strlen(outputstr);
+						jb.val.string.val = outputstr;
+					}
+				}
+				break;
+			case JSONBTYPE_TIMESTAMP:
+				{
+					Timestamp	timestamp;
+					struct pg_tm tm;
+					fsec_t		fsec;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestamp(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
+						EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_TIMESTAMPTZ:
+				{
+					TimestampTz timestamp;
+					struct pg_tm tm;
+					int			tz;
+					fsec_t		fsec;
+					const char *tzn = NULL;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestampTz(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0)
+						EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_JSONCAST:
+			case JSONBTYPE_JSON:
+				{
+					/* parse the json right into the existing result object */
+					JsonLexContext *lex;
+					JsonSemAction sem;
+					text	   *json = DatumGetTextP(val);
+
+					lex = makeJsonLexContext(json, true);
+
+					memset(&sem, 0, sizeof(sem));
+
+					sem.semstate = (void *) result;
+
+					sem.object_start = jsonb_in_object_start;
+					sem.array_start = jsonb_in_array_start;
+					sem.object_end = jsonb_in_object_end;
+					sem.array_end = jsonb_in_array_end;
+					sem.scalar = jsonb_in_scalar;
+					sem.object_field_start = jsonb_in_object_field_start;
+
+					pg_parse_json(lex, &sem);
+
+				}
+				break;
+			case JSONBTYPE_JSONBCAST:
+			case JSONBTYPE_JSONB:
+				{
+					Jsonb	   *jsonb = DatumGetJsonb(val);
+					int			type;
+					JsonbIterator *it;
+
+					it = JsonbIteratorInit(&jsonb->root);
+
+					if (JB_ROOT_IS_SCALAR(jsonb))
+					{
+						(void) JsonbIteratorNext(&it, &jb, true);
+						Assert(jb.type == jbvArray);
+						(void) JsonbIteratorNext(&it, &jb, true);
+						scalar_jsonb = true;
+					}
+					else
+					{
+						while ((type = JsonbIteratorNext(&it, &jb, false))
+							   != WJB_DONE)
+						{
+							if (type == WJB_END_ARRAY || type == WJB_END_OBJECT ||
+								type == WJB_BEGIN_ARRAY || type == WJB_BEGIN_OBJECT)
+								result->res = pushJsonbValue(&result->parseState,
+															 type, NULL);
+							else
+								result->res = pushJsonbValue(&result->parseState,
+															 type, &jb);
+						}
+					}
+				}
+				break;
+			default:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				jb.type = jbvString;
+				jb.val.string.len = checkStringLen(strlen(outputstr));
+				jb.val.string.val = outputstr;
+				break;
+		}
+	}
+	if (tcategory >= JSONBTYPE_JSON && tcategory <= JSONBTYPE_JSONBCAST &&
+		!scalar_jsonb)
+	{
+		/* work has been done recursively */
+		return;
+	}
+	else if (result->parseState == NULL)
+	{
+		/* single root scalar */
+		JsonbValue	va;
+
+		va.type = jbvArray;
+		va.val.array.rawScalar = true;
+		va.val.array.nElems = 1;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, &va);
+		result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+	}
+	else
+	{
+		JsonbValue *o = &result->parseState->contVal;
+
+		switch (o->type)
+		{
+			case jbvArray:
+				result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+				break;
+			case jbvObject:
+				result->res = pushJsonbValue(&result->parseState,
+											 key_scalar ? WJB_KEY : WJB_VALUE,
+											 &jb);
+				break;
+			default:
+				elog(ERROR, "unexpected parent of nested structure");
+		}
+	}
+}
+
+/*
+ * Process a single dimension of an array.
+ * If it's the innermost dimension, output the values, otherwise call
+ * ourselves recursively to process the next dimension.
+ */
+static void
+array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims, Datum *vals,
+				   bool *nulls, int *valcount, JsonbTypeCategory tcategory,
+				   Oid outfuncoid)
+{
+	int			i;
+
+	Assert(dim < ndims);
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 1; i <= dims[dim]; i++)
+	{
+		if (dim + 1 == ndims)
+		{
+			datum_to_jsonb(vals[*valcount], nulls[*valcount], result, tcategory,
+						   outfuncoid, false);
+			(*valcount)++;
+		}
+		else
+		{
+			array_dim_to_jsonb(result, dim + 1, ndims, dims, vals, nulls,
+							   valcount, tcategory, outfuncoid);
+		}
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+}
+
+/*
+ * Turn an array into JSON.
+ */
+static void
+array_to_jsonb_internal(Datum array, JsonbInState *result)
+{
+	ArrayType  *v = DatumGetArrayTypeP(array);
+	Oid			element_type = ARR_ELEMTYPE(v);
+	int		   *dim;
+	int			ndim;
+	int			nitems;
+	int			count = 0;
+	Datum	   *elements;
+	bool	   *nulls;
+	int16		typlen;
+	bool		typbyval;
+	char		typalign;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	ndim = ARR_NDIM(v);
+	dim = ARR_DIMS(v);
+	nitems = ArrayGetNItems(ndim, dim);
+
+	if (nitems <= 0)
+	{
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+		return;
+	}
+
+	get_typlenbyvalalign(element_type,
+						 &typlen, &typbyval, &typalign);
+
+	jsonb_categorize_type(element_type,
+						  &tcategory, &outfuncoid);
+
+	deconstruct_array(v, element_type, typlen, typbyval,
+					  typalign, &elements, &nulls,
+					  &nitems);
+
+	array_dim_to_jsonb(result, 0, ndim, dim, elements, nulls, &count, tcategory,
+					   outfuncoid);
+
+	pfree(elements);
+	pfree(nulls);
+}
+
+/*
+ * Turn a composite / record into JSON.
+ */
+static void
+composite_to_jsonb(Datum composite, JsonbInState *result)
+{
+	HeapTupleHeader td;
+	Oid			tupType;
+	int32		tupTypmod;
+	TupleDesc	tupdesc;
+	HeapTupleData tmptup,
+			   *tuple;
+	int			i;
+
+	td = DatumGetHeapTupleHeader(composite);
+
+	/* Extract rowtype info and find a tupdesc */
+	tupType = HeapTupleHeaderGetTypeId(td);
+	tupTypmod = HeapTupleHeaderGetTypMod(td);
+	tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+
+	/* Build a temporary HeapTuple control structure */
+	tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
+	tmptup.t_data = td;
+	tuple = &tmptup;
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < tupdesc->natts; i++)
+	{
+		Datum		val;
+		bool		isnull;
+		char	   *attname;
+		JsonbTypeCategory tcategory;
+		Oid			outfuncoid;
+		JsonbValue	v;
+
+		if (tupdesc->attrs[i]->attisdropped)
+			continue;
+
+		attname = NameStr(tupdesc->attrs[i]->attname);
+
+		v.type = jbvString;
+		/* don't need checkStringLen here - can't exceed maximum name length */
+		v.val.string.len = strlen(attname);
+		v.val.string.val = attname;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_KEY, &v);
+
+		val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+		if (isnull)
+		{
+			tcategory = JSONBTYPE_NULL;
+			outfuncoid = InvalidOid;
+		}
+		else
+			jsonb_categorize_type(tupdesc->attrs[i]->atttypid,
+								  &tcategory, &outfuncoid);
+
+		datum_to_jsonb(val, isnull, result, tcategory, outfuncoid, false);
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_OBJECT, NULL);
+	ReleaseTupleDesc(tupdesc);
+}
+
+/*
+ * Append the jsonb value for "val" to "result".
+ *
+ * This is just a thin wrapper around datum_to_jsonb.  If the same type will be
+ * processed many times, avoid using this; better to do the
+ * jsonb_categorize_type lookups only once.
+ */
+
+static void
+add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar)
+{
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (is_null)
+	{
+		tcategory = JSONBTYPE_NULL;
+		outfuncoid = InvalidOid;
+	}
+	else
+		jsonb_categorize_type(val_type,
+							  &tcategory, &outfuncoid);
+
+	datum_to_jsonb(val, is_null, result, tcategory, outfuncoid, key_scalar);
+}
+
+/*
+ * SQL function to_jsonb(anyvalue)
+ */
+Datum
+to_jsonb(PG_FUNCTION_ARGS)
+{
+	Datum		val = PG_GETARG_DATUM(0);
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
+	JsonbInState result;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &result, tcategory, outfuncoid, false);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_object(variadic "any")
+ */
+Datum
+jsonb_build_object(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	if (nargs % 2 != 0)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("invalid number of arguments: object must be matched key value pairs")));
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < nargs; i += 2)
+	{
+
+		/* process key */
+
+		if (PG_ARGISNULL(i))
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: key cannot be null", i + 1)));
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+
+		/*
+		 * turn a constant (more or less literal) value that's of unknown type
+		 * into text. Unknowns come in as a cstring pointer.
+		 */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+
+		add_jsonb(arg, false, &result, val_type, true);
+
+		/* process value */
+
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
+		/* see comments above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i + 1))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i + 1));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i + 1);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 2)));
+		add_jsonb(arg, PG_ARGISNULL(i + 1), &result, val_type, false);
+
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_object where it gets 0 arguments.
+ */
+Datum
+jsonb_build_object_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_array(variadic "any")
+ */
+Datum
+jsonb_build_array(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 0; i < nargs; i++)
+	{
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+		/* see comments in jsonb_build_object above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+		add_jsonb(arg, PG_ARGISNULL(i), &result, val_type, false);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_array where it gets 0 arguments.
+ */
+Datum
+jsonb_build_array_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * SQL function jsonb_object(text[])
+ *
+ * take a one or two dimensional array of text as name value pairs
+ * for a json object.
+ *
+ */
+Datum
+jsonb_object(PG_FUNCTION_ARGS)
+{
+	ArrayType  *in_array = PG_GETARG_ARRAYTYPE_P(0);
+	int			ndims = ARR_NDIM(in_array);
+	Datum	   *in_datums;
+	bool	   *in_nulls;
+	int			in_count,
+				count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	switch (ndims)
+	{
+		case 0:
+			goto close_object;
+			break;
+
+		case 1:
+			if ((ARR_DIMS(in_array)[0]) % 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have even number of elements")));
+			break;
+
+		case 2:
+			if ((ARR_DIMS(in_array)[1]) != 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have two columns")));
+			break;
+
+		default:
+			ereport(ERROR,
+					(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+					 errmsg("wrong number of array subscripts")));
+	}
+
+	deconstruct_array(in_array,
+					  TEXTOID, -1, false, 'i',
+					  &in_datums, &in_nulls, &in_count);
+
+	count = in_count / 2;
+
+	for (i = 0; i < count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (in_nulls[i * 2])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(in_datums[i * 2]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (in_nulls[i * 2 + 1])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(in_datums[i * 2 + 1]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	pfree(in_datums);
+	pfree(in_nulls);
+
+close_object:
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_object(text[], text[])
+ *
+ * take separate name and value arrays of text to construct a jsonb object
+ * pairwise.
+ */
+Datum
+jsonb_object_two_arg(PG_FUNCTION_ARGS)
+{
+	ArrayType  *key_array = PG_GETARG_ARRAYTYPE_P(0);
+	ArrayType  *val_array = PG_GETARG_ARRAYTYPE_P(1);
+	int			nkdims = ARR_NDIM(key_array);
+	int			nvdims = ARR_NDIM(val_array);
+	Datum	   *key_datums,
+			   *val_datums;
+	bool	   *key_nulls,
+			   *val_nulls;
+	int			key_count,
+				val_count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	if (nkdims > 1 || nkdims != nvdims)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("wrong number of array subscripts")));
+
+	if (nkdims == 0)
+	{
+		/* no pairs, so just close the object opened above and return it */
+		result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+		PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+	}
+
+	deconstruct_array(key_array,
+					  TEXTOID, -1, false, 'i',
+					  &key_datums, &key_nulls, &key_count);
+
+	deconstruct_array(val_array,
+					  TEXTOID, -1, false, 'i',
+					  &val_datums, &val_nulls, &val_count);
+
+	if (key_count != val_count)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("mismatched array dimensions")));
+
+	for (i = 0; i < key_count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (key_nulls[i])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(key_datums[i]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (val_nulls[i])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(val_datums[i]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	pfree(key_datums);
+	pfree(key_nulls);
+	pfree(val_datums);
+	pfree(val_nulls);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * jsonb_agg aggregate function
+ */
+Datum
+jsonb_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar = false;
+	JsonbIterator *it;
+	Jsonb	   *jbelem;
+	JsonbValue	v;
+	int			type;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	if (PG_ARGISNULL(1))
+	{
+		tcategory = JSONBTYPE_NULL;
+		outfuncoid = InvalidOid;
+	}
+	else
+		jsonb_categorize_type(val_type, &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, PG_ARGISNULL(1), &elem, tcategory, outfuncoid, false);
+
+	jbelem = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_ARRAY, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbelem->root);
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+				  v.val.numeric = 
+					DatumGetNumeric(DirectFunctionCall1(numeric_uplus, 
+														NumericGetDatum(v.val.numeric)));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 type, &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	result->res = pushJsonbValue(&result->parseState,
+								 WJB_END_ARRAY, NULL);
+
+
+	out = JsonbValueToJsonb(result->res);
+
+	PG_RETURN_POINTER(out);
+}
+
+/*
+ * jsonb_object_agg aggregate function
+ */
+Datum
+jsonb_object_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type;
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar;
+	JsonbIterator *it;
+	Jsonb	   *jbkey,
+			   *jbval;
+	JsonbValue	v;
+	int			type;
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_object_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (PG_ARGISNULL(1))
+		ereport(ERROR,
+				(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+				 errmsg("null value not allowed for object key")));
+
+	val = PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, true);
+
+	jbkey = JsonbValueToJsonb(elem.res);
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(2) ? (Datum) 0 : PG_GETARG_DATUM(2);
+
+	if (PG_ARGISNULL(2))
+	{
+		tcategory = JSONBTYPE_NULL;
+		outfuncoid = InvalidOid;
+	}
+	else
+		jsonb_categorize_type(val_type, &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, PG_ARGISNULL(2), &elem, tcategory, outfuncoid, false);
+
+	jbval = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_OBJECT, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbkey->root);
+
+	/*
+	 * keys should be scalar, and we should have already checked for that
+	 * above when calling datum_to_jsonb, so we only need to look for these
+	 * things.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (!v.val.array.rawScalar)
+					elog(ERROR, "unexpected structure for key");
+				break;
+			case WJB_ELEM:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else
+				{
+					ereport(ERROR,
+							(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+							 errmsg("object keys must be strings")));
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 WJB_KEY, &v);
+				break;
+			case WJB_END_ARRAY:
+				break;
+			default:
+				elog(ERROR, "unexpected structure for key");
+				break;
+		}
+	}
+
+	it = JsonbIteratorInit(&jbval->root);
+
+	single_scalar = false;
+
+	/*
+	 * values can be anything, including structured and null, so we treat
+	 * them as in jsonb_agg_transfn, except that single scalars are always
+	 * pushed as WJB_VALUE items.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+					v.val.numeric = 
+					  DatumGetNumeric(DirectFunctionCall1(numeric_uplus, 
+														  NumericGetDatum(v.val.numeric)));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 single_scalar ? WJB_VALUE : type,
+											 &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_object_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	result->res = pushJsonbValue(&result->parseState,
+								 WJB_END_OBJECT, NULL);
+
+
+	out = JsonbValueToJsonb(result->res);
+
+	PG_RETURN_POINTER(out);
+}
diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c
index 04f35bf..d546fd1 100644
--- a/src/backend/utils/adt/jsonb_util.c
+++ b/src/backend/utils/adt/jsonb_util.c
@@ -1328,7 +1328,7 @@ convertJsonbValue(StringInfo buffer, JEntry *header, JsonbValue *val, int level)
 	else if (val->type == jbvObject)
 		convertJsonbObject(buffer, header, val, level);
 	else
-		elog(ERROR, "unknown type of jsonb container");
+		elog(ERROR, "unknown type of jsonb container to convert");
 }
 
 static void
diff --git a/src/include/catalog/pg_aggregate.h b/src/include/catalog/pg_aggregate.h
index 3ba9e5e..8e0735b 100644
--- a/src/include/catalog/pg_aggregate.h
+++ b/src/include/catalog/pg_aggregate.h
@@ -286,6 +286,10 @@ DATA(insert ( 3545	n 0 bytea_string_agg_transfn	bytea_string_agg_finalfn	-				-
 DATA(insert ( 3175	n 0 json_agg_transfn	json_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3197	n 0 json_object_agg_transfn json_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 
+/* jsonb */
+DATA(insert ( 3267	n 0 jsonb_agg_transfn	jsonb_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+DATA(insert ( 3270	n 0 jsonb_object_agg_transfn jsonb_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+
 /* ordered-set and hypothetical-set aggregates */
 DATA(insert ( 3972	o 1 ordered_set_transition			percentile_disc_final					-		-		-		t f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3974	o 1 ordered_set_transition			percentile_cont_float8_final			-		-		-		f f 0	2281	0	0		0	_null_ _null_ ));
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index 3ce9849..d2c9032 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4595,6 +4595,33 @@ DESCR("I/O");
 DATA(insert OID =  3803 (  jsonb_send		PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "3802" _null_ _null_ _null_ _null_	jsonb_send _null_ _null_ _null_ ));
 DESCR("I/O");
 
+DATA(insert OID = 3263 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "1009" _null_ _null_ _null_ _null_ jsonb_object _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3264 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 3802 "1009 1009" _null_ _null_ _null_ _null_ jsonb_object_two_arg _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3787 (  to_jsonb	   PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ to_jsonb _null_ _null_ _null_ ));
+DESCR("map input to jsonb");
+DATA(insert OID = 3265 (  jsonb_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ jsonb_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate transition function");
+DATA(insert OID = 3266 (  jsonb_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate final function");
+DATA(insert OID = 3267 (  jsonb_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 1 0 3802 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate input into jsonb");
+DATA(insert OID = 3268 (  jsonb_object_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ jsonb_object_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate transition function");
+DATA(insert OID = 3269 (  jsonb_object_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_object_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate final function");
+DATA(insert OID = 3270 (  jsonb_object_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 2 0 3802 "2276 2276" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate inputs into jsonb object");
+DATA(insert OID = 3259 (  jsonb_build_array	   PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_array _null_ _null_ _null_ ));
+DESCR("build a jsonb array from any inputs");
+DATA(insert OID = 3260 (  jsonb_build_array	   PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_array_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb array");
+DATA(insert OID = 3261 (  jsonb_build_object    PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_object _null_ _null_ _null_ ));
+DESCR("build a jsonb object from pairwise key/value inputs");
+DATA(insert OID = 3262 (  jsonb_build_object    PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_object_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb object");
+
 DATA(insert OID = 3478 (  jsonb_object_field			PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field _null_ _null_ _null_ ));
 DATA(insert OID = 3214 (  jsonb_object_field_text	PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 25  "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field_text _null_ _null_ _null_ ));
 DATA(insert OID = 3215 (  jsonb_array_element		PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 23" _null_ _null_ "{from_json, element_index}" _null_ jsonb_array_element _null_ _null_ _null_ ));
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index 91e3e14..7fe12b8 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -315,6 +315,22 @@ extern Datum jsonb_recv(PG_FUNCTION_ARGS);
 extern Datum jsonb_send(PG_FUNCTION_ARGS);
 extern Datum jsonb_typeof(PG_FUNCTION_ARGS);
 
+/* generator routines */
+extern Datum to_jsonb(PG_FUNCTION_ARGS);
+
+extern Datum jsonb_build_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_object_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_two_arg(PG_FUNCTION_ARGS);
+
+/* jsonb_agg, json_object_agg functions */
+extern Datum jsonb_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_agg_finalfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_finalfn(PG_FUNCTION_ARGS);
+
 /* Indexing-related ops */
 extern Datum jsonb_exists(PG_FUNCTION_ARGS);
 extern Datum jsonb_exists_any(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index eb37da7..d449757 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -301,6 +301,28 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1220,6 +1242,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/expected/jsonb_1.out b/src/test/regress/expected/jsonb_1.out
index f3bfc7b..a64158c 100644
--- a/src/test/regress/expected/jsonb_1.out
+++ b/src/test/regress/expected/jsonb_1.out
@@ -301,6 +301,28 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1220,6 +1242,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index ed266d5..cf79cbf 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -62,6 +62,22 @@ SELECT '    '::jsonb;			-- ERROR, no value
 -- make sure jsonb is passed through json generators without being escaped
 SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
 
+--jsonb_agg
+
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -256,6 +272,86 @@ SELECT jsonb_typeof('"hello"') AS string;
 SELECT jsonb_typeof('"true"') AS string;
 SELECT jsonb_typeof('"1.0"') AS string;
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+
+SELECT jsonb_build_object();
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+
+-- jsonb_object
+
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+
+--two argument form of jsonb_object
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- mismatched dimensions
+
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+
+-- null key error
+
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+
+-- empty key is allowed
+
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+
+
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
#6Pavel Stehule
pavel.stehule@gmail.com
In reply to: Andrew Dunstan (#5)
Re: jsonb generator functions

2014-10-13 17:22 GMT+02:00 Andrew Dunstan <andrew@dunslane.net>:

On 10/13/2014 09:37 AM, Andrew Dunstan wrote:

On 09/26/2014 04:54 PM, Andrew Dunstan wrote:

Here is a patch for the generator and aggregate functions for jsonb that
we didn't manage to get done in time for 9.4. They are all equivalents of
the similarly names json functions. Included are

to_jsonb
jsonb_build_object
jsonb_build_array
jsonb_object
jsonb_agg
jsonb_object_agg

Still to come: documentation.

Adding to the next commitfest.

Revised patch to fix compiler warnings.

And again, initializing an incompletely initialized variable, as found by
Pavel Stehule.

I checked the code, and I have only two small objections - the name
"jsonb_object_two_arg" is not good - maybe "json_object_keys_values"?

Next: there are no tests for the to_jsonb function.

Regards

Pavel


#7Andrew Dunstan
andrew@dunslane.net
In reply to: Pavel Stehule (#6)
Re: jsonb generator functions

On 10/15/2014 07:38 AM, Pavel Stehule wrote:

2014-10-13 17:22 GMT+02:00 Andrew Dunstan <andrew@dunslane.net
<mailto:andrew@dunslane.net>>:

On 10/13/2014 09:37 AM, Andrew Dunstan wrote:

On 09/26/2014 04:54 PM, Andrew Dunstan wrote:

Here is a patch for the generator and aggregate functions
for jsonb that we didn't manage to get done in time for
9.4. They are all equivalents of the similarly names json
functions. Included are

to_jsonb
jsonb_build_object
jsonb_build_array
jsonb_object
jsonb_agg
jsonb_object_agg

Still to come: documentation.

Adding to the next commitfest.

Revised patch to fix compiler warnings.

And again, initializing an incompletely initialized variable, as
found by Pavel Stehule.

I checked the code, and I have only two small objections - the name
"jsonb_object_two_arg" is not good - maybe "json_object_keys_values"?

It's consistent with the existing json_object_two_arg. In all cases I
think I kept the names the same except for changing "json" to "jsonb".
Note that these _two_arg functions are not visible at the SQL level -
they are only visible in the C code.

I'm happy to be guided by others in changing or keeping these names.
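
For anyone reading along, the naming question is purely internal, since at
the SQL level both forms are spelled jsonb_object. Roughly, borrowing from
the regression tests in the patch:

    -- one-argument form (jsonb_object in jsonb.c)
    SELECT jsonb_object('{a,1,b,2}');        -- {"a": "1", "b": "2"}

    -- two-argument form (jsonb_object_two_arg in jsonb.c)
    SELECT jsonb_object('{a,b}', '{1,2}');   -- {"a": "1", "b": "2"}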

Next: there are no tests for the to_jsonb function.

Oh, my bad. I'll add some.
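
Something along these lines, perhaps - only a sketch of possible cases for
now, modelled on the to_json example in the docs rather than anything final:

    SELECT to_jsonb('Fred said "Hi."'::text);
    SELECT to_jsonb(1.5::numeric);
    SELECT to_jsonb(ARRAY[1,2,3]);
    SELECT to_jsonb(ROW(42, 'forty-two'::text));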

Thanks for the review.

cheers

andrew


#8Andrew Dunstan
andrew@dunslane.net
In reply to: Andrew Dunstan (#7)
Re: jsonb generator functions

On 10/15/2014 03:54 PM, I wrote:

On 10/15/2014 07:38 AM, Pavel Stehule wrote:

I checked the code, and I have only two small objections - the name
"jsonb_object_two_arg" is not good - maybe "json_object_keys_values"?

It's consistent with the existing json_object_two_arg. In all cases I
think I kept the names the same except for changing "json" to "jsonb".
Note that these _two_arg functions are not visible at the SQL level -
they are only visible in the C code.

I'm happy to be guided by others in changing or keeping these names.

If we really want to change the name of json_object_two_arg, it would
probably be best to change it NOW in 9.4 before it gets out into a
production release at all.

Thoughts?

cheers

andrew


#9Alvaro Herrera
alvherre@2ndquadrant.com
In reply to: Andrew Dunstan (#8)
Re: jsonb generator functions

Andrew Dunstan wrote:

If we really want to change the name of json_object_two_arg, it
would probably be best to change it NOW in 9.4 before it gets out
into a production release at all.

Doesn't it require initdb? If so, I think it's too late now.

--
Álvaro Herrera http://www.2ndQuadrant.com/
PostgreSQL Development, 24x7 Support, Training & Services


#10Andrew Dunstan
andrew@dunslane.net
In reply to: Alvaro Herrera (#9)
Re: jsonb generator functions

On 10/15/2014 05:47 PM, Alvaro Herrera wrote:

Andrew Dunstan wrote:

If we really want to change the name of json_object_two_arg, it
would probably be best to change it NOW in 9.4 before it gets out
into a production release at all.

Doesn't it require initdb? If so, I think it's too late now.

Yeah, you're right, it would.

OK, forget that.

cheers

andrew


#11Pavel Stehule
pavel.stehule@gmail.com
In reply to: Andrew Dunstan (#10)
Re: jsonb generator functions

2014-10-15 23:49 GMT+02:00 Andrew Dunstan <andrew@dunslane.net>:

On 10/15/2014 05:47 PM, Alvaro Herrera wrote:

Andrew Dunstan wrote:

If we really want to change the name of json_object_two_arg, it

would probably be best to change it NOW in 9.4 before it gets out
into a production release at all.

Doesn't it require initdb? If so, I think it's too late now.

yes, that is a compelling argument.

ok

Pavel


#12Andrew Dunstan
andrew@dunslane.net
In reply to: Andrew Dunstan (#7)
1 attachment(s)
Re: jsonb generator functions

On 10/15/2014 03:54 PM, Andrew Dunstan wrote:

I checked the code, and I have only two small objections - the name
"jsonb_object_two_arg" is not good - maybe "json_object_keys_values"?

It's consistent with the existing json_object_two_arg. In all cases I
think I kept the names the same except for changing "json" to "jsonb".
Note that these _two_arg functions are not visible at the SQL level -
they are only visible in the C code.

I'm happy to be guided by others in changing or keeping these names.

Next: there are no tests for the to_jsonb function.

Oh, my bad. I'll add some.

Thanks for the review.

Here is a new patch that includes documentation and addresses all these
issues, except that I didn't change the name of jsonb_object_two_arg to
keep it consistent with the name of json_object_two_arg. I'm happy to
change both if people feel it matters.
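
As a quick illustration of what the new documentation covers, the aggregates
are used just like their json counterparts, e.g. (queries lifted from the
regression tests in the patch, which also create the rows and foo tables):

    SELECT jsonb_agg(q) FROM rows q;

    SELECT jsonb_build_object('turbines',
           jsonb_object_agg(serial_num,
                            jsonb_build_object('name', name, 'type', type)))
    FROM foo;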

cheers

andrew

Attachments:

jsonbmissingfuncs5.patchtext/x-diff; name=jsonbmissingfuncs5.patchDownload
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index 7e5bcd9..21ce293 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10245,9 +10245,10 @@ table2-mapping
 
   <para>
    <xref linkend="functions-json-creation-table"> shows the functions that are
-   available for creating <type>json</type> values.
-   (Currently, there are no equivalent functions for <type>jsonb</>, but you
-   can cast the result of one of these functions to <type>jsonb</>.)
+   available for creating <type>json</type> and <type>jsonb</type> values.
+   (There are no <type>jsonb</> equivalents of the <literal>row_to_json</>
+   and <literal>array_to_json</> functions. However, the <literal>to_jsonb</>
+   function supplies much the same functionality as those functions would.)
   </para>
 
   <indexterm>
@@ -10268,6 +10269,18 @@ table2-mapping
   <indexterm>
    <primary>json_object</primary>
   </indexterm>
+  <indexterm>
+   <primary>to_jsonb</primary>
+  </indexterm>
+  <indexterm>
+   <primary>jsonb_build_array</primary>
+  </indexterm>
+  <indexterm>
+   <primary>jsonb_build_object</primary>
+  </indexterm>
+  <indexterm>
+   <primary>jsonb_object</primary>
+  </indexterm>
 
   <table id="functions-json-creation-table">
     <title>JSON Creation Functions</title>
@@ -10282,17 +10295,19 @@ table2-mapping
      </thead>
      <tbody>
       <row>
+       <entry><para><literal>to_json(anyelement)</literal>
+          </para><para><literal>to_jsonb(anyelement)</literal>
+       </para></entry>
        <entry>
-         <literal>to_json(anyelement)</literal>
-       </entry>
-       <entry>
-         Returns the value as JSON.  Arrays and composites are converted
+         Returns the value as <type>json</> or <type>jsonb</>.
+         Arrays and composites are converted
          (recursively) to arrays and objects; otherwise, if there is a cast
-         from the type to <type>json</type>, the cast function will be used to
-         perform the conversion; otherwise, a JSON scalar value is produced.
+         from the type to <type>json</type> (or, in the case of
+         <literal>to_jsonb</>, to <type>jsonb</>), the cast function will be used to
+         perform the conversion; otherwise, a scalar value is produced.
          For any scalar type other than a number, a Boolean, or a null value,
-         the text representation will be used, properly quoted and escaped
-         so that it is a valid JSON string.
+         the text representation will be used, in such a fashion that it is a 
+         valid <type>json</> or <type>jsonb</> value.
        </entry>
        <entry><literal>to_json('Fred said "Hi."'::text)</literal></entry>
        <entry><literal>"Fred said \"Hi.\""</literal></entry>
@@ -10321,9 +10336,9 @@ table2-mapping
        <entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_build_array(VARIADIC "any")</literal>
-       </entry>
+       <entry><para><literal>json_build_array(VARIADIC "any")</literal>
+          </para><para><literal>jsonb_build_array(VARIADIC "any")</literal>
+       </para></entry>
        <entry>
          Builds a possibly-heterogeneously-typed JSON array out of a variadic
          argument list.
@@ -10332,9 +10347,9 @@ table2-mapping
        <entry><literal>[1, 2, "3", 4, 5]</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_build_object(VARIADIC "any")</literal>
-       </entry>
+       <entry><para><literal>json_build_object(VARIADIC "any")</literal>
+          </para><para><literal>jsonb_build_object(VARIADIC "any")</literal>
+       </para></entry>
        <entry>
          Builds a JSON object out of a variadic argument list.  By
          convention, the argument list consists of alternating
@@ -10344,9 +10359,9 @@ table2-mapping
        <entry><literal>{"foo": 1, "bar": 2}</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_object(text[])</literal>
-       </entry>
+       <entry><para><literal>json_object(text[])</literal>
+          </para><para><literal>jsonb_object(text[])</literal>
+       </para></entry>
        <entry>
          Builds a JSON object out of a text array.  The array must have either
          exactly one dimension with an even number of members, in which case
@@ -10359,9 +10374,9 @@ table2-mapping
        <entry><literal>{"a": "1", "b": "def", "c": "3.5"}</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_object(keys text[], values text[])</literal>
-       </entry>
+       <entry><para><literal>json_object(keys text[], values text[])</literal>
+          </para><para><literal>jsonb_object(keys text[], values text[])</literal>
+       </para></entry>
        <entry>
          This form of <function>json_object</> takes keys and values pairwise from two separate
          arrays. In all other respects it is identical to the one-argument form.
@@ -10757,7 +10772,8 @@ table2-mapping
     function <function>json_agg</function> which aggregates record
     values as JSON, and the aggregate function
     <function>json_object_agg</function> which aggregates pairs of values
-    into a JSON object.
+    into a JSON object, as well as their <type>jsonb</type> equivalents,
+    <function>jsonb_agg</> and <function>jsonb_object_agg</>.
   </para>
 
  </sect1>
@@ -12189,6 +12205,22 @@ NULL baz</literallayout>(3 rows)</entry>
      <row>
       <entry>
        <indexterm>
+        <primary>jsonb_agg</primary>
+       </indexterm>
+       <function>jsonb_agg(<replaceable class="parameter">record</replaceable>)</function>
+      </entry>
+      <entry>
+       <type>record</type>
+      </entry>
+      <entry>
+       <type>jsonb</type>
+      </entry>
+      <entry>aggregates records as a JSON array of objects</entry>
+     </row>
+
+     <row>
+      <entry>
+       <indexterm>
         <primary>json_object_agg</primary>
        </indexterm>
        <function>json_object_agg(<replaceable class="parameter">name</replaceable>, <replaceable class="parameter">value</replaceable>)</function>
@@ -12205,6 +12237,22 @@ NULL baz</literallayout>(3 rows)</entry>
      <row>
       <entry>
        <indexterm>
+        <primary>jsonb_object_agg</primary>
+       </indexterm>
+       <function>jsonb_object_agg(<replaceable class="parameter">name</replaceable>, <replaceable class="parameter">value</replaceable>)</function>
+      </entry>
+      <entry>
+       <type>("any", "any")</type>
+      </entry>
+      <entry>
+       <type>jsonb</type>
+      </entry>
+      <entry>aggregates name/value pairs as a JSON object</entry>
+     </row>
+
+     <row>
+      <entry>
+       <indexterm>
         <primary>max</primary>
        </indexterm>
        <function>max(<replaceable class="parameter">expression</replaceable>)</function>
@@ -12348,8 +12396,8 @@ SELECT count(*) FROM sometable;
 
   <para>
    The aggregate functions <function>array_agg</function>,
-   <function>json_agg</function>,
-   <function>json_object_agg</function>,
+   <function>json_agg</function>, <function>jsonb_agg</function>,
+   <function>json_object_agg</function>, <function>jsonb_object_agg</function>,
    <function>string_agg</function>,
    and <function>xmlagg</function>, as well as similar user-defined
    aggregate functions, produce meaningfully different result values
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
index 9beebb3..89890ea 100644
--- a/src/backend/utils/adt/jsonb.c
+++ b/src/backend/utils/adt/jsonb.c
@@ -12,11 +12,20 @@
  */
 #include "postgres.h"
 
+#include "miscadmin.h"
+#include "access/htup_details.h"
+#include "access/transam.h"
+#include "catalog/pg_cast.h"
+#include "catalog/pg_type.h"
 #include "libpq/pqformat.h"
 #include "utils/builtins.h"
+#include "utils/datetime.h"
+#include "utils/lsyscache.h"
 #include "utils/json.h"
 #include "utils/jsonapi.h"
 #include "utils/jsonb.h"
+#include "utils/syscache.h"
+#include "utils/typcache.h"
 
 typedef struct JsonbInState
 {
@@ -24,6 +33,23 @@ typedef struct JsonbInState
 	JsonbValue *res;
 } JsonbInState;
 
+/* unlike with json categories, we need to treat json and jsonb differently */
+typedef enum					/* type categories for datum_to_jsonb */
+{
+	JSONBTYPE_NULL,				/* null, so we didn't bother to identify */
+	JSONBTYPE_BOOL,				/* boolean (built-in types only) */
+	JSONBTYPE_NUMERIC,			/* numeric (ditto) */
+	JSONBTYPE_TIMESTAMP,		/* we use special formatting for timestamp */
+	JSONBTYPE_TIMESTAMPTZ,		/* ... and timestamptz */
+	JSONBTYPE_JSON,				/* JSON */
+	JSONBTYPE_JSONB,			/* JSONB */
+	JSONBTYPE_ARRAY,			/* array */
+	JSONBTYPE_COMPOSITE,		/* composite */
+	JSONBTYPE_JSONCAST,			/* something with an explicit cast to JSON */
+	JSONBTYPE_JSONBCAST,		/* something with an explicit cast to JSONB */
+	JSONBTYPE_OTHER				/* all else */
+}	JsonbTypeCategory;
+
 static inline Datum jsonb_from_cstring(char *json, int len);
 static size_t checkStringLen(size_t len);
 static void jsonb_in_object_start(void *pstate);
@@ -33,6 +59,22 @@ static void jsonb_in_array_end(void *pstate);
 static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull);
 static void jsonb_put_escaped_value(StringInfo out, JsonbValue *scalarVal);
 static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void composite_to_jsonb(Datum composite, JsonbInState *result);
+static void array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims,
+				   Datum *vals, bool *nulls, int *valcount,
+				   JsonbTypeCategory tcategory, Oid outfuncoid);
+static void array_to_jsonb_internal(Datum array, JsonbInState *result);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar);
+static void add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar);
 
 /*
  * jsonb type input function
@@ -462,3 +504,1284 @@ JsonbToCString(StringInfo out, JsonbContainer *in, int estimated_len)
 
 	return out->data;
 }
+
+
+/*
+ * Determine how we want to render values of a given type in datum_to_jsonb.
+ *
+ * Given the datatype OID, return its JsonbTypeCategory, as well as the type's
+ * output function OID.  If the returned category is JSONBTYPE_JSONCAST or
+ * JSONBTYPE_JSONBCAST, we return the OID of the relevant cast function instead.
+ */
+static void
+jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid)
+{
+	bool		typisvarlena;
+
+	/* Look through any domain */
+	typoid = getBaseType(typoid);
+
+	/* We'll usually need to return the type output function */
+	getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+
+	/* Check for known types */
+	switch (typoid)
+	{
+		case BOOLOID:
+			*tcategory = JSONBTYPE_BOOL;
+			break;
+
+		case INT2OID:
+		case INT4OID:
+		case INT8OID:
+		case FLOAT4OID:
+		case FLOAT8OID:
+		case NUMERICOID:
+			*tcategory = JSONBTYPE_NUMERIC;
+			break;
+
+		case TIMESTAMPOID:
+			*tcategory = JSONBTYPE_TIMESTAMP;
+			break;
+
+		case TIMESTAMPTZOID:
+			*tcategory = JSONBTYPE_TIMESTAMPTZ;
+			break;
+
+		case JSONBOID:
+			*tcategory = JSONBTYPE_JSONB;
+			break;
+
+		case JSONOID:
+			*tcategory = JSONBTYPE_JSON;
+			break;
+
+		default:
+			/* Check for arrays and composites */
+			if (OidIsValid(get_element_type(typoid)))
+				*tcategory = JSONBTYPE_ARRAY;
+			else if (type_is_rowtype(typoid))
+				*tcategory = JSONBTYPE_COMPOSITE;
+			else
+			{
+				/* It's probably the general case ... */
+				*tcategory = JSONBTYPE_OTHER;
+
+				/*
+				 * but let's look for a cast to json or jsonb, if it's not
+				 * built-in
+				 */
+				if (typoid >= FirstNormalObjectId)
+				{
+					HeapTuple	tuple;
+
+					tuple = SearchSysCache2(CASTSOURCETARGET,
+											ObjectIdGetDatum(typoid),
+											ObjectIdGetDatum(JSONBOID));
+					if (HeapTupleIsValid(tuple))
+					{
+						Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+						if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+						{
+							*tcategory = JSONBTYPE_JSONBCAST;
+							*outfuncoid = castForm->castfunc;
+						}
+
+						ReleaseSysCache(tuple);
+					}
+					else
+					{
+						tuple = SearchSysCache2(CASTSOURCETARGET,
+												ObjectIdGetDatum(typoid),
+												ObjectIdGetDatum(JSONOID));
+						if (HeapTupleIsValid(tuple))
+						{
+							Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
+
+							if (castForm->castmethod == COERCION_METHOD_FUNCTION)
+							{
+								*tcategory = JSONBTYPE_JSONCAST;
+								*outfuncoid = castForm->castfunc;
+							}
+
+							ReleaseSysCache(tuple);
+						}
+					}
+				}
+				break;
+			}
+	}
+}
+
+/*
+ * Turn a Datum into jsonb, adding it to the result JsonbInState.
+ *
+ * tcategory and outfuncoid are from a previous call to jsonb_categorize_type,
+ * except that if is_null is true then they can be invalid.
+ *
+ * If key_scalar is true, the value is stored as a key, so insist
+ * it's of an acceptable type, and force it to be a jbvString.
+ */
+static void
+datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar)
+{
+	char	   *outputstr;
+	bool		numeric_error;
+	JsonbValue	jb;
+	bool		scalar_jsonb = false;
+
+	if (is_null)
+	{
+		jb.type = jbvNull;
+	}
+	else if (key_scalar &&
+			 (tcategory == JSONBTYPE_ARRAY ||
+			  tcategory == JSONBTYPE_COMPOSITE ||
+			  tcategory == JSONBTYPE_JSON ||
+			  tcategory == JSONBTYPE_JSONB ||
+			  tcategory == JSONBTYPE_JSONCAST ||
+			  tcategory == JSONBTYPE_JSONBCAST))
+	{
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+		  errmsg("key value must be scalar, not array, composite or json")));
+	}
+	else
+	{
+		if (tcategory == JSONBTYPE_JSONCAST || tcategory == JSONBTYPE_JSONBCAST)
+			val = OidFunctionCall1(outfuncoid, val);
+
+		switch (tcategory)
+		{
+			case JSONBTYPE_ARRAY:
+				array_to_jsonb_internal(val, result);
+				break;
+			case JSONBTYPE_COMPOSITE:
+				composite_to_jsonb(val, result);
+				break;
+			case JSONBTYPE_BOOL:
+				if (key_scalar)
+				{
+					outputstr = DatumGetBool(val) ? "true" : "false";
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					jb.type = jbvBool;
+					jb.val.boolean = DatumGetBool(val);
+				}
+				break;
+			case JSONBTYPE_NUMERIC:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				if (key_scalar)
+				{
+					/* always quote keys */
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					/*
+					 * Make it numeric if it's a valid JSON number, otherwise
+					 * a string. Invalid numeric output will always have an
+					 * 'N' or 'n' in it (I think).
+					 */
+					numeric_error = (strchr(outputstr, 'N') != NULL ||
+									 strchr(outputstr, 'n') != NULL);
+					if (!numeric_error)
+					{
+						jb.type = jbvNumeric;
+						jb.val.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(outputstr), 0, -1));
+
+						pfree(outputstr);
+					}
+					else
+					{
+						jb.type = jbvString;
+						jb.val.string.len = strlen(outputstr);
+						jb.val.string.val = outputstr;
+					}
+				}
+				break;
+			case JSONBTYPE_TIMESTAMP:
+				{
+					Timestamp	timestamp;
+					struct pg_tm tm;
+					fsec_t		fsec;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestamp(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
+						EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_TIMESTAMPTZ:
+				{
+					TimestampTz timestamp;
+					struct pg_tm tm;
+					int			tz;
+					fsec_t		fsec;
+					const char *tzn = NULL;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestamp(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0)
+						EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_JSONCAST:
+			case JSONBTYPE_JSON:
+				{
+					/* parse the json right into the existing result object */
+					JsonLexContext *lex;
+					JsonSemAction sem;
+					text	   *json = DatumGetTextP(val);
+
+					lex = makeJsonLexContext(json, true);
+
+					memset(&sem, 0, sizeof(sem));
+
+					sem.semstate = (void *) result;
+
+					sem.object_start = jsonb_in_object_start;
+					sem.array_start = jsonb_in_array_start;
+					sem.object_end = jsonb_in_object_end;
+					sem.array_end = jsonb_in_array_end;
+					sem.scalar = jsonb_in_scalar;
+					sem.object_field_start = jsonb_in_object_field_start;
+
+					pg_parse_json(lex, &sem);
+
+				}
+				break;
+			case JSONBTYPE_JSONBCAST:
+			case JSONBTYPE_JSONB:
+				{
+					Jsonb	   *jsonb = DatumGetJsonb(val);
+					int			type;
+					JsonbIterator *it;
+
+					it = JsonbIteratorInit(&jsonb->root);
+
+					if (JB_ROOT_IS_SCALAR(jsonb))
+					{
+						(void) JsonbIteratorNext(&it, &jb, true);
+						Assert(jb.type == jbvArray);
+						(void) JsonbIteratorNext(&it, &jb, true);
+						scalar_jsonb = true;
+					}
+					else
+					{
+						while ((type = JsonbIteratorNext(&it, &jb, false))
+							   != WJB_DONE)
+						{
+							if (type == WJB_END_ARRAY || type == WJB_END_OBJECT ||
+								type == WJB_BEGIN_ARRAY || type == WJB_BEGIN_OBJECT)
+								result->res = pushJsonbValue(&result->parseState,
+															 type, NULL);
+							else
+								result->res = pushJsonbValue(&result->parseState,
+															 type, &jb);
+						}
+					}
+				}
+				break;
+			default:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				jb.type = jbvString;
+				jb.val.string.len = checkStringLen(strlen(outputstr));
+				jb.val.string.val = outputstr;
+				break;
+		}
+	}
+	if (tcategory >= JSONBTYPE_JSON && tcategory <= JSONBTYPE_JSONBCAST &&
+		!scalar_jsonb)
+	{
+		/* work has been done recursively */
+		return;
+	}
+	else if (result->parseState == NULL)
+	{
+		/* single root scalar */
+		JsonbValue	va;
+
+		va.type = jbvArray;
+		va.val.array.rawScalar = true;
+		va.val.array.nElems = 1;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, &va);
+		result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+	}
+	else
+	{
+		JsonbValue *o = &result->parseState->contVal;
+
+		switch (o->type)
+		{
+			case jbvArray:
+				result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+				break;
+			case jbvObject:
+				result->res = pushJsonbValue(&result->parseState,
+											 key_scalar ? WJB_KEY : WJB_VALUE,
+											 &jb);
+				break;
+			default:
+				elog(ERROR, "unexpected parent of nested structure");
+		}
+	}
+}
+
+/*
+ * Process a single dimension of an array.
+ * If it's the innermost dimension, output the values, otherwise call
+ * ourselves recursively to process the next dimension.
+ */
+static void
+array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims, Datum *vals,
+				   bool *nulls, int *valcount, JsonbTypeCategory tcategory,
+				   Oid outfuncoid)
+{
+	int			i;
+
+	Assert(dim < ndims);
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 1; i <= dims[dim]; i++)
+	{
+		if (dim + 1 == ndims)
+		{
+			datum_to_jsonb(vals[*valcount], nulls[*valcount], result, tcategory,
+						   outfuncoid, false);
+			(*valcount)++;
+		}
+		else
+		{
+			array_dim_to_jsonb(result, dim + 1, ndims, dims, vals, nulls,
+							   valcount, tcategory, outfuncoid);
+		}
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+}
+
+/*
+ * Turn an array into JSON.
+ */
+static void
+array_to_jsonb_internal(Datum array, JsonbInState *result)
+{
+	ArrayType  *v = DatumGetArrayTypeP(array);
+	Oid			element_type = ARR_ELEMTYPE(v);
+	int		   *dim;
+	int			ndim;
+	int			nitems;
+	int			count = 0;
+	Datum	   *elements;
+	bool	   *nulls;
+	int16		typlen;
+	bool		typbyval;
+	char		typalign;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	ndim = ARR_NDIM(v);
+	dim = ARR_DIMS(v);
+	nitems = ArrayGetNItems(ndim, dim);
+
+	if (nitems <= 0)
+	{
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+		return;
+	}
+
+	get_typlenbyvalalign(element_type,
+						 &typlen, &typbyval, &typalign);
+
+	jsonb_categorize_type(element_type,
+						  &tcategory, &outfuncoid);
+
+	deconstruct_array(v, element_type, typlen, typbyval,
+					  typalign, &elements, &nulls,
+					  &nitems);
+
+	array_dim_to_jsonb(result, 0, ndim, dim, elements, nulls, &count, tcategory,
+					   outfuncoid);
+
+	pfree(elements);
+	pfree(nulls);
+}
+
+/*
+ * Turn a composite / record into JSON.
+ */
+static void
+composite_to_jsonb(Datum composite, JsonbInState *result)
+{
+	HeapTupleHeader td;
+	Oid			tupType;
+	int32		tupTypmod;
+	TupleDesc	tupdesc;
+	HeapTupleData tmptup,
+			   *tuple;
+	int			i;
+
+	td = DatumGetHeapTupleHeader(composite);
+
+	/* Extract rowtype info and find a tupdesc */
+	tupType = HeapTupleHeaderGetTypeId(td);
+	tupTypmod = HeapTupleHeaderGetTypMod(td);
+	tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+
+	/* Build a temporary HeapTuple control structure */
+	tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
+	tmptup.t_data = td;
+	tuple = &tmptup;
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < tupdesc->natts; i++)
+	{
+		Datum		val;
+		bool		isnull;
+		char	   *attname;
+		JsonbTypeCategory tcategory;
+		Oid			outfuncoid;
+		JsonbValue	v;
+
+		if (tupdesc->attrs[i]->attisdropped)
+			continue;
+
+		attname = NameStr(tupdesc->attrs[i]->attname);
+
+		v.type = jbvString;
+		/* don't need checkStringLen here - can't exceed maximum name length */
+		v.val.string.len = strlen(attname);
+		v.val.string.val = attname;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_KEY, &v);
+
+		val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+		if (isnull)
+		{
+			tcategory = JSONBTYPE_NULL;
+			outfuncoid = InvalidOid;
+		}
+		else
+			jsonb_categorize_type(tupdesc->attrs[i]->atttypid,
+								  &tcategory, &outfuncoid);
+
+		datum_to_jsonb(val, isnull, result, tcategory, outfuncoid, false);
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_OBJECT, NULL);
+	ReleaseTupleDesc(tupdesc);
+}
+
+/*
+ * Append JSON text for "val" to "result".
+ *
+ * This is just a thin wrapper around datum_to_jsonb.  If the same type will be
+ * printed many times, avoid using this; better to do the jsonb_categorize_type
+ * lookups only once.
+ */
+
+static void
+add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar)
+{
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (is_null)
+	{
+		tcategory = JSONBTYPE_NULL;
+		outfuncoid = InvalidOid;
+	}
+	else
+		jsonb_categorize_type(val_type,
+							  &tcategory, &outfuncoid);
+
+	datum_to_jsonb(val, is_null, result, tcategory, outfuncoid, key_scalar);
+}
+
+/*
+ * SQL function to_jsonb(anyvalue)
+ */
+Datum
+to_jsonb(PG_FUNCTION_ARGS)
+{
+	Datum		val = PG_GETARG_DATUM(0);
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
+	JsonbInState result;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &result, tcategory, outfuncoid, false);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_object(variadic "any")
+ */
+Datum
+jsonb_build_object(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	if (nargs % 2 != 0)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("invalid number of arguments: object must have matched key value pairs")));
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < nargs; i += 2)
+	{
+
+		/* process key */
+
+		if (PG_ARGISNULL(i))
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: key cannot be null", i + 1)));
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+
+		/*
+		 * turn a constant (more or less literal) value that's of unknown type
+		 * into text. Unknowns come in as a cstring pointer.
+		 */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+
+		add_jsonb(arg, false, &result, val_type, true);
+
+		/* process value */
+
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
+		/* see comments above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i + 1))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i + 1));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i + 1);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 2)));
+		add_jsonb(arg, PG_ARGISNULL(i + 1), &result, val_type, false);
+
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_object where it gets 0 arguments.
+ */
+Datum
+jsonb_build_object_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_array(variadic "any")
+ */
+Datum
+jsonb_build_array(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 0; i < nargs; i++)
+	{
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+		/* see comments in jsonb_build_object above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+		add_jsonb(arg, PG_ARGISNULL(i), &result, val_type, false);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_array where it gets 0 arguments.
+ */
+Datum
+jsonb_build_array_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * SQL function jsonb_object(text[])
+ *
+ * take a one or two dimensional array of text as name value pairs
+ * for a jsonb object.
+ *
+ */
+Datum
+jsonb_object(PG_FUNCTION_ARGS)
+{
+	ArrayType  *in_array = PG_GETARG_ARRAYTYPE_P(0);
+	int			ndims = ARR_NDIM(in_array);
+	Datum	   *in_datums;
+	bool	   *in_nulls;
+	int			in_count,
+				count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	switch (ndims)
+	{
+		case 0:
+			goto close_object;
+			break;
+
+		case 1:
+			if ((ARR_DIMS(in_array)[0]) % 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have even number of elements")));
+			break;
+
+		case 2:
+			if ((ARR_DIMS(in_array)[1]) != 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have two columns")));
+			break;
+
+		default:
+			ereport(ERROR,
+					(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+					 errmsg("wrong number of array subscripts")));
+	}
+
+	deconstruct_array(in_array,
+					  TEXTOID, -1, false, 'i',
+					  &in_datums, &in_nulls, &in_count);
+
+	count = in_count / 2;
+
+	for (i = 0; i < count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (in_nulls[i * 2])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(in_datums[i * 2]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (in_nulls[i * 2 + 1])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(in_datums[i * 2 + 1]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	pfree(in_datums);
+	pfree(in_nulls);
+
+close_object:
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_object(text[], text[])
+ *
+ * take separate name and value arrays of text to construct a jsonb object
+ * pairwise.
+ */
+Datum
+jsonb_object_two_arg(PG_FUNCTION_ARGS)
+{
+	ArrayType  *key_array = PG_GETARG_ARRAYTYPE_P(0);
+	ArrayType  *val_array = PG_GETARG_ARRAYTYPE_P(1);
+	int			nkdims = ARR_NDIM(key_array);
+	int			nvdims = ARR_NDIM(val_array);
+	Datum	   *key_datums,
+			   *val_datums;
+	bool	   *key_nulls,
+			   *val_nulls;
+	int			key_count,
+				val_count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	if (nkdims > 1 || nkdims != nvdims)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("wrong number of array subscripts")));
+
+	if (nkdims == 0)
+		PG_RETURN_POINTER(JsonbValueToJsonb(
+					pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL)));
+
+	deconstruct_array(key_array,
+					  TEXTOID, -1, false, 'i',
+					  &key_datums, &key_nulls, &key_count);
+
+	deconstruct_array(val_array,
+					  TEXTOID, -1, false, 'i',
+					  &val_datums, &val_nulls, &val_count);
+
+	if (key_count != val_count)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("mismatched array dimensions")));
+
+	for (i = 0; i < key_count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (key_nulls[i])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(key_datums[i]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (val_nulls[i])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(val_datums[i]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	pfree(key_datums);
+	pfree(key_nulls);
+	pfree(val_datums);
+	pfree(val_nulls);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * jsonb_agg aggregate function
+ */
+Datum
+jsonb_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar = false;
+	JsonbIterator *it;
+	Jsonb	   *jbelem;
+	JsonbValue	v;
+	int			type;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, PG_ARGISNULL(1), &elem, tcategory, outfuncoid, false);
+
+	jbelem = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_ARRAY, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbelem->root);
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+				  v.val.numeric =
+					DatumGetNumeric(DirectFunctionCall1(numeric_uplus,
+														NumericGetDatum(v.val.numeric)));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 type, &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	result->res = pushJsonbValue(&result->parseState,
+								 WJB_END_ARRAY, NULL);
+
+
+	out = JsonbValueToJsonb(result->res);
+
+	PG_RETURN_POINTER(out);
+}
+
+/*
+ * jsonb_object_agg aggregate function
+ */
+Datum
+jsonb_object_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type;
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar;
+	JsonbIterator *it;
+	Jsonb	   *jbkey,
+			   *jbval;
+	JsonbValue	v;
+	int			type;
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_object_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, PG_ARGISNULL(1), &elem, tcategory, outfuncoid, true);
+
+	jbkey = JsonbValueToJsonb(elem.res);
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(2) ? (Datum) 0 : PG_GETARG_DATUM(2);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, PG_ARGISNULL(2), &elem, tcategory, outfuncoid, false);
+
+	jbval = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_OBJECT, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbkey->root);
+
+	/*
+	 * keys should be scalar, and we should have already checked for that
+	 * above when calling datum_to_jsonb, so we only need to look for these
+	 * things.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (!v.val.array.rawScalar)
+					elog(ERROR, "unexpected structure for key");
+				break;
+			case WJB_ELEM:
+				if (v.type == jbvString)
+				{
+					/* copy string values into the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else
+				{
+					ereport(ERROR,
+							(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+							 errmsg("object keys must be strings")));
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 WJB_KEY, &v);
+				break;
+			case WJB_END_ARRAY:
+				break;
+			default:
+				elog(ERROR, "unexpected structure for key");
+				break;
+		}
+	}
+
+	it = JsonbIteratorInit(&jbval->root);
+
+	single_scalar = false;
+
+	/*
+	 * values can be anything, including structured and null, so we treat
+	 * them as in json_agg_transfn, except that single scalars are always
+	 * pushed as WJB_VALUE items.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values into the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+					v.val.numeric =
+					  DatumGetNumeric(DirectFunctionCall1(numeric_uplus,
+														  NumericGetDatum(v.val.numeric)));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 single_scalar ? WJB_VALUE : type,
+											 &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_object_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	result->res = pushJsonbValue(&result->parseState,
+								 WJB_END_OBJECT, NULL);
+
+
+	out = JsonbValueToJsonb(result->res);
+
+	PG_RETURN_POINTER(out);
+}
diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c
index 2ff8539..4cce30d 100644
--- a/src/backend/utils/adt/jsonb_util.c
+++ b/src/backend/utils/adt/jsonb_util.c
@@ -1427,7 +1427,7 @@ convertJsonbValue(StringInfo buffer, JEntry *header, JsonbValue *val, int level)
 	else if (val->type == jbvObject)
 		convertJsonbObject(buffer, header, val, level);
 	else
-		elog(ERROR, "unknown type of jsonb container");
+		elog(ERROR, "unknown type of jsonb container to convert");
 }
 
 static void
diff --git a/src/include/catalog/pg_aggregate.h b/src/include/catalog/pg_aggregate.h
index 3ba9e5e..8e0735b 100644
--- a/src/include/catalog/pg_aggregate.h
+++ b/src/include/catalog/pg_aggregate.h
@@ -286,6 +286,10 @@ DATA(insert ( 3545	n 0 bytea_string_agg_transfn	bytea_string_agg_finalfn	-				-
 DATA(insert ( 3175	n 0 json_agg_transfn	json_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3197	n 0 json_object_agg_transfn json_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 
+/* jsonb */
+DATA(insert ( 3267	n 0 jsonb_agg_transfn	jsonb_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+DATA(insert ( 3270	n 0 jsonb_object_agg_transfn jsonb_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+
 /* ordered-set and hypothetical-set aggregates */
 DATA(insert ( 3972	o 1 ordered_set_transition			percentile_disc_final					-		-		-		t f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3974	o 1 ordered_set_transition			percentile_cont_float8_final			-		-		-		f f 0	2281	0	0		0	_null_ _null_ ));
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index b6dc1b8..a93004f 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4599,6 +4599,33 @@ DESCR("I/O");
 DATA(insert OID =  3803 (  jsonb_send		PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "3802" _null_ _null_ _null_ _null_	jsonb_send _null_ _null_ _null_ ));
 DESCR("I/O");
 
+DATA(insert OID = 3263 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 3802 "1009" _null_ _null_ _null_ _null_ jsonb_object _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3264 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "1009 1009" _null_ _null_ _null_ _null_ jsonb_object_two_arg _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3787 (  to_jsonb	   PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ to_jsonb _null_ _null_ _null_ ));
+DESCR("map input to jsonb");
+DATA(insert OID = 3265 (  jsonb_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ jsonb_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate transition function");
+DATA(insert OID = 3266 (  jsonb_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate final function");
+DATA(insert OID = 3267 (  jsonb_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate input into jsonb");
+DATA(insert OID = 3268 (  jsonb_object_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ jsonb_object_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate transition function");
+DATA(insert OID = 3269 (  jsonb_object_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_object_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate final function");
+DATA(insert OID = 3270 (  jsonb_object_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 2 0 3802 "2276 2276" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate inputs into jsonb object");
+DATA(insert OID = 3259 (  jsonb_build_array	   PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_array _null_ _null_ _null_ ));
+DESCR("build a jsonb array from any inputs");
+DATA(insert OID = 3260 (  jsonb_build_array	   PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_array_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb array");
+DATA(insert OID = 3261 (  jsonb_build_object    PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_object _null_ _null_ _null_ ));
+DESCR("build a jsonb object from pairwise key/value inputs");
+DATA(insert OID = 3262 (  jsonb_build_object    PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_object_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb object");
+
 DATA(insert OID = 3478 (  jsonb_object_field			PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field _null_ _null_ _null_ ));
 DATA(insert OID = 3214 (  jsonb_object_field_text	PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 25  "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field_text _null_ _null_ _null_ ));
 DATA(insert OID = 3215 (  jsonb_array_element		PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 23" _null_ _null_ "{from_json, element_index}" _null_ jsonb_array_element _null_ _null_ _null_ ));
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index b89e4cb..d261aaa 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -350,6 +350,22 @@ extern Datum jsonb_recv(PG_FUNCTION_ARGS);
 extern Datum jsonb_send(PG_FUNCTION_ARGS);
 extern Datum jsonb_typeof(PG_FUNCTION_ARGS);
 
+/* generator routines */
+extern Datum to_jsonb(PG_FUNCTION_ARGS);
+
+extern Datum jsonb_build_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_object_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_two_arg(PG_FUNCTION_ARGS);
+
+/* jsonb_agg, jsonb_object_agg functions */
+extern Datum jsonb_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_agg_finalfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_finalfn(PG_FUNCTION_ARGS);
+
 /* Indexing-related ops */
 extern Datum jsonb_exists(PG_FUNCTION_ARGS);
 extern Datum jsonb_exists_any(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index 9146f59..bb29f0c 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -301,6 +301,65 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+-- to_jsonb, timestamps
+select to_jsonb(timestamp '2014-05-28 12:22:35.614298');
+           to_jsonb           
+------------------------------
+ "2014-05-28T12:22:35.614298"
+(1 row)
+
+BEGIN;
+SET LOCAL TIME ZONE 10.5;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-29T02:52:35.614298+10:30"
+(1 row)
+
+SET LOCAL TIME ZONE -8;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-28T08:22:35.614298-08:00"
+(1 row)
+
+COMMIT;
+-- unicode escape - backslash is not escaped
+select to_jsonb(text '\uabcd');
+ to_jsonb 
+----------
+ "\uabcd"
+(1 row)
+
+-- any other backslash is escaped
+select to_jsonb(text '\abcd');
+ to_jsonb 
+----------
+ "\\abcd"
+(1 row)
+
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1256,6 +1315,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/expected/jsonb_1.out b/src/test/regress/expected/jsonb_1.out
index 83d61f8..7a81e7d 100644
--- a/src/test/regress/expected/jsonb_1.out
+++ b/src/test/regress/expected/jsonb_1.out
@@ -301,6 +301,65 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+-- to_jsonb, timestamps
+select to_jsonb(timestamp '2014-05-28 12:22:35.614298');
+           to_jsonb           
+------------------------------
+ "2014-05-28T12:22:35.614298"
+(1 row)
+
+BEGIN;
+SET LOCAL TIME ZONE 10.5;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-29T02:52:35.614298+10:30"
+(1 row)
+
+SET LOCAL TIME ZONE -8;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-28T08:22:35.614298-08:00"
+(1 row)
+
+COMMIT;
+-- unicode escape - backslash is not escaped
+select to_jsonb(text '\uabcd');
+ to_jsonb 
+----------
+ "\uabcd"
+(1 row)
+
+-- any other backslash is escaped
+select to_jsonb(text '\abcd');
+ to_jsonb 
+----------
+ "\\abcd"
+(1 row)
+
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1256,6 +1315,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index f1ed021..1c9cd83 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -62,6 +62,41 @@ SELECT '    '::jsonb;			-- ERROR, no value
 -- make sure jsonb is passed through json generators without being escaped
 SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
 
+-- to_jsonb, timestamps
+
+select to_jsonb(timestamp '2014-05-28 12:22:35.614298');
+
+BEGIN;
+SET LOCAL TIME ZONE 10.5;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+SET LOCAL TIME ZONE -8;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+COMMIT;
+
+-- unicode escape - backslash is not escaped
+
+select to_jsonb(text '\uabcd');
+
+-- any other backslash is escaped
+
+select to_jsonb(text '\abcd');
+
+--jsonb_agg
+
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -263,6 +298,86 @@ SELECT jsonb_typeof('"hello"') AS string;
 SELECT jsonb_typeof('"true"') AS string;
 SELECT jsonb_typeof('"1.0"') AS string;
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+
+SELECT jsonb_build_object();
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+
+-- jsonb_object
+
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+
+--two argument form of jsonb_object
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- mismatched dimensions
+
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+
+-- null key error
+
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+
+-- empty key is allowed
+
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+
+
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
#13Pavel Stehule
pavel.stehule@gmail.com
In reply to: Andrew Dunstan (#12)
Re: jsonb generator functions

Hi

2014-10-27 15:33 GMT+01:00 Andrew Dunstan <andrew@dunslane.net>:

On 10/15/2014 03:54 PM, Andrew Dunstan wrote:

I checked the code, and I have only two small objections - the name
"jsonb_object_two_arg" is not good - maybe "json_object_keys_values"?

It's consistent with the existing json_object_two_arg. In all cases I
think I kept the names the same except for changing "json" to "jsonb". Note
that these _two_arg functions are not visible at the SQL level - they are
only visible in the C code.

I'm happy to be guided by others in changing or keeping these names.

Next: there are no tests for to_jsonb function.

Oh, my bad. I'll add some.

Thank for the review.

Here is a new patch that includes documentation and addresses all these
issues, except that I didn't change the name of jsonb_object_two_arg to
keep it consistent with the name of json_object_two_arg. I'm happy to
change both if people feel it matters.

I checked the last patch, "jsonbmissingfunc5.patch", and I have no objections:

1. This jsonb API is consistent with the current JSON API, so we surely want
this functionality.

2. The implementation is clean, without side effects and without impact on
current code.

3. Patching and compilation work without any issues or warnings.

4. The source code respects the PostgreSQL coding rules.

5. All regression tests pass without problems.

6. The documentation builds without problems.

7. The patch contains the necessary regression tests.

8. The patch contains the necessary documentation for the new functions.

The patch is ready for committers.

Thank you for the patch.

Regards

Pavel


cheers

andrew

#14Alvaro Herrera
alvherre@2ndquadrant.com
In reply to: Andrew Dunstan (#12)
Re: jsonb generator functions

Andrew Dunstan wrote:

This bit:

+/*
+ * Determine how we want to render values of a given type in datum_to_jsonb.
+ *
+ * Given the datatype OID, return its JsonbTypeCategory, as well as the type's
+ * output function OID.  If the returned category is JSONBTYPE_CAST, we
+ * return the OID of the type->JSON cast function instead.
+ */
+static void
+jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid)
+{

seems like it can return without having set the category and func OID,
if there's no available cast. Callers don't seem to check for this
condition; is this a bug? If not, why not? Maybe some extra comments
are warranted.

Right now, for the "general case" there, there are two syscache lookups
rather than one. The fix is simple: just do the getTypeOutputInfo call
inside each case inside the switch instead of once at the beginning, so
that the general case can omit it; then there is just one syscache
access in all the cases. json.c suffers from the same problem.

Anyway this whole business of searching through the CASTSOURCETARGET
syscache seems like it could be refactored. If I'm counting correctly,
that block now appears four times (three in this patch, once in json.c).
Can't we add a new function to (say) lsyscache and remove that?

I'm just commenting on that part because the syscache.h/pg_cast.h
inclusions look a bit out of place; it's certainly not a serious issue.

I looked at what makes you include miscadmin.h. It's only USE_XSD_DATES
as far as I can tell. I looked at how that might be fixed, and a quick
patch that moves DateStyle, DateOrder and IntervalStyle (and associated
definitions) to datetime.h seems to work pretty well ... except that
initdb.c requires to know about some DATEORDER constants; but frontend
code cannot include datetime.h because of Datum. So that idea crashed
and burned until someone reorganizes the whole datetime code, which
currently is pretty messy.

I don't have any further comments on this patch, other than please add
JsonbTypeCategory to pgindent/typedefs.list before doing your pgindent
run.

--
Álvaro Herrera http://www.2ndQuadrant.com/
PostgreSQL Development, 24x7 Support, Training & Services


#15Andrew Dunstan
andrew@dunslane.net
In reply to: Alvaro Herrera (#14)
Re: jsonb generator functions

On 10/27/2014 05:57 PM, Alvaro Herrera wrote:

Andrew Dunstan wrote:

This bit:

+/*
+ * Determine how we want to render values of a given type in datum_to_jsonb.
+ *
+ * Given the datatype OID, return its JsonbTypeCategory, as well as the type's
+ * output function OID.  If the returned category is JSONBTYPE_CAST, we
+ * return the OID of the type->JSON cast function instead.
+ */
+static void
+jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid)
+{

seems like it can return without having set the category and func OID,
if there's no available cast. Callers don't seem to check for this
condition; is this a bug? If not, why not? Maybe some extra comments
are warranted.

Umm, no. The outfuncoid is set by the call to getTypeOutputInfo() and
the category is set by every branch of the switch. We override the
funcoid in the case where there's a cast to json or jsonb.

I'll add a comment to that effect.

Right now, for the "general case" there, there are two syscache lookups
rather than one. The fix is simple: just do the getTypeOutputInfo call
inside each case inside the switch instead of once at the beginning, so
that the general case can omit it; then there is just one syscache
access in all the cases. json.c suffers from the same problem.

We only do more than one syscache lookup if it's not a builtin type, or an
array or composite, so 99% of the time the second lookup won't even happen.

Anyway this whole business of searching through the CASTSOURCETARGET
syscache seems like it could be refactored. If I'm counting correctly,
that block now appears four times (three in this patch, once in json.c).
Can't we add a new function to (say) lsyscache and remove that?

Twice, not three times in this patch, unless I'm going crazier than I
thought.

I can add a function to lsyscache along the lines of

Oid get_cast_func(Oid from_type, Oid to_type)

if you think it's worth it.
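
Something along these lines, perhaps (untested, just to illustrate -- it
would simply wrap the CASTSOURCETARGET lookup that json.c open-codes today,
and the syscache.h/pg_cast.h includes would move into lsyscache.c with it):

Oid
get_cast_func(Oid from_type, Oid to_type)
{
	Oid			castfunc = InvalidOid;
	HeapTuple	tuple;

	/* look for a pg_cast entry from from_type to to_type */
	tuple = SearchSysCache2(CASTSOURCETARGET,
							ObjectIdGetDatum(from_type),
							ObjectIdGetDatum(to_type));
	if (HeapTupleIsValid(tuple))
	{
		Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);

		/* only a function-based cast is of any use to the json code */
		if (castForm->castmethod == COERCION_METHOD_FUNCTION)
			castfunc = castForm->castfunc;

		ReleaseSysCache(tuple);
	}

	return castfunc;
}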

cheers

andrew


#16Andrew Dunstan
andrew@dunslane.net
In reply to: Andrew Dunstan (#15)
1 attachment(s)
Re: jsonb generator functions

On 10/28/2014 09:49 AM, Andrew Dunstan wrote:

On 10/27/2014 05:57 PM, Alvaro Herrera wrote:

Anyway this whole business of searching through the CASTSOURCETARGET
syscache seems like it could be refactored. If I'm counting correctly,
that block now appears four times (three in this patch, once in json.c).
Can't we add a new function to (say) lsyscache and remove that?

Twice, not three times in this patch, unless I'm going crazier than I
thought.

I can add a function to lsyscache along the lines of

Oid get_cast_func(Oid from_type, Oid to_type)

if you think it's worth it.

OK, here is a new patch version that

* uses find_coercion_pathway() to find the cast function, if any, as
discussed elsewhere
* removes calls to getTypeOutputInfo() except where required
* honors only a cast to json (not to jsonb) when rendering both json and jsonb
* adds processing for the date type that was previously missing in
datum_to_jsonb

cheers

andrew

Attachments:

jsonbmissingfuncs7.patchtext/x-diff; name=jsonbmissingfuncs7.patchDownload
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index 7e5bcd9..fad0e79 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10245,9 +10245,10 @@ table2-mapping
 
   <para>
    <xref linkend="functions-json-creation-table"> shows the functions that are
-   available for creating <type>json</type> values.
-   (Currently, there are no equivalent functions for <type>jsonb</>, but you
-   can cast the result of one of these functions to <type>jsonb</>.)
+   available for creating <type>json</type> and <type>jsonb</type> values.
+   (There are no <type>jsonb</> equivalents of the <literal>row_to_json</>
+   and <literal>array_to_json</> functions. However, the <literal>to_jsonb</>
+   function supplies much the same functionality as these functions would.)
   </para>
 
   <indexterm>
@@ -10268,6 +10269,18 @@ table2-mapping
   <indexterm>
    <primary>json_object</primary>
   </indexterm>
+  <indexterm>
+   <primary>to_jsonb</primary>
+  </indexterm>
+  <indexterm>
+   <primary>jsonb_build_array</primary>
+  </indexterm>
+  <indexterm>
+   <primary>jsonb_build_object</primary>
+  </indexterm>
+  <indexterm>
+   <primary>jsonb_object</primary>
+  </indexterm>
 
   <table id="functions-json-creation-table">
     <title>JSON Creation Functions</title>
@@ -10282,17 +10295,18 @@ table2-mapping
      </thead>
      <tbody>
       <row>
+       <entry><para><literal>to_json(anyelement)</literal>
+          </para><para><literal>to_jsonb(anyelement)</literal>
+       </para></entry>
        <entry>
-         <literal>to_json(anyelement)</literal>
-       </entry>
-       <entry>
-         Returns the value as JSON.  Arrays and composites are converted
+         Returns the value as <type>json</> or <type>jsonb</>.
+         Arrays and composites are converted
          (recursively) to arrays and objects; otherwise, if there is a cast
          from the type to <type>json</type>, the cast function will be used to
-         perform the conversion; otherwise, a JSON scalar value is produced.
+         perform the conversion; otherwise, a scalar value is produced.
          For any scalar type other than a number, a Boolean, or a null value,
-         the text representation will be used, properly quoted and escaped
-         so that it is a valid JSON string.
+         the text representation will be used, properly quoted and escaped
+         so that it is a valid <type>json</> or <type>jsonb</> value.
        </entry>
        <entry><literal>to_json('Fred said "Hi."'::text)</literal></entry>
        <entry><literal>"Fred said \"Hi.\""</literal></entry>
@@ -10321,9 +10335,9 @@ table2-mapping
        <entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_build_array(VARIADIC "any")</literal>
-       </entry>
+       <entry><para><literal>json_build_array(VARIADIC "any")</literal>
+          </para><para><literal>jsonb_build_array(VARIADIC "any")</literal>
+       </para></entry>
        <entry>
          Builds a possibly-heterogeneously-typed JSON array out of a variadic
          argument list.
@@ -10332,9 +10346,9 @@ table2-mapping
        <entry><literal>[1, 2, "3", 4, 5]</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_build_object(VARIADIC "any")</literal>
-       </entry>
+       <entry><para><literal>json_build_object(VARIADIC "any")</literal>
+          </para><para><literal>jsonb_build_object(VARIADIC "any")</literal>
+       </para></entry>
        <entry>
          Builds a JSON object out of a variadic argument list.  By
          convention, the argument list consists of alternating
@@ -10344,9 +10358,9 @@ table2-mapping
        <entry><literal>{"foo": 1, "bar": 2}</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_object(text[])</literal>
-       </entry>
+       <entry><para><literal>json_object(text[])</literal>
+          </para><para><literal>jsonb_object(text[])</literal>
+       </para></entry>
        <entry>
          Builds a JSON object out of a text array.  The array must have either
          exactly one dimension with an even number of members, in which case
@@ -10359,9 +10373,9 @@ table2-mapping
        <entry><literal>{"a": "1", "b": "def", "c": "3.5"}</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_object(keys text[], values text[])</literal>
-       </entry>
+       <entry><para><literal>json_object(keys text[], values text[])</literal>
+          </para><para><literal>jsonb_object(keys text[], values text[])</literal>
+       </para></entry>
        <entry>
          This form of <function>json_object</> takes keys and values pairwise from two separate
          arrays. In all other respects it is identical to the one-argument form.
@@ -10757,7 +10771,8 @@ table2-mapping
     function <function>json_agg</function> which aggregates record
     values as JSON, and the aggregate function
     <function>json_object_agg</function> which aggregates pairs of values
-    into a JSON object.
+    into a JSON object, and their <type>jsonb</type> equivalents,
+    <function>jsonb_agg</> and <function>jsonb_object_agg</>.
   </para>
 
  </sect1>
@@ -12189,6 +12204,22 @@ NULL baz</literallayout>(3 rows)</entry>
      <row>
       <entry>
        <indexterm>
+        <primary>jsonb_agg</primary>
+       </indexterm>
+       <function>jsonb_agg(<replaceable class="parameter">record</replaceable>)</function>
+      </entry>
+      <entry>
+       <type>record</type>
+      </entry>
+      <entry>
+       <type>jsonb</type>
+      </entry>
+      <entry>aggregates records as a JSON array of objects</entry>
+     </row>
+
+     <row>
+      <entry>
+       <indexterm>
         <primary>json_object_agg</primary>
        </indexterm>
        <function>json_object_agg(<replaceable class="parameter">name</replaceable>, <replaceable class="parameter">value</replaceable>)</function>
@@ -12205,6 +12236,22 @@ NULL baz</literallayout>(3 rows)</entry>
      <row>
       <entry>
        <indexterm>
+        <primary>jsonb_object_agg</primary>
+       </indexterm>
+       <function>jsonb_object_agg(<replaceable class="parameter">name</replaceable>, <replaceable class="parameter">value</replaceable>)</function>
+      </entry>
+      <entry>
+       <type>("any", "any")</type>
+      </entry>
+      <entry>
+       <type>jsonb</type>
+      </entry>
+      <entry>aggregates name/value pairs as a JSON object</entry>
+     </row>
+
+     <row>
+      <entry>
+       <indexterm>
         <primary>max</primary>
        </indexterm>
        <function>max(<replaceable class="parameter">expression</replaceable>)</function>
@@ -12348,8 +12395,8 @@ SELECT count(*) FROM sometable;
 
   <para>
    The aggregate functions <function>array_agg</function>,
-   <function>json_agg</function>,
-   <function>json_object_agg</function>,
+   <function>json_agg</function>, <function>jsonb_agg</function>,
+   <function>json_object_agg</function>, <function>jsonb_object_agg</function>,
    <function>string_agg</function>,
    and <function>xmlagg</function>, as well as similar user-defined
    aggregate functions, produce meaningfully different result values
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index d2bf640..f931fcf 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -15,7 +15,6 @@
 
 #include "access/htup_details.h"
 #include "access/transam.h"
-#include "catalog/pg_cast.h"
 #include "catalog/pg_type.h"
 #include "executor/spi.h"
 #include "lib/stringinfo.h"
@@ -1250,10 +1249,14 @@ json_categorize_type(Oid typoid,
 	/* Look through any domain */
 	typoid = getBaseType(typoid);
 
-	/* We'll usually need to return the type output function */
-	getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+	*outfuncoid = InvalidOid;
+
+	/*
+	 * We need to get the output function for everything except date and
+	 * timestamp types, array and composite types, booleans,
+	 * and non-builtin types where there's a cast to json.
+	 */
 
-	/* Check for known types */
 	switch (typoid)
 	{
 		case BOOLOID:
@@ -1266,6 +1269,7 @@ json_categorize_type(Oid typoid,
 		case FLOAT4OID:
 		case FLOAT8OID:
 		case NUMERICOID:
+			getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
 			*tcategory = JSONTYPE_NUMERIC;
 			break;
 
@@ -1283,6 +1287,7 @@ json_categorize_type(Oid typoid,
 
 		case JSONOID:
 		case JSONBOID:
+			getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
 			*tcategory = JSONTYPE_JSON;
 			break;
 
@@ -1299,23 +1304,26 @@ json_categorize_type(Oid typoid,
 				/* but let's look for a cast to json, if it's not built-in */
 				if (typoid >= FirstNormalObjectId)
 				{
-					HeapTuple	tuple;
+					Oid castfunc;
+					CoercionPathType ctype;
 
-					tuple = SearchSysCache2(CASTSOURCETARGET,
-											ObjectIdGetDatum(typoid),
-											ObjectIdGetDatum(JSONOID));
-					if (HeapTupleIsValid(tuple))
+					ctype = find_coercion_pathway(JSONOID, typoid,
+												  COERCION_EXPLICIT, &castfunc);
+					if (ctype == COERCION_PATH_FUNC && OidIsValid(castfunc))
 					{
-						Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
-
-						if (castForm->castmethod == COERCION_METHOD_FUNCTION)
-						{
-							*tcategory = JSONTYPE_CAST;
-							*outfuncoid = castForm->castfunc;
-						}
-
-						ReleaseSysCache(tuple);
+						*tcategory = JSONTYPE_CAST;
+						*outfuncoid = castfunc;
 					}
+					else
+					{
+						/* non builtin type with no cast */
+						getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+					}
+				}
+				else
+				{
+					/* any other builtin type */
+					getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
 				}
 			}
 			break;
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
index 9beebb3..e63eddd 100644
--- a/src/backend/utils/adt/jsonb.c
+++ b/src/backend/utils/adt/jsonb.c
@@ -12,11 +12,21 @@
  */
 #include "postgres.h"
 
+#include "miscadmin.h"
+#include "access/htup_details.h"
+#include "access/transam.h"
+#include "catalog/pg_type.h"
 #include "libpq/pqformat.h"
+#include "parser/parse_coerce.h"
 #include "utils/builtins.h"
+#include "utils/date.h"
+#include "utils/datetime.h"
+#include "utils/lsyscache.h"
 #include "utils/json.h"
 #include "utils/jsonapi.h"
 #include "utils/jsonb.h"
+#include "utils/syscache.h"
+#include "utils/typcache.h"
 
 typedef struct JsonbInState
 {
@@ -24,6 +34,23 @@ typedef struct JsonbInState
 	JsonbValue *res;
 } JsonbInState;
 
+/* unlike with json categories, we need to treat json and jsonb differently */
+typedef enum					/* type categories for datum_to_jsonb */
+{
+	JSONBTYPE_NULL,				/* null, so we didn't bother to identify */
+	JSONBTYPE_BOOL,				/* boolean (built-in types only) */
+	JSONBTYPE_NUMERIC,			/* numeric (ditto) */
+	JSONBTYPE_DATE,				/* we use special formatting for datetimes */
+	JSONBTYPE_TIMESTAMP,		/* we use special formatting for timestamp */
+	JSONBTYPE_TIMESTAMPTZ,		/* ... and timestamptz */
+	JSONBTYPE_JSON,				/* JSON */
+	JSONBTYPE_JSONB,			/* JSONB */
+	JSONBTYPE_ARRAY,			/* array */
+	JSONBTYPE_COMPOSITE,		/* composite */
+	JSONBTYPE_JSONCAST,			/* something with an explicit cast to JSON */
+	JSONBTYPE_OTHER				/* all else */
+}	JsonbTypeCategory;
+
 static inline Datum jsonb_from_cstring(char *json, int len);
 static size_t checkStringLen(size_t len);
 static void jsonb_in_object_start(void *pstate);
@@ -33,6 +60,22 @@ static void jsonb_in_array_end(void *pstate);
 static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull);
 static void jsonb_put_escaped_value(StringInfo out, JsonbValue *scalarVal);
 static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void composite_to_jsonb(Datum composite, JsonbInState *result);
+static void array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims,
+				   Datum *vals, bool *nulls, int *valcount,
+				   JsonbTypeCategory tcategory, Oid outfuncoid);
+static void array_to_jsonb_internal(Datum array, JsonbInState *result);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar);
+static void add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar);
 
 /*
  * jsonb type input function
@@ -462,3 +505,1303 @@ JsonbToCString(StringInfo out, JsonbContainer *in, int estimated_len)
 
 	return out->data;
 }
+
+
+/*
+ * Determine how we want to render values of a given type in datum_to_jsonb.
+ *
+ * Given the datatype OID, return its JsonbTypeCategory, as well as the type's
+ * output function OID.  If the returned category is JSONBTYPE_JSONCAST,
+ * we return the OID of the relevant cast function instead.
+ */
+static void
+jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid)
+{
+	bool		typisvarlena;
+
+	/* Look through any domain */
+	typoid = getBaseType(typoid);
+
+	*outfuncoid = InvalidOid;
+
+	/*
+	 * We need to get the output function for everything except date and
+	 * timestamp types, booleans, array and composite types, json and jsonb,
+	 * and non-builtin types where there's a cast to json. In this last case
+	 * we return the oid of the cast function instead.
+	 */
+
+	switch (typoid)
+	{
+		case BOOLOID:
+			*tcategory = JSONBTYPE_BOOL;
+			break;
+
+		case INT2OID:
+		case INT4OID:
+		case INT8OID:
+		case FLOAT4OID:
+		case FLOAT8OID:
+		case NUMERICOID:
+			getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+			*tcategory = JSONBTYPE_NUMERIC;
+			break;
+
+		case DATEOID:
+			*tcategory = JSONBTYPE_DATE;
+			break;
+
+		case TIMESTAMPOID:
+			*tcategory = JSONBTYPE_TIMESTAMP;
+			break;
+
+		case TIMESTAMPTZOID:
+			*tcategory = JSONBTYPE_TIMESTAMPTZ;
+			break;
+
+		case JSONBOID:
+			*tcategory = JSONBTYPE_JSONB;
+			break;
+
+		case JSONOID:
+			*tcategory = JSONBTYPE_JSON;
+			break;
+
+		default:
+			/* Check for arrays and composites */
+			if (OidIsValid(get_element_type(typoid)))
+				*tcategory = JSONBTYPE_ARRAY;
+			else if (type_is_rowtype(typoid))
+				*tcategory = JSONBTYPE_COMPOSITE;
+			else
+			{
+				/* It's probably the general case ... */
+				*tcategory = JSONBTYPE_OTHER;
+
+				/*
+				 * but first let's look for a cast to json (note: not to jsonb)
+				 * if it's not built-in.
+				 */
+				if (typoid >= FirstNormalObjectId)
+				{
+					Oid castfunc;
+					CoercionPathType ctype;
+
+					ctype = find_coercion_pathway(JSONOID, typoid,
+												  COERCION_EXPLICIT, &castfunc);
+					if (ctype == COERCION_PATH_FUNC && OidIsValid(castfunc))
+					{
+						*tcategory = JSONBTYPE_JSONCAST;
+						*outfuncoid = castfunc;
+					}
+					else
+					{
+						/* not a cast type, so just get the usual output func */
+						getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+					}
+				}
+				else
+				{
+					/* any other builtin type */
+					getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+				}
+				break;
+			}
+	}
+}
+
+/*
+ * Turn a Datum into jsonb, adding it to the result JsonbInState.
+ *
+ * tcategory and outfuncoid are from a previous call to jsonb_categorize_type,
+ * except that if is_null is true then they can be invalid.
+ *
+ * If key_scalar is true, the value is stored as a key, so insist
+ * it's of an acceptable type, and force it to be a jbvString.
+ */
+static void
+datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar)
+{
+	char	   *outputstr;
+	bool		numeric_error;
+	JsonbValue	jb;
+	bool		scalar_jsonb = false;
+
+	if (is_null)
+	{
+		jb.type = jbvNull;
+	}
+	else if (key_scalar &&
+			 (tcategory == JSONBTYPE_ARRAY ||
+			  tcategory == JSONBTYPE_COMPOSITE ||
+			  tcategory == JSONBTYPE_JSON ||
+			  tcategory == JSONBTYPE_JSONB ||
+			  tcategory == JSONBTYPE_JSONCAST))
+	{
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+		  errmsg("key value must be scalar, not array, composite or json")));
+	}
+	else
+	{
+		if (tcategory == JSONBTYPE_JSONCAST)
+			val = OidFunctionCall1(outfuncoid, val);
+
+		switch (tcategory)
+		{
+			case JSONBTYPE_ARRAY:
+				array_to_jsonb_internal(val, result);
+				break;
+			case JSONBTYPE_COMPOSITE:
+				composite_to_jsonb(val, result);
+				break;
+			case JSONBTYPE_BOOL:
+				if (key_scalar)
+				{
+					outputstr = DatumGetBool(val) ? "true" : "false";
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					jb.type = jbvBool;
+					jb.val.boolean = DatumGetBool(val);
+				}
+				break;
+			case JSONBTYPE_NUMERIC:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				if (key_scalar)
+				{
+					/* always quote keys */
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					/*
+					 * Make it numeric if it's a valid JSON number, otherwise
+					 * a string. Invalid numeric output will always have an
+					 * 'N' or 'n' in it (I think).
+					 */
+					numeric_error = (strchr(outputstr, 'N') != NULL ||
+									 strchr(outputstr, 'n') != NULL);
+					if (!numeric_error)
+					{
+						jb.type = jbvNumeric;
+						jb.val.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(outputstr), 0, -1));
+
+						pfree(outputstr);
+					}
+					else
+					{
+						jb.type = jbvString;
+						jb.val.string.len = strlen(outputstr);
+						jb.val.string.val = outputstr;
+					}
+				}
+				break;
+			case JSONBTYPE_DATE:
+				{
+					DateADT		date;
+					struct pg_tm tm;
+					char		buf[MAXDATELEN + 1];
+
+					date = DatumGetDateADT(val);
+
+					/* XSD doesn't support infinite values */
+					if (DATE_NOT_FINITE(date))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("date out of range"),
+								 errdetail("JSON does not support infinite date values.")));
+					else
+					{
+						j2date(date + POSTGRES_EPOCH_JDATE,
+							   &(tm.tm_year), &(tm.tm_mon), &(tm.tm_mday));
+						EncodeDateOnly(&tm, USE_XSD_DATES, buf);
+					}
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_TIMESTAMP:
+				{
+					Timestamp	timestamp;
+					struct pg_tm tm;
+					fsec_t		fsec;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestamp(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
+						EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_TIMESTAMPTZ:
+				{
+					TimestampTz timestamp;
+					struct pg_tm tm;
+					int			tz;
+					fsec_t		fsec;
+					const char *tzn = NULL;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestamp(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0)
+						EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_JSONCAST:
+			case JSONBTYPE_JSON:
+				{
+					/* parse the json right into the existing result object */
+					JsonLexContext *lex;
+					JsonSemAction sem;
+					text	   *json = DatumGetTextP(val);
+
+					lex = makeJsonLexContext(json, true);
+
+					memset(&sem, 0, sizeof(sem));
+
+					sem.semstate = (void *) result;
+
+					sem.object_start = jsonb_in_object_start;
+					sem.array_start = jsonb_in_array_start;
+					sem.object_end = jsonb_in_object_end;
+					sem.array_end = jsonb_in_array_end;
+					sem.scalar = jsonb_in_scalar;
+					sem.object_field_start = jsonb_in_object_field_start;
+
+					pg_parse_json(lex, &sem);
+
+				}
+				break;
+			case JSONBTYPE_JSONB:
+				{
+					Jsonb	   *jsonb = DatumGetJsonb(val);
+					int			type;
+					JsonbIterator *it;
+
+					it = JsonbIteratorInit(&jsonb->root);
+
+					if (JB_ROOT_IS_SCALAR(jsonb))
+					{
+						(void) JsonbIteratorNext(&it, &jb, true);
+						Assert(jb.type == jbvArray);
+						(void) JsonbIteratorNext(&it, &jb, true);
+						scalar_jsonb = true;
+					}
+					else
+					{
+						while ((type = JsonbIteratorNext(&it, &jb, false))
+							   != WJB_DONE)
+						{
+							if (type == WJB_END_ARRAY || type == WJB_END_OBJECT ||
+								type == WJB_BEGIN_ARRAY || type == WJB_BEGIN_OBJECT)
+								result->res = pushJsonbValue(&result->parseState,
+															 type, NULL);
+							else
+								result->res = pushJsonbValue(&result->parseState,
+															 type, &jb);
+						}
+					}
+				}
+				break;
+			default:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				jb.type = jbvString;
+				jb.val.string.len = checkStringLen(strlen(outputstr));
+				jb.val.string.val = outputstr;
+				break;
+		}
+	}
+	if (tcategory >= JSONBTYPE_JSON && tcategory <= JSONBTYPE_JSONCAST &&
+		!scalar_jsonb)
+	{
+		/* work has been done recursively */
+		return;
+	}
+	else if (result->parseState == NULL)
+	{
+		/* single root scalar */
+		JsonbValue	va;
+
+		va.type = jbvArray;
+		va.val.array.rawScalar = true;
+		va.val.array.nElems = 1;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, &va);
+		result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+	}
+	else
+	{
+		JsonbValue *o = &result->parseState->contVal;
+
+		switch (o->type)
+		{
+			case jbvArray:
+				result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+				break;
+			case jbvObject:
+				result->res = pushJsonbValue(&result->parseState,
+											 key_scalar ? WJB_KEY : WJB_VALUE,
+											 &jb);
+				break;
+			default:
+				elog(ERROR, "unexpected parent of nested structure");
+		}
+	}
+}
+
+/*
+ * Process a single dimension of an array.
+ * If it's the innermost dimension, output the values, otherwise call
+ * ourselves recursively to process the next dimension.
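+ * For example (illustrative): to_jsonb of '{{1,2},{3,4}}'::int[] produces
+ * the nested jsonb array [[1, 2], [3, 4]].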
+ */
+static void
+array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims, Datum *vals,
+				   bool *nulls, int *valcount, JsonbTypeCategory tcategory,
+				   Oid outfuncoid)
+{
+	int			i;
+
+	Assert(dim < ndims);
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 1; i <= dims[dim]; i++)
+	{
+		if (dim + 1 == ndims)
+		{
+			datum_to_jsonb(vals[*valcount], nulls[*valcount], result, tcategory,
+						   outfuncoid, false);
+			(*valcount)++;
+		}
+		else
+		{
+			array_dim_to_jsonb(result, dim + 1, ndims, dims, vals, nulls,
+							   valcount, tcategory, outfuncoid);
+		}
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+}
+
+/*
+ * Turn an array into JSON.
+ */
+static void
+array_to_jsonb_internal(Datum array, JsonbInState *result)
+{
+	ArrayType  *v = DatumGetArrayTypeP(array);
+	Oid			element_type = ARR_ELEMTYPE(v);
+	int		   *dim;
+	int			ndim;
+	int			nitems;
+	int			count = 0;
+	Datum	   *elements;
+	bool	   *nulls;
+	int16		typlen;
+	bool		typbyval;
+	char		typalign;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	ndim = ARR_NDIM(v);
+	dim = ARR_DIMS(v);
+	nitems = ArrayGetNItems(ndim, dim);
+
+	if (nitems <= 0)
+	{
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+		return;
+	}
+
+	get_typlenbyvalalign(element_type,
+						 &typlen, &typbyval, &typalign);
+
+	jsonb_categorize_type(element_type,
+						  &tcategory, &outfuncoid);
+
+	deconstruct_array(v, element_type, typlen, typbyval,
+					  typalign, &elements, &nulls,
+					  &nitems);
+
+	array_dim_to_jsonb(result, 0, ndim, dim, elements, nulls, &count, tcategory,
+					   outfuncoid);
+
+	pfree(elements);
+	pfree(nulls);
+}
+
+/*
+ * Turn a composite / record into JSON.
+ */
+static void
+composite_to_jsonb(Datum composite, JsonbInState *result)
+{
+	HeapTupleHeader td;
+	Oid			tupType;
+	int32		tupTypmod;
+	TupleDesc	tupdesc;
+	HeapTupleData tmptup,
+			   *tuple;
+	int			i;
+
+	td = DatumGetHeapTupleHeader(composite);
+
+	/* Extract rowtype info and find a tupdesc */
+	tupType = HeapTupleHeaderGetTypeId(td);
+	tupTypmod = HeapTupleHeaderGetTypMod(td);
+	tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+
+	/* Build a temporary HeapTuple control structure */
+	tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
+	tmptup.t_data = td;
+	tuple = &tmptup;
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < tupdesc->natts; i++)
+	{
+		Datum		val;
+		bool		isnull;
+		char	   *attname;
+		JsonbTypeCategory tcategory;
+		Oid			outfuncoid;
+		JsonbValue	v;
+
+		if (tupdesc->attrs[i]->attisdropped)
+			continue;
+
+		attname = NameStr(tupdesc->attrs[i]->attname);
+
+		v.type = jbvString;
+		/* don't need checkStringLen here - can't exceed maximum name length */
+		v.val.string.len = strlen(attname);
+		v.val.string.val = attname;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_KEY, &v);
+
+		val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+		if (isnull)
+		{
+			tcategory = JSONBTYPE_NULL;
+			outfuncoid = InvalidOid;
+		}
+		else
+			jsonb_categorize_type(tupdesc->attrs[i]->atttypid,
+								  &tcategory, &outfuncoid);
+
+		datum_to_jsonb(val, isnull, result, tcategory, outfuncoid, false);
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_OBJECT, NULL);
+	ReleaseTupleDesc(tupdesc);
+}
+
+/*
+ * Append the jsonb value for "val" to "result".
+ *
+ * This is just a thin wrapper around datum_to_jsonb.  If the same type will be
+ * printed many times, avoid using this; better to do the jsonb_categorize_type
+ * lookups only once.
+ */
+
+static void
+add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar)
+{
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (is_null)
+	{
+		tcategory = JSONBTYPE_NULL;
+		outfuncoid = InvalidOid;
+	}
+	else
+		jsonb_categorize_type(val_type,
+							  &tcategory, &outfuncoid);
+
+	datum_to_jsonb(val, is_null, result, tcategory, outfuncoid, key_scalar);
+}
+
+/*
+ * SQL function to_jsonb(anyvalue)
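+ *
+ * Example (illustrative; see the regression tests in this patch):
+ *   to_jsonb(timestamp '2014-05-28 12:22:35.614298')
+ *       => "2014-05-28T12:22:35.614298"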
+ */
+Datum
+to_jsonb(PG_FUNCTION_ARGS)
+{
+	Datum		val = PG_GETARG_DATUM(0);
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
+	JsonbInState result;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &result, tcategory, outfuncoid, false);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_object(variadic "any")
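+ *
+ * Takes alternating key/value arguments, e.g. (illustrative)
+ *   jsonb_build_object('a', 1, 'b', true)  =>  {"a": 1, "b": true}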
+ */
+Datum
+jsonb_build_object(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	if (nargs % 2 != 0)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("invalid number of arguments: object must be matched key value pairs")));
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < nargs; i += 2)
+	{
+
+		/* process key */
+
+		if (PG_ARGISNULL(i))
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: key cannot be null", i + 1)));
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+
+		/*
+		 * turn a constant (more or less literal) value that's of unknown type
+		 * into text. Unknowns come in as a cstring pointer.
+		 */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+
+		add_jsonb(arg, false, &result, val_type, true);
+
+		/* process value */
+
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
+		/* see comments above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i + 1))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i + 1));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i + 1);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 2)));
+		add_jsonb(arg, PG_ARGISNULL(i + 1), &result, val_type, false);
+
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_object where it gets 0 arguments.
+ */
+Datum
+jsonb_build_object_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_array(variadic "any")
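+ *
+ * Builds a jsonb array from the variadic arguments, e.g. (illustrative)
+ *   jsonb_build_array('a', 1, true)  =>  ["a", 1, true]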
+ */
+Datum
+jsonb_build_array(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 0; i < nargs; i++)
+	{
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+		/* see comments in jsonb_build_object above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+		add_jsonb(arg, PG_ARGISNULL(i), &result, val_type, false);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_array where it gets 0 arguments.
+ */
+Datum
+jsonb_build_array_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * SQL function jsonb_object(text[])
+ *
+ * take a one or two dimensional array of text as name value pairs
+ * for a jsonb object.
+ *
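+ * For example (illustrative; see the regression tests in this patch):
+ *   jsonb_object('{a,1,b,2}')      => {"a": "1", "b": "2"}
+ *   jsonb_object('{{a,1},{b,2}}')  => {"a": "1", "b": "2"}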
+ */
+Datum
+jsonb_object(PG_FUNCTION_ARGS)
+{
+	ArrayType  *in_array = PG_GETARG_ARRAYTYPE_P(0);
+	int			ndims = ARR_NDIM(in_array);
+	Datum	   *in_datums;
+	bool	   *in_nulls;
+	int			in_count,
+				count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	switch (ndims)
+	{
+		case 0:
+			goto close_object;
+			break;
+
+		case 1:
+			if ((ARR_DIMS(in_array)[0]) % 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have even number of elements")));
+			break;
+
+		case 2:
+			if ((ARR_DIMS(in_array)[1]) != 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have two columns")));
+			break;
+
+		default:
+			ereport(ERROR,
+					(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+					 errmsg("wrong number of array subscripts")));
+	}
+
+	deconstruct_array(in_array,
+					  TEXTOID, -1, false, 'i',
+					  &in_datums, &in_nulls, &in_count);
+
+	count = in_count / 2;
+
+	for (i = 0; i < count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (in_nulls[i * 2])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(in_datums[i * 2]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (in_nulls[i * 2 + 1])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(in_datums[i * 2 + 1]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	pfree(in_datums);
+	pfree(in_nulls);
+
+close_object:
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_object(text[], text[])
+ *
+ * take separate name and value arrays of text to construct a jsonb object
+ * pairwise.
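+ *
+ * For example (illustrative):
+ *   jsonb_object('{a,b}', '{1,2}')  => {"a": "1", "b": "2"}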
+ */
+Datum
+jsonb_object_two_arg(PG_FUNCTION_ARGS)
+{
+	ArrayType  *key_array = PG_GETARG_ARRAYTYPE_P(0);
+	ArrayType  *val_array = PG_GETARG_ARRAYTYPE_P(1);
+	int			nkdims = ARR_NDIM(key_array);
+	int			nvdims = ARR_NDIM(val_array);
+	Datum	   *key_datums,
+			   *val_datums;
+	bool	   *key_nulls,
+			   *val_nulls;
+	int			key_count,
+				val_count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	if (nkdims > 1 || nkdims != nvdims)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("wrong number of array subscripts")));
+
+	if (nkdims == 0)
+	{
+		/* close the already-opened object and return empty jsonb, not text */
+		result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+		PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+	}
+
+	deconstruct_array(key_array,
+					  TEXTOID, -1, false, 'i',
+					  &key_datums, &key_nulls, &key_count);
+
+	deconstruct_array(val_array,
+					  TEXTOID, -1, false, 'i',
+					  &val_datums, &val_nulls, &val_count);
+
+	if (key_count != val_count)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("mismatched array dimensions")));
+
+	for (i = 0; i < key_count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (key_nulls[i])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(key_datums[i]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (val_nulls[i])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(val_datums[i]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	pfree(key_datums);
+	pfree(key_nulls);
+	pfree(val_datums);
+	pfree(val_nulls);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * jsonb_agg aggregate function
+ */
+Datum
+jsonb_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar = false;
+	JsonbIterator *it;
+	Jsonb	   *jbelem;
+	JsonbValue	v;
+	int			type;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, PG_ARGISNULL(1), &elem, tcategory, outfuncoid, false);
+
+	jbelem = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_ARRAY, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbelem->root);
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+				  v.val.numeric =
+					DatumGetNumeric(DirectFunctionCall1(numeric_uplus,
+														NumericGetDatum(v.val.numeric)));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 type, &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	result->res = pushJsonbValue(&result->parseState,
+								 WJB_END_ARRAY, NULL);
+
+
+	out = JsonbValueToJsonb(result->res);
+
+	PG_RETURN_POINTER(out);
+}
+
+/*
+ * jsonb_object_agg aggregate function
+ */
+Datum
+jsonb_object_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type;
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar;
+	JsonbIterator *it;
+	Jsonb	   *jbkey,
+			   *jbval;
+	JsonbValue	v;
+	int			type;
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_object_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, PG_ARGISNULL(1), &elem, tcategory, outfuncoid, true);
+
+	jbkey = JsonbValueToJsonb(elem.res);
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(2) ? (Datum) 0 : PG_GETARG_DATUM(2);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, PG_ARGISNULL(2), &elem, tcategory, outfuncoid, false);
+
+	jbval = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_OBJECT, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbkey->root);
+
+	/*
+	 * keys should be scalar, and we should have already checked for that
+	 * above when calling datum_to_jsonb, so we only need to look for these
+	 * things.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (!v.val.array.rawScalar)
+					elog(ERROR, "unexpected structure for key");
+				break;
+			case WJB_ELEM:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else
+				{
+					ereport(ERROR,
+							(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+							 errmsg("object keys must be strings")));
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 WJB_KEY, &v);
+				break;
+			case WJB_END_ARRAY:
+				break;
+			default:
+				elog(ERROR, "unexpected structure for key");
+				break;
+		}
+	}
+
+	it = JsonbIteratorInit(&jbval->root);
+
+	single_scalar = false;
+
+	/*
+	 * values can be anything, including structured and null, so we treat
+	 * them as in json_agg_transfn, except that single scalars are always
+	 * pushed as WJB_VALUE items.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+					v.val.numeric =
+					  DatumGetNumeric(DirectFunctionCall1(numeric_uplus,
+														  NumericGetDatum(v.val.numeric)));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 single_scalar ? WJB_VALUE : type,
+											 &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_object_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	result = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	result->res = pushJsonbValue(&result->parseState,
+								 WJB_END_OBJECT, NULL);
+
+
+	out = JsonbValueToJsonb(result->res);
+
+	PG_RETURN_POINTER(out);
+}
diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c
index 2ff8539..4cce30d 100644
--- a/src/backend/utils/adt/jsonb_util.c
+++ b/src/backend/utils/adt/jsonb_util.c
@@ -1427,7 +1427,7 @@ convertJsonbValue(StringInfo buffer, JEntry *header, JsonbValue *val, int level)
 	else if (val->type == jbvObject)
 		convertJsonbObject(buffer, header, val, level);
 	else
-		elog(ERROR, "unknown type of jsonb container");
+		elog(ERROR, "unknown type of jsonb container to convert");
 }
 
 static void
diff --git a/src/include/catalog/pg_aggregate.h b/src/include/catalog/pg_aggregate.h
index 3ba9e5e..8e0735b 100644
--- a/src/include/catalog/pg_aggregate.h
+++ b/src/include/catalog/pg_aggregate.h
@@ -286,6 +286,10 @@ DATA(insert ( 3545	n 0 bytea_string_agg_transfn	bytea_string_agg_finalfn	-				-
 DATA(insert ( 3175	n 0 json_agg_transfn	json_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3197	n 0 json_object_agg_transfn json_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 
+/* jsonb */
+DATA(insert ( 3267	n 0 jsonb_agg_transfn	jsonb_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+DATA(insert ( 3270	n 0 jsonb_object_agg_transfn jsonb_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+
 /* ordered-set and hypothetical-set aggregates */
 DATA(insert ( 3972	o 1 ordered_set_transition			percentile_disc_final					-		-		-		t f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3974	o 1 ordered_set_transition			percentile_cont_float8_final			-		-		-		f f 0	2281	0	0		0	_null_ _null_ ));
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index b6dc1b8..a93004f 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4599,6 +4599,33 @@ DESCR("I/O");
 DATA(insert OID =  3803 (  jsonb_send		PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "3802" _null_ _null_ _null_ _null_	jsonb_send _null_ _null_ _null_ ));
 DESCR("I/O");
 
+DATA(insert OID = 3263 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 3802 "1009" _null_ _null_ _null_ _null_ jsonb_object _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3264 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "1009 1009" _null_ _null_ _null_ _null_ jsonb_object_two_arg _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3787 (  to_jsonb	   PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ to_jsonb _null_ _null_ _null_ ));
+DESCR("map input to jsonb");
+DATA(insert OID = 3265 (  jsonb_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ jsonb_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate transition function");
+DATA(insert OID = 3266 (  jsonb_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate final function");
+DATA(insert OID = 3267 (  jsonb_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate input into jsonb");
+DATA(insert OID = 3268 (  jsonb_object_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ jsonb_object_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate transition function");
+DATA(insert OID = 3269 (  jsonb_object_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_object_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate final function");
+DATA(insert OID = 3270 (  jsonb_object_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 2 0 3802 "2276 2276" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate inputs into jsonb object");
+DATA(insert OID = 3259 (  jsonb_build_array	   PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_array _null_ _null_ _null_ ));
+DESCR("build a jsonb array from any inputs");
+DATA(insert OID = 3260 (  jsonb_build_array	   PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_array_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb array");
+DATA(insert OID = 3261 (  jsonb_build_object    PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_object _null_ _null_ _null_ ));
+DESCR("build a jsonb object from pairwise key/value inputs");
+DATA(insert OID = 3262 (  jsonb_build_object    PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_object_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb object");
+
 DATA(insert OID = 3478 (  jsonb_object_field			PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field _null_ _null_ _null_ ));
 DATA(insert OID = 3214 (  jsonb_object_field_text	PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 25  "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field_text _null_ _null_ _null_ ));
 DATA(insert OID = 3215 (  jsonb_array_element		PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 23" _null_ _null_ "{from_json, element_index}" _null_ jsonb_array_element _null_ _null_ _null_ ));
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index b89e4cb..d261aaa 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -350,6 +350,22 @@ extern Datum jsonb_recv(PG_FUNCTION_ARGS);
 extern Datum jsonb_send(PG_FUNCTION_ARGS);
 extern Datum jsonb_typeof(PG_FUNCTION_ARGS);
 
+/* generator routines */
+extern Datum to_jsonb(PG_FUNCTION_ARGS);
+
+extern Datum jsonb_build_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_object_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_two_arg(PG_FUNCTION_ARGS);
+
+/* jsonb_agg, json_object_agg functions */
+extern Datum jsonb_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_agg_finalfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_finalfn(PG_FUNCTION_ARGS);
+
 /* Indexing-related ops */
 extern Datum jsonb_exists(PG_FUNCTION_ARGS);
 extern Datum jsonb_exists_any(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index 9146f59..bb29f0c 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -301,6 +301,65 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+-- to_jsonb, timestamps
+select to_jsonb(timestamp '2014-05-28 12:22:35.614298');
+           to_jsonb           
+------------------------------
+ "2014-05-28T12:22:35.614298"
+(1 row)
+
+BEGIN;
+SET LOCAL TIME ZONE 10.5;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-29T02:52:35.614298+10:30"
+(1 row)
+
+SET LOCAL TIME ZONE -8;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-28T08:22:35.614298-08:00"
+(1 row)
+
+COMMIT;
+-- unicode escape - backslash is not escaped
+select to_jsonb(text '\uabcd');
+ to_jsonb 
+----------
+ "\uabcd"
+(1 row)
+
+-- any other backslash is escaped
+select to_jsonb(text '\abcd');
+ to_jsonb 
+----------
+ "\\abcd"
+(1 row)
+
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1256,6 +1315,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/expected/jsonb_1.out b/src/test/regress/expected/jsonb_1.out
index 83d61f8..7a81e7d 100644
--- a/src/test/regress/expected/jsonb_1.out
+++ b/src/test/regress/expected/jsonb_1.out
@@ -301,6 +301,65 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+-- to_jsonb, timestamps
+select to_jsonb(timestamp '2014-05-28 12:22:35.614298');
+           to_jsonb           
+------------------------------
+ "2014-05-28T12:22:35.614298"
+(1 row)
+
+BEGIN;
+SET LOCAL TIME ZONE 10.5;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-29T02:52:35.614298+10:30"
+(1 row)
+
+SET LOCAL TIME ZONE -8;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-28T08:22:35.614298-08:00"
+(1 row)
+
+COMMIT;
+-- unicode escape - backslash is not escaped
+select to_jsonb(text '\uabcd');
+ to_jsonb 
+----------
+ "\uabcd"
+(1 row)
+
+-- any other backslash is escaped
+select to_jsonb(text '\abcd');
+ to_jsonb 
+----------
+ "\\abcd"
+(1 row)
+
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1256,6 +1315,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index f1ed021..1c9cd83 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -62,6 +62,41 @@ SELECT '    '::jsonb;			-- ERROR, no value
 -- make sure jsonb is passed through json generators without being escaped
 SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
 
+-- to_jsonb, timestamps
+
+select to_jsonb(timestamp '2014-05-28 12:22:35.614298');
+
+BEGIN;
+SET LOCAL TIME ZONE 10.5;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+SET LOCAL TIME ZONE -8;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+COMMIT;
+
+-- unicode escape - backslash is not escaped
+
+select to_jsonb(text '\uabcd');
+
+-- any other backslash is escaped
+
+select to_jsonb(text '\abcd');
+
+--jsonb_agg
+
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -263,6 +298,86 @@ SELECT jsonb_typeof('"hello"') AS string;
 SELECT jsonb_typeof('"true"') AS string;
 SELECT jsonb_typeof('"1.0"') AS string;
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+
+SELECT jsonb_build_object();
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+
+-- jsonb_object
+
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+
+--two argument form of jsonb_object
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- mismatched dimensions
+
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+
+-- null key error
+
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+
+-- empty key is allowed
+
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+
+
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
#17Alvaro Herrera
alvherre@2ndquadrant.com
In reply to: Andrew Dunstan (#16)
Re: jsonb generator functions

Andrew Dunstan wrote:

OK, here is a new patch version that

* uses find_coercion_path() to find the cast function if any, as
discussed elsewhere
* removes calls to getTypeOutputInfo() except where required
* honors a cast to json only for rendering both json and jsonb
* adds processing for the date type that was previously missing in
datum_to_jsonb

Did this go anywhere?

--
Álvaro Herrera http://www.2ndQuadrant.com/
PostgreSQL Development, 24x7 Support, Remote DBA, Training & Services

--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers

#18Andrew Dunstan
andrew@dunslane.net
In reply to: Alvaro Herrera (#17)
Re: jsonb generator functions

On 12/08/2014 04:21 AM, Alvaro Herrera wrote:

Andrew Dunstan wrote:

OK, here is a new patch version that

* uses find_coercion_path() to find the cast function if any, as
discussed elsewhere
* removes calls to getTypeOutputInfo() except where required
* honors a cast to json only for rendering both json and jsonb
* adds processing for the date type that was previously missing in
datum_to_jsonb

Did this go anywhere?

Not yet. I hope to get to it this week.

cheers

andrew

--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers

#19Andrew Dunstan
andrew@dunslane.net
In reply to: Andrew Dunstan (#18)
Re: jsonb generator functions

On 12/08/2014 01:00 PM, Andrew Dunstan wrote:

On 12/08/2014 04:21 AM, Alvaro Herrera wrote:

Andrew Dunstan wrote:

OK, here is a new patch version that

* uses find_coercion_path() to find the cast function if any, as
discussed elsewhere
* removes calls to getTypeOutputInfo() except where required
* honors a cast to json only for rendering both json and jsonb
* adds processing for the date type that was previously missing in
datum_to_jsonb

Did this go anywhere?

Not yet. I hope to get to it this week.

OK, here is a new version.

The major change is that the aggregate final functions now clone the
transition value rather than modifying it directly, avoiding a similar
nearby error which Tom fixed recently.
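
For illustration (not part of the patch): an aggregate's final function can be
applied repeatedly to the same transition value, for instance when the
aggregate is used as a window function, which is why it must not scribble on
that value:

    SELECT x, jsonb_agg(x) OVER (ORDER BY x)
    FROM generate_series(1,3) AS x;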

Also here is a patch factored out which applies the
find_coercion_pathway change to json.c. I'm inclined to say we should
backpatch this to 9.4 (and with a small change 9.3). Thoughts?

cheers

andrew

--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers

#20Andrew Dunstan
andrew@dunslane.net
In reply to: Andrew Dunstan (#19)
2 attachment(s)
Re: jsonb generator functions

On 12/12/2014 01:10 PM, Andrew Dunstan wrote:

On 12/08/2014 01:00 PM, Andrew Dunstan wrote:

On 12/08/2014 04:21 AM, Alvaro Herrera wrote:

Andrew Dunstan wrote:

OK, here is a new patch version that

* uses find_coercion_path() to find the cast function if any, as
discussed elsewhere
* removes calls to getTypeOutputInfo() except where required
* honors a cast to json only for rendering both json and jsonb
* adds processing for the date type that was previously missing in
datum_to_jsonb

Did this go anywhere?

Not yet. I hope to get to it this week.

OK, here is a new version.

The major change is that the aggregate final functions now clone the
transition value rather than modifying it directly, avoiding a similar
nearby error which Tom fixed recently.

Also here is a patch factored out which applies the
find_coercion_pathway change to json.c. I'm inclined to say we should
backpatch this to 9.4 (and with a small change 9.3). Thoughts?

Er this time with patches.

cheers

andrew

Attachments:

jsonbmissingfuncs8.patchapplication/x-patch; name=jsonbmissingfuncs8.patchDownload
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index da138e1..ef69b94 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10245,9 +10245,10 @@ table2-mapping
 
   <para>
    <xref linkend="functions-json-creation-table"> shows the functions that are
-   available for creating <type>json</type> values.
-   (Currently, there are no equivalent functions for <type>jsonb</>, but you
-   can cast the result of one of these functions to <type>jsonb</>.)
+   available for creating <type>json</type> and <type>jsonb</type> values.
+   (There are no <type>jsonb</> equivalents of the <literal>row_to_json</>
+   and <literal>array_to_json</> functions; however, the <literal>to_jsonb</>
+   function supplies much the same functionality.)
   </para>
 
   <indexterm>
@@ -10268,6 +10269,18 @@ table2-mapping
   <indexterm>
    <primary>json_object</primary>
   </indexterm>
+  <indexterm>
+   <primary>to_jsonb</primary>
+  </indexterm>
+  <indexterm>
+   <primary>jsonb_build_array</primary>
+  </indexterm>
+  <indexterm>
+   <primary>jsonb_build_object</primary>
+  </indexterm>
+  <indexterm>
+   <primary>jsonb_object</primary>
+  </indexterm>
 
   <table id="functions-json-creation-table">
     <title>JSON Creation Functions</title>
@@ -10282,17 +10295,18 @@ table2-mapping
      </thead>
      <tbody>
       <row>
+       <entry><para><literal>to_json(anyelement)</literal>
+          </para><para><literal>to_jsonb(anyelement)</literal>
+       </para></entry>
        <entry>
-         <literal>to_json(anyelement)</literal>
-       </entry>
-       <entry>
-         Returns the value as JSON.  Arrays and composites are converted
+         Returns the value as <type>json</> or <type>jsonb</>.
+         Arrays and composites are converted
          (recursively) to arrays and objects; otherwise, if there is a cast
          from the type to <type>json</type>, the cast function will be used to
-         perform the conversion; otherwise, a JSON scalar value is produced.
+         perform the conversion; otherwise, a scalar value is produced.
          For any scalar type other than a number, a Boolean, or a null value,
-         the text representation will be used, properly quoted and escaped
-         so that it is a valid JSON string.
+         the text representation will be used, in such a fashion that it is a 
+         valid <type>json</> or <type>jsonb</> value.
        </entry>
        <entry><literal>to_json('Fred said "Hi."'::text)</literal></entry>
        <entry><literal>"Fred said \"Hi.\""</literal></entry>
@@ -10321,9 +10335,9 @@ table2-mapping
        <entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_build_array(VARIADIC "any")</literal>
-       </entry>
+       <entry><para><literal>json_build_array(VARIADIC "any")</literal>
+          </para><para><literal>jsonb_build_array(VARIADIC "any")</literal>
+       </para></entry>
        <entry>
          Builds a possibly-heterogeneously-typed JSON array out of a variadic
          argument list.
@@ -10332,9 +10346,9 @@ table2-mapping
        <entry><literal>[1, 2, "3", 4, 5]</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_build_object(VARIADIC "any")</literal>
-       </entry>
+       <entry><para><literal>json_build_object(VARIADIC "any")</literal>
+          </para><para><literal>jsonb_build_object(VARIADIC "any")</literal>
+       </para></entry>
        <entry>
          Builds a JSON object out of a variadic argument list.  By
          convention, the argument list consists of alternating
@@ -10344,9 +10358,9 @@ table2-mapping
        <entry><literal>{"foo": 1, "bar": 2}</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_object(text[])</literal>
-       </entry>
+       <entry><para><literal>json_object(text[])</literal>
+          </para><para><literal>jsonb_object(text[])</literal>
+       </para></entry>
        <entry>
          Builds a JSON object out of a text array.  The array must have either
          exactly one dimension with an even number of members, in which case
@@ -10359,9 +10373,9 @@ table2-mapping
        <entry><literal>{"a": "1", "b": "def", "c": "3.5"}</literal></entry>
       </row>
       <row>
-       <entry>
-         <literal>json_object(keys text[], values text[])</literal>
-       </entry>
+       <entry><para><literal>json_object(keys text[], values text[])</literal>
+          </para><para><literal>jsonb_object(keys text[], values text[])</literal>
+       </para></entry>
        <entry>
          This form of <function>json_object</> takes keys and values pairwise from two separate
          arrays. In all other respects it is identical to the one-argument form.
@@ -10780,7 +10794,8 @@ table2-mapping
     function <function>json_agg</function> which aggregates record
     values as JSON, and the aggregate function
     <function>json_object_agg</function> which aggregates pairs of values
-    into a JSON object.
+    into a JSON object, and their <type>jsonb</type> equivalents,
+    <function>jsonb_agg</> and <function>jsonb_object_agg</>.
   </para>
 
  </sect1>
@@ -12227,6 +12242,22 @@ NULL baz</literallayout>(3 rows)</entry>
      <row>
       <entry>
        <indexterm>
+        <primary>jsonb_agg</primary>
+       </indexterm>
+       <function>jsonb_agg(<replaceable class="parameter">record</replaceable>)</function>
+      </entry>
+      <entry>
+       <type>record</type>
+      </entry>
+      <entry>
+       <type>jsonb</type>
+      </entry>
+      <entry>aggregates records as a JSON array of objects</entry>
+     </row>
+
+     <row>
+      <entry>
+       <indexterm>
         <primary>json_object_agg</primary>
        </indexterm>
        <function>json_object_agg(<replaceable class="parameter">name</replaceable>, <replaceable class="parameter">value</replaceable>)</function>
@@ -12243,6 +12274,22 @@ NULL baz</literallayout>(3 rows)</entry>
      <row>
       <entry>
        <indexterm>
+        <primary>jsonb_object_agg</primary>
+       </indexterm>
+       <function>jsonb_object_agg(<replaceable class="parameter">name</replaceable>, <replaceable class="parameter">value</replaceable>)</function>
+      </entry>
+      <entry>
+       <type>("any", "any")</type>
+      </entry>
+      <entry>
+       <type>jsonb</type>
+      </entry>
+      <entry>aggregates name/value pairs as a JSON object</entry>
+     </row>
+
+     <row>
+      <entry>
+       <indexterm>
         <primary>max</primary>
        </indexterm>
        <function>max(<replaceable class="parameter">expression</replaceable>)</function>
@@ -12386,8 +12433,8 @@ SELECT count(*) FROM sometable;
 
   <para>
    The aggregate functions <function>array_agg</function>,
-   <function>json_agg</function>,
-   <function>json_object_agg</function>,
+   <function>json_agg</function>, <function>jsonb_agg</function>,
+   <function>json_object_agg</function>, <function>jsonb_object_agg</function>,
    <function>string_agg</function>,
    and <function>xmlagg</function>, as well as similar user-defined
    aggregate functions, produce meaningfully different result values
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 1d6b752..2a9aeec 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -15,7 +15,6 @@
 
 #include "access/htup_details.h"
 #include "access/transam.h"
-#include "catalog/pg_cast.h"
 #include "catalog/pg_type.h"
 #include "executor/spi.h"
 #include "lib/stringinfo.h"
@@ -1281,10 +1280,14 @@ json_categorize_type(Oid typoid,
 	/* Look through any domain */
 	typoid = getBaseType(typoid);
 
-	/* We'll usually need to return the type output function */
-	getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+	*outfuncoid = InvalidOid;
+
+	/*
+	 * We need to get the output function for everything except date and
+	 * timestamp types, array and composite types, booleans,
+	 * and non-builtin types  where there's a cast to json.
+	 */
 
-	/* Check for known types */
 	switch (typoid)
 	{
 		case BOOLOID:
@@ -1297,6 +1300,7 @@ json_categorize_type(Oid typoid,
 		case FLOAT4OID:
 		case FLOAT8OID:
 		case NUMERICOID:
+			getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
 			*tcategory = JSONTYPE_NUMERIC;
 			break;
 
@@ -1314,6 +1318,7 @@ json_categorize_type(Oid typoid,
 
 		case JSONOID:
 		case JSONBOID:
+			getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
 			*tcategory = JSONTYPE_JSON;
 			break;
 
@@ -1330,23 +1335,26 @@ json_categorize_type(Oid typoid,
 				/* but let's look for a cast to json, if it's not built-in */
 				if (typoid >= FirstNormalObjectId)
 				{
-					HeapTuple	tuple;
+					Oid castfunc;
+					CoercionPathType ctype;
 
-					tuple = SearchSysCache2(CASTSOURCETARGET,
-											ObjectIdGetDatum(typoid),
-											ObjectIdGetDatum(JSONOID));
-					if (HeapTupleIsValid(tuple))
+					ctype = find_coercion_pathway(JSONOID, typoid,
+												  COERCION_EXPLICIT, &castfunc);
+					if (ctype == COERCION_PATH_FUNC && OidIsValid(castfunc))
 					{
-						Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
-
-						if (castForm->castmethod == COERCION_METHOD_FUNCTION)
-						{
-							*tcategory = JSONTYPE_CAST;
-							*outfuncoid = castForm->castfunc;
-						}
-
-						ReleaseSysCache(tuple);
+						*tcategory = JSONTYPE_CAST;
+						*outfuncoid = castfunc;
 					}
+					else
+					{
+						/* non builtin type with no cast */
+						getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+					}
+				}
+				else
+				{
+					/* any other builtin type */
+					getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
 				}
 			}
 			break;
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
index 9beebb3..a520b36 100644
--- a/src/backend/utils/adt/jsonb.c
+++ b/src/backend/utils/adt/jsonb.c
@@ -12,11 +12,21 @@
  */
 #include "postgres.h"
 
+#include "miscadmin.h"
+#include "access/htup_details.h"
+#include "access/transam.h"
+#include "catalog/pg_type.h"
 #include "libpq/pqformat.h"
+#include "parser/parse_coerce.h"
 #include "utils/builtins.h"
+#include "utils/date.h"
+#include "utils/datetime.h"
+#include "utils/lsyscache.h"
 #include "utils/json.h"
 #include "utils/jsonapi.h"
 #include "utils/jsonb.h"
+#include "utils/syscache.h"
+#include "utils/typcache.h"
 
 typedef struct JsonbInState
 {
@@ -24,6 +34,23 @@ typedef struct JsonbInState
 	JsonbValue *res;
 } JsonbInState;
 
+/* unlike with json categories, we need to treat json and jsonb differently */
+typedef enum					/* type categories for datum_to_jsonb */
+{
+	JSONBTYPE_NULL,				/* null, so we didn't bother to identify */
+	JSONBTYPE_BOOL,				/* boolean (built-in types only) */
+	JSONBTYPE_NUMERIC,			/* numeric (ditto) */
+	JSONBTYPE_DATE,				/* we use special formatting for datetimes */
+	JSONBTYPE_TIMESTAMP,		/* we use special formatting for timestamp */
+	JSONBTYPE_TIMESTAMPTZ,		/* ... and timestamptz */
+	JSONBTYPE_JSON,				/* JSON */
+	JSONBTYPE_JSONB,			/* JSONB */
+	JSONBTYPE_ARRAY,			/* array */
+	JSONBTYPE_COMPOSITE,		/* composite */
+	JSONBTYPE_JSONCAST,			/* something with an explicit cast to JSON */
+	JSONBTYPE_OTHER				/* all else */
+}	JsonbTypeCategory;
+
 static inline Datum jsonb_from_cstring(char *json, int len);
 static size_t checkStringLen(size_t len);
 static void jsonb_in_object_start(void *pstate);
@@ -33,6 +60,23 @@ static void jsonb_in_array_end(void *pstate);
 static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull);
 static void jsonb_put_escaped_value(StringInfo out, JsonbValue *scalarVal);
 static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void composite_to_jsonb(Datum composite, JsonbInState *result);
+static void array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims,
+				   Datum *vals, bool *nulls, int *valcount,
+				   JsonbTypeCategory tcategory, Oid outfuncoid);
+static void array_to_jsonb_internal(Datum array, JsonbInState *result);
+static void jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid);
+static void datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar);
+static void add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar);
+static JsonbParseState * clone_parse_state(JsonbParseState * state);
 
 /*
  * jsonb type input function
@@ -462,3 +506,1355 @@ JsonbToCString(StringInfo out, JsonbContainer *in, int estimated_len)
 
 	return out->data;
 }
+
+
+/*
+ * Determine how we want to render values of a given type in datum_to_jsonb.
+ *
+ * Given the datatype OID, return its JsonbTypeCategory, as well as the type's
+ * output function OID.  If the returned category is JSONBTYPE_JSONCAST,
+ * we return the OID of the relevant cast function instead.
+ */
+static void
+jsonb_categorize_type(Oid typoid,
+					  JsonbTypeCategory * tcategory,
+					  Oid *outfuncoid)
+{
+	bool		typisvarlena;
+
+	/* Look through any domain */
+	typoid = getBaseType(typoid);
+
+	*outfuncoid = InvalidOid;
+
+	/*
+	 * We need to get the output function for everything except date and
+	 * timestamp types, booleans, array and composite types, json and jsonb,
+	 * and non-builtin types where there's a cast to json. In this last case
+	 * we return the oid of the cast function instead.
+	 */
+
+	switch (typoid)
+	{
+		case BOOLOID:
+			*tcategory = JSONBTYPE_BOOL;
+			break;
+
+		case INT2OID:
+		case INT4OID:
+		case INT8OID:
+		case FLOAT4OID:
+		case FLOAT8OID:
+		case NUMERICOID:
+			getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+			*tcategory = JSONBTYPE_NUMERIC;
+			break;
+
+		case DATEOID:
+			*tcategory = JSONBTYPE_DATE;
+			break;
+
+		case TIMESTAMPOID:
+			*tcategory = JSONBTYPE_TIMESTAMP;
+			break;
+
+		case TIMESTAMPTZOID:
+			*tcategory = JSONBTYPE_TIMESTAMPTZ;
+			break;
+
+		case JSONBOID:
+			*tcategory = JSONBTYPE_JSONB;
+			break;
+
+		case JSONOID:
+			*tcategory = JSONBTYPE_JSON;
+			break;
+
+		default:
+			/* Check for arrays and composites */
+			if (OidIsValid(get_element_type(typoid)))
+				*tcategory = JSONBTYPE_ARRAY;
+			else if (type_is_rowtype(typoid))
+				*tcategory = JSONBTYPE_COMPOSITE;
+			else
+			{
+				/* It's probably the general case ... */
+				*tcategory = JSONBTYPE_OTHER;
+
+				/*
+				 * but first let's look for a cast to json (note: not to jsonb)
+				 * if it's not built-in.
+				 */
+				if (typoid >= FirstNormalObjectId)
+				{
+					Oid castfunc;
+					CoercionPathType ctype;
+
+					ctype = find_coercion_pathway(JSONOID, typoid,
+												  COERCION_EXPLICIT, &castfunc);
+					if (ctype == COERCION_PATH_FUNC && OidIsValid(castfunc))
+					{
+						*tcategory = JSONBTYPE_JSONCAST;
+						*outfuncoid = castfunc;
+					}
+					else
+					{
+						/* not a cast type, so just get the usual output func */
+						getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+					}
+				}
+				else
+				{
+					/* any other builtin type */
+					getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+				}
+				break;
+			}
+	}
+}
+
+/*
+ * Turn a Datum into jsonb, adding it to the result JsonbInState.
+ *
+ * tcategory and outfuncoid are from a previous call to json_categorize_type,
+ * except that if is_null is true then they can be invalid.
+ *
+ * If key_scalar is true, the value is stored as a key, so insist
+ * it's of an acceptable type, and force it to be a jbvString.
+ */
+static void
+datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
+			   JsonbTypeCategory tcategory, Oid outfuncoid,
+			   bool key_scalar)
+{
+	char	   *outputstr;
+	bool		numeric_error;
+	JsonbValue	jb;
+	bool		scalar_jsonb = false;
+
+	if (is_null)
+	{
+		jb.type = jbvNull;
+	}
+	else if (key_scalar &&
+			 (tcategory == JSONBTYPE_ARRAY ||
+			  tcategory == JSONBTYPE_COMPOSITE ||
+			  tcategory == JSONBTYPE_JSON ||
+			  tcategory == JSONBTYPE_JSONB ||
+			  tcategory == JSONBTYPE_JSONCAST))
+	{
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+		  errmsg("key value must be scalar, not array, composite or json")));
+	}
+	else
+	{
+		if (tcategory == JSONBTYPE_JSONCAST)
+			val = OidFunctionCall1(outfuncoid, val);
+
+		switch (tcategory)
+		{
+			case JSONBTYPE_ARRAY:
+				array_to_jsonb_internal(val, result);
+				break;
+			case JSONBTYPE_COMPOSITE:
+				composite_to_jsonb(val, result);
+				break;
+			case JSONBTYPE_BOOL:
+				if (key_scalar)
+				{
+					outputstr = DatumGetBool(val) ? "true" : "false";
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					jb.type = jbvBool;
+					jb.val.boolean = DatumGetBool(val);
+				}
+				break;
+			case JSONBTYPE_NUMERIC:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				if (key_scalar)
+				{
+					/* always quote keys */
+					jb.type = jbvString;
+					jb.val.string.len = strlen(outputstr);
+					jb.val.string.val = outputstr;
+				}
+				else
+				{
+					/*
+					 * Make it numeric if it's a valid JSON number, otherwise
+					 * a string. Invalid numeric output will always have an
+					 * 'N' or 'n' in it (I think).
+					 */
+					numeric_error = (strchr(outputstr, 'N') != NULL ||
+									 strchr(outputstr, 'n') != NULL);
+					if (!numeric_error)
+					{
+						jb.type = jbvNumeric;
+						jb.val.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(outputstr), 0, -1));
+
+						pfree(outputstr);
+					}
+					else
+					{
+						jb.type = jbvString;
+						jb.val.string.len = strlen(outputstr);
+						jb.val.string.val = outputstr;
+					}
+				}
+				break;
+		case JSONBTYPE_DATE:
+			{
+				DateADT		date;
+				struct pg_tm tm;
+				char		buf[MAXDATELEN + 1];
+
+				date = DatumGetDateADT(val);
+
+				/* XSD doesn't support infinite values */
+				if (DATE_NOT_FINITE(date))
+					ereport(ERROR,
+							(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+							 errmsg("date out of range"),
+							 errdetail("JSON does not support infinite date values.")));
+				else
+				{
+					j2date(date + POSTGRES_EPOCH_JDATE,
+						   &(tm.tm_year), &(tm.tm_mon), &(tm.tm_mday));
+					EncodeDateOnly(&tm, USE_XSD_DATES, buf);
+				}
+
+				jb.type = jbvString;
+				jb.val.string.len = strlen(buf);
+				jb.val.string.val = pstrdup(buf);
+			}
+			break;
+			case JSONBTYPE_TIMESTAMP:
+				{
+					Timestamp	timestamp;
+					struct pg_tm tm;
+					fsec_t		fsec;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestamp(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
+						EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_TIMESTAMPTZ:
+				{
+					TimestampTz timestamp;
+					struct pg_tm tm;
+					int			tz;
+					fsec_t		fsec;
+					const char *tzn = NULL;
+					char		buf[MAXDATELEN + 1];
+
+					timestamp = DatumGetTimestamp(val);
+
+					/* XSD doesn't support infinite values */
+					if (TIMESTAMP_NOT_FINITE(timestamp))
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range"),
+								 errdetail("JSON does not support infinite timestamp values.")));
+					else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0)
+						EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
+					else
+						ereport(ERROR,
+								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
+								 errmsg("timestamp out of range")));
+
+					jb.type = jbvString;
+					jb.val.string.len = strlen(buf);
+					jb.val.string.val = pstrdup(buf);
+				}
+				break;
+			case JSONBTYPE_JSONCAST:
+			case JSONBTYPE_JSON:
+				{
+					/* parse the json right into the existing result object */
+					JsonLexContext *lex;
+					JsonSemAction sem;
+					text	   *json = DatumGetTextP(val);
+
+					lex = makeJsonLexContext(json, true);
+
+					memset(&sem, 0, sizeof(sem));
+
+					sem.semstate = (void *) result;
+
+					sem.object_start = jsonb_in_object_start;
+					sem.array_start = jsonb_in_array_start;
+					sem.object_end = jsonb_in_object_end;
+					sem.array_end = jsonb_in_array_end;
+					sem.scalar = jsonb_in_scalar;
+					sem.object_field_start = jsonb_in_object_field_start;
+
+					pg_parse_json(lex, &sem);
+
+				}
+				break;
+			case JSONBTYPE_JSONB:
+				{
+					Jsonb	   *jsonb = DatumGetJsonb(val);
+					int			type;
+					JsonbIterator *it;
+
+					it = JsonbIteratorInit(&jsonb->root);
+
+					if (JB_ROOT_IS_SCALAR(jsonb))
+					{
+						(void) JsonbIteratorNext(&it, &jb, true);
+						Assert(jb.type == jbvArray);
+						(void) JsonbIteratorNext(&it, &jb, true);
+						scalar_jsonb = true;
+					}
+					else
+					{
+						while ((type = JsonbIteratorNext(&it, &jb, false))
+							   != WJB_DONE)
+						{
+							if (type == WJB_END_ARRAY || type == WJB_END_OBJECT ||
+								type == WJB_BEGIN_ARRAY || type == WJB_BEGIN_OBJECT)
+								result->res = pushJsonbValue(&result->parseState,
+															 type, NULL);
+							else
+								result->res = pushJsonbValue(&result->parseState,
+															 type, &jb);
+						}
+					}
+				}
+				break;
+			default:
+				outputstr = OidOutputFunctionCall(outfuncoid, val);
+				jb.type = jbvString;
+				jb.val.string.len = checkStringLen(strlen(outputstr));
+				jb.val.string.val = outputstr;
+				break;
+		}
+	}
+	if (tcategory >= JSONBTYPE_JSON && tcategory <= JSONBTYPE_JSONCAST &&
+		!scalar_jsonb)
+	{
+		/* work has been done recursively */
+		return;
+	}
+	else if (result->parseState == NULL)
+	{
+		/* single root scalar */
+		JsonbValue	va;
+
+		va.type = jbvArray;
+		va.val.array.rawScalar = true;
+		va.val.array.nElems = 1;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, &va);
+		result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+	}
+	else
+	{
+		JsonbValue *o = &result->parseState->contVal;
+
+		switch (o->type)
+		{
+			case jbvArray:
+				result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
+				break;
+			case jbvObject:
+				result->res = pushJsonbValue(&result->parseState,
+											 key_scalar ? WJB_KEY : WJB_VALUE,
+											 &jb);
+				break;
+			default:
+				elog(ERROR, "unexpected parent of nested structure");
+		}
+	}
+}
+
+/*
+ * Process a single dimension of an array.
+ * If it's the innermost dimension, output the values, otherwise call
+ * ourselves recursively to process the next dimension.
+ */
+static void
+array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims, Datum *vals,
+				   bool *nulls, int *valcount, JsonbTypeCategory tcategory,
+				   Oid outfuncoid)
+{
+	int			i;
+
+	Assert(dim < ndims);
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 1; i <= dims[dim]; i++)
+	{
+		if (dim + 1 == ndims)
+		{
+			datum_to_jsonb(vals[*valcount], nulls[*valcount], result, tcategory,
+						   outfuncoid, false);
+			(*valcount)++;
+		}
+		else
+		{
+			array_dim_to_jsonb(result, dim + 1, ndims, dims, vals, nulls,
+							   valcount, tcategory, outfuncoid);
+		}
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+}
+
+/*
+ * Turn an array into JSON.
+ */
+static void
+array_to_jsonb_internal(Datum array, JsonbInState *result)
+{
+	ArrayType  *v = DatumGetArrayTypeP(array);
+	Oid			element_type = ARR_ELEMTYPE(v);
+	int		   *dim;
+	int			ndim;
+	int			nitems;
+	int			count = 0;
+	Datum	   *elements;
+	bool	   *nulls;
+	int16		typlen;
+	bool		typbyval;
+	char		typalign;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	ndim = ARR_NDIM(v);
+	dim = ARR_DIMS(v);
+	nitems = ArrayGetNItems(ndim, dim);
+
+	if (nitems <= 0)
+	{
+		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, NULL);
+		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
+		return;
+	}
+
+	get_typlenbyvalalign(element_type,
+						 &typlen, &typbyval, &typalign);
+
+	jsonb_categorize_type(element_type,
+						  &tcategory, &outfuncoid);
+
+	deconstruct_array(v, element_type, typlen, typbyval,
+					  typalign, &elements, &nulls,
+					  &nitems);
+
+	array_dim_to_jsonb(result, 0, ndim, dim, elements, nulls, &count, tcategory,
+					   outfuncoid);
+
+	pfree(elements);
+	pfree(nulls);
+}
+
+/*
+ * Turn a composite / record into JSON.
+ */
+static void
+composite_to_jsonb(Datum composite, JsonbInState *result)
+{
+	HeapTupleHeader td;
+	Oid			tupType;
+	int32		tupTypmod;
+	TupleDesc	tupdesc;
+	HeapTupleData tmptup,
+			   *tuple;
+	int			i;
+
+	td = DatumGetHeapTupleHeader(composite);
+
+	/* Extract rowtype info and find a tupdesc */
+	tupType = HeapTupleHeaderGetTypeId(td);
+	tupTypmod = HeapTupleHeaderGetTypMod(td);
+	tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+
+	/* Build a temporary HeapTuple control structure */
+	tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
+	tmptup.t_data = td;
+	tuple = &tmptup;
+
+	result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < tupdesc->natts; i++)
+	{
+		Datum		val;
+		bool		isnull;
+		char	   *attname;
+		JsonbTypeCategory tcategory;
+		Oid			outfuncoid;
+		JsonbValue	v;
+
+		if (tupdesc->attrs[i]->attisdropped)
+			continue;
+
+		attname = NameStr(tupdesc->attrs[i]->attname);
+
+		v.type = jbvString;
+		/* don't need checkStringLen here - can't exceed maximum name length */
+		v.val.string.len = strlen(attname);
+		v.val.string.val = attname;
+
+		result->res = pushJsonbValue(&result->parseState, WJB_KEY, &v);
+
+		val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+		if (isnull)
+		{
+			tcategory = JSONBTYPE_NULL;
+			outfuncoid = InvalidOid;
+		}
+		else
+			jsonb_categorize_type(tupdesc->attrs[i]->atttypid,
+								  &tcategory, &outfuncoid);
+
+		datum_to_jsonb(val, isnull, result, tcategory, outfuncoid, false);
+	}
+
+	result->res = pushJsonbValue(&result->parseState, WJB_END_OBJECT, NULL);
+	ReleaseTupleDesc(tupdesc);
+}
+
+/*
+ * Append JSON text for "val" to "result".
+ *
+ * This is just a thin wrapper around datum_to_jsonb.  If the same type will be
+ * printed many times, avoid using this; better to do the jsonb_categorize_type
+ * lookups only once.
+ */
+
+static void
+add_jsonb(Datum val, bool is_null, JsonbInState *result,
+		  Oid val_type, bool key_scalar)
+{
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (is_null)
+	{
+		tcategory = JSONBTYPE_NULL;
+		outfuncoid = InvalidOid;
+	}
+	else
+		jsonb_categorize_type(val_type,
+							  &tcategory, &outfuncoid);
+
+	datum_to_jsonb(val, is_null, result, tcategory, outfuncoid, key_scalar);
+}
+
+/*
+ * SQL function to_jsonb(anyvalue)
+ */
+Datum
+to_jsonb(PG_FUNCTION_ARGS)
+{
+	Datum		val = PG_GETARG_DATUM(0);
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
+	JsonbInState result;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &result, tcategory, outfuncoid, false);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_object(variadic "any")
+ */
+Datum
+jsonb_build_object(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	if (nargs % 2 != 0)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("invalid number of arguments: object must be matched key value pairs")));
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	for (i = 0; i < nargs; i += 2)
+	{
+
+		/* process key */
+
+		if (PG_ARGISNULL(i))
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: key cannot be null", i + 1)));
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+
+		/*
+		 * turn a constant (more or less literal) value that's of unknown type
+		 * into text. Unknowns come in as a cstring pointer.
+		 */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+
+		add_jsonb(arg, false, &result, val_type, true);
+
+		/* process value */
+
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
+		/* see comments above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i + 1))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i + 1))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i + 1));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i + 1);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 2)));
+		add_jsonb(arg, PG_ARGISNULL(i + 1), &result, val_type, false);
+
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_object where it gets 0 arguments.
+ */
+Datum
+jsonb_build_object_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_build_array(variadic "any")
+ */
+Datum
+jsonb_build_array(PG_FUNCTION_ARGS)
+{
+	int			nargs = PG_NARGS();
+	int			i;
+	Datum		arg;
+	Oid			val_type;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+
+	for (i = 0; i < nargs; i++)
+	{
+		val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
+		arg = PG_GETARG_DATUM(i + 1);
+		/* see comments in jsonb_build_object above */
+		if (val_type == UNKNOWNOID && get_fn_expr_arg_stable(fcinfo->flinfo, i))
+		{
+			val_type = TEXTOID;
+			if (PG_ARGISNULL(i))
+				arg = (Datum) 0;
+			else
+				arg = CStringGetTextDatum(PG_GETARG_POINTER(i));
+		}
+		else
+		{
+			arg = PG_GETARG_DATUM(i);
+		}
+		if (val_type == InvalidOid || val_type == UNKNOWNOID)
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("arg %d: could not determine data type", i + 1)));
+		add_jsonb(arg, PG_ARGISNULL(i), &result, val_type, false);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * degenerate case of jsonb_build_array where it gets 0 arguments.
+ */
+Datum
+jsonb_build_array_noargs(PG_FUNCTION_ARGS)
+{
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
+	result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * SQL function jsonb_object(text[])
+ *
+ * take a one or two dimensional array of text as name value pairs
+ * for a jsonb object.
+ *
+ */
+Datum
+jsonb_object(PG_FUNCTION_ARGS)
+{
+	ArrayType  *in_array = PG_GETARG_ARRAYTYPE_P(0);
+	int			ndims = ARR_NDIM(in_array);
+	Datum	   *in_datums;
+	bool	   *in_nulls;
+	int			in_count,
+				count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	switch (ndims)
+	{
+		case 0:
+			goto close_object;
+			break;
+
+		case 1:
+			if ((ARR_DIMS(in_array)[0]) % 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have even number of elements")));
+			break;
+
+		case 2:
+			if ((ARR_DIMS(in_array)[1]) != 2)
+				ereport(ERROR,
+						(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+						 errmsg("array must have two columns")));
+			break;
+
+		default:
+			ereport(ERROR,
+					(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+					 errmsg("wrong number of array subscripts")));
+	}
+
+	deconstruct_array(in_array,
+					  TEXTOID, -1, false, 'i',
+					  &in_datums, &in_nulls, &in_count);
+
+	count = in_count / 2;
+
+	for (i = 0; i < count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (in_nulls[i * 2])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(in_datums[i * 2]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (in_nulls[i * 2 + 1])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(in_datums[i * 2 + 1]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	pfree(in_datums);
+	pfree(in_nulls);
+
+close_object:
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+/*
+ * SQL function jsonb_object(text[], text[])
+ *
+ * take separate name and value arrays of text to construct a jsonb object
+ * pairwise.
+ */
+Datum
+jsonb_object_two_arg(PG_FUNCTION_ARGS)
+{
+	ArrayType  *key_array = PG_GETARG_ARRAYTYPE_P(0);
+	ArrayType  *val_array = PG_GETARG_ARRAYTYPE_P(1);
+	int			nkdims = ARR_NDIM(key_array);
+	int			nvdims = ARR_NDIM(val_array);
+	Datum	   *key_datums,
+			   *val_datums;
+	bool	   *key_nulls,
+			   *val_nulls;
+	int			key_count,
+				val_count,
+				i;
+	JsonbInState result;
+
+	memset(&result, 0, sizeof(JsonbInState));
+
+	result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
+
+	if (nkdims > 1 || nkdims != nvdims)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("wrong number of array subscripts")));
+
+	if (nkdims == 0)
+		PG_RETURN_POINTER(JsonbValueToJsonb(pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL)));
+
+	deconstruct_array(key_array,
+					  TEXTOID, -1, false, 'i',
+					  &key_datums, &key_nulls, &key_count);
+
+	deconstruct_array(val_array,
+					  TEXTOID, -1, false, 'i',
+					  &val_datums, &val_nulls, &val_count);
+
+	if (key_count != val_count)
+		ereport(ERROR,
+				(errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
+				 errmsg("mismatched array dimensions")));
+
+	for (i = 0; i < key_count; ++i)
+	{
+		JsonbValue	v;
+		char	   *str;
+		int			len;
+
+		if (key_nulls[i])
+			ereport(ERROR,
+					(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+					 errmsg("null value not allowed for object key")));
+
+		str = TextDatumGetCString(key_datums[i]);
+		len = strlen(str);
+
+		v.type = jbvString;
+
+		v.val.string.len = len;
+		v.val.string.val = str;
+
+		result.res = pushJsonbValue(&result.parseState, WJB_KEY, &v);
+
+		if (val_nulls[i])
+		{
+			v.type = jbvNull;
+		}
+		else
+		{
+			str = TextDatumGetCString(val_datums[i]);
+			len = strlen(str);
+
+			v.type = jbvString;
+
+			v.val.string.len = len;
+			v.val.string.val = str;
+		}
+
+		result.res = pushJsonbValue(&result.parseState, WJB_VALUE, &v);
+	}
+
+	result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
+
+	pfree(key_datums);
+	pfree(key_nulls);
+	pfree(val_datums);
+	pfree(val_nulls);
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
+}
+
+
+/*
+ * shallow clone of a parse state, suitable for use in aggregate
+ * final functions that will only append to the values rather than
+ * change them.
+ */
+static JsonbParseState *
+clone_parse_state(JsonbParseState * state)
+{
+	JsonbParseState *result, *icursor, *ocursor;
+
+	if (state == NULL)
+		return NULL;
+
+	result = palloc(sizeof(JsonbParseState));
+	icursor = state;
+	ocursor = result;
+	for(;;)
+	{
+		ocursor->contVal = icursor->contVal;
+		ocursor->size = icursor->size;
+		icursor = icursor->next;
+		if (icursor == NULL)
+			break;
+		ocursor->next= palloc(sizeof(JsonbParseState));
+		ocursor = ocursor->next;
+	}
+	ocursor->next = NULL;
+
+	return result;
+}
+
+
+/*
+ * jsonb_agg aggregate function
+ */
+Datum
+jsonb_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar = false;
+	JsonbIterator *it;
+	Jsonb	   *jbelem;
+	JsonbValue	v;
+	int			type;
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, false);
+
+	jbelem = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_ARRAY, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbelem->root);
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+				  v.val.numeric =
+					DatumGetNumeric(DirectFunctionCall1(numeric_uplus,
+														NumericGetDatum(v.val.numeric)));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 type, &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *arg;
+	JsonbInState result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	arg = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	/*
+	 * We need to do a shallow clone of the argument in case the final
+	 * function is called more than once, so we avoid changing the argument.
+	 * A shallow clone is sufficient as we aren't going to change any of the
+	 * values, just add the final array end marker.
+	 */
+
+	result.parseState = clone_parse_state(arg->parseState);
+
+	result.res = pushJsonbValue(&result.parseState,
+								 WJB_END_ARRAY, NULL);
+
+
+	out = JsonbValueToJsonb(result.res);
+
+	PG_RETURN_POINTER(out);
+}
+
+/*
+ * jsonb_object_agg aggregate function
+ */
+Datum
+jsonb_object_agg_transfn(PG_FUNCTION_ARGS)
+{
+	Oid			val_type;
+	MemoryContext oldcontext,
+				aggcontext;
+	JsonbInState elem;
+	JsonbTypeCategory tcategory;
+	Oid			outfuncoid;
+	Datum		val;
+	JsonbInState *result;
+	bool		single_scalar;
+	JsonbIterator *it;
+	Jsonb	   *jbkey,
+			   *jbval;
+	JsonbValue	v;
+	int			type;
+
+	if (!AggCheckCallContext(fcinfo, &aggcontext))
+	{
+		/* cannot be called directly because of internal-type argument */
+		elog(ERROR, "jsonb_object_agg_transfn called in non-aggregate context");
+	}
+
+	/* turn the argument into jsonb in the normal function context */
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, true);
+
+	jbkey = JsonbValueToJsonb(elem.res);
+
+	val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
+
+	if (val_type == InvalidOid)
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("could not determine input data type")));
+
+	val = PG_ARGISNULL(2) ? (Datum) 0 : PG_GETARG_DATUM(2);
+
+	jsonb_categorize_type(val_type,
+						  &tcategory, &outfuncoid);
+
+	memset(&elem, 0, sizeof(JsonbInState));
+
+	datum_to_jsonb(val, false, &elem, tcategory, outfuncoid, false);
+
+	jbval = JsonbValueToJsonb(elem.res);
+
+	/* switch to the aggregate context for accumulation operations */
+
+	oldcontext = MemoryContextSwitchTo(aggcontext);
+
+	/* set up the accumulator on the first go round */
+
+	if (PG_ARGISNULL(0))
+	{
+		result = palloc0(sizeof(JsonbInState));
+		result->res = pushJsonbValue(&result->parseState,
+									 WJB_BEGIN_OBJECT, NULL);
+
+	}
+	else
+	{
+		result = (JsonbInState *) PG_GETARG_POINTER(0);
+	}
+
+	it = JsonbIteratorInit(&jbkey->root);
+
+	/*
+	 * keys should be scalar, and we should have already checked for that
+	 * above when calling datum_to_jsonb, so we only need to look for these
+	 * things.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (!v.val.array.rawScalar)
+					elog(ERROR, "unexpected structure for key");
+				break;
+			case WJB_ELEM:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else
+				{
+					ereport(ERROR,
+							(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+							 errmsg("object keys must be strings")));
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 WJB_KEY, &v);
+				break;
+			case WJB_END_ARRAY:
+				break;
+			default:
+				elog(ERROR, "unexpected structure for key");
+				break;
+		}
+	}
+
+	it = JsonbIteratorInit(&jbval->root);
+
+	single_scalar = false;
+
+	/*
+	 * values can be anything, including structured and null, so we treat
+	 * them as in json_agg_transfn, except that single scalars are always
+	 * pushed as WJB_VALUE items.
+	 */
+
+	while ((type = JsonbIteratorNext(&it, &v, false)) != WJB_DONE)
+	{
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (v.val.array.rawScalar)
+					single_scalar = true;
+				else
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_END_ARRAY:
+				if (!single_scalar)
+					result->res = pushJsonbValue(&result->parseState,
+												 type, NULL);
+				break;
+			case WJB_BEGIN_OBJECT:
+			case WJB_END_OBJECT:
+				result->res = pushJsonbValue(&result->parseState,
+											 type, NULL);
+				break;
+			case WJB_ELEM:
+			case WJB_KEY:
+			case WJB_VALUE:
+				if (v.type == jbvString)
+				{
+					/* copy string values in the aggregate context */
+					char	   *buf = palloc(v.val.string.len + 1);
+					snprintf(buf, v.val.string.len + 1, "%s", v.val.string.val);
+					v.val.string.val = buf;
+				}
+				else if (v.type == jbvNumeric)
+				{
+					/* same for numeric */
+					v.val.numeric =
+					  DatumGetNumeric(DirectFunctionCall1(numeric_uplus,
+														  NumericGetDatum(v.val.numeric)));
+
+				}
+				result->res = pushJsonbValue(&result->parseState,
+											 single_scalar ? WJB_VALUE : type,
+											 &v);
+				break;
+		}
+	}
+
+	MemoryContextSwitchTo(oldcontext);
+
+	PG_RETURN_POINTER(result);
+}
+
+Datum
+jsonb_object_agg_finalfn(PG_FUNCTION_ARGS)
+{
+	JsonbInState *arg;
+	JsonbInState  result;
+	Jsonb	   *out;
+
+	/* cannot be called directly because of internal-type argument */
+	Assert(AggCheckCallContext(fcinfo, NULL));
+
+	if (PG_ARGISNULL(0))
+		PG_RETURN_NULL();		/* returns null iff no input values */
+
+	arg = (JsonbInState *) PG_GETARG_POINTER(0);
+
+	/*
+	 * We need to do a shallow clone of the argument in case the final
+	 * function is called more than once, so we avoid changing the argument.
+	 * A shallow clone is sufficient as we aren't going to change any of the
+	 * values, just add the final object end marker.
+	 */
+
+	result.parseState = clone_parse_state(arg->parseState);
+
+	result.res = pushJsonbValue(&result.parseState,
+								 WJB_END_OBJECT, NULL);
+
+
+	out = JsonbValueToJsonb(result.res);
+
+	PG_RETURN_POINTER(out);
+}
diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c
index c62941b..b51990f 100644
--- a/src/backend/utils/adt/jsonb_util.c
+++ b/src/backend/utils/adt/jsonb_util.c
@@ -1427,7 +1427,7 @@ convertJsonbValue(StringInfo buffer, JEntry *header, JsonbValue *val, int level)
 	else if (val->type == jbvObject)
 		convertJsonbObject(buffer, header, val, level);
 	else
-		elog(ERROR, "unknown type of jsonb container");
+		elog(ERROR, "unknown type of jsonb container to convert");
 }
 
 static void
diff --git a/src/include/catalog/pg_aggregate.h b/src/include/catalog/pg_aggregate.h
index 3279353..10cdea1 100644
--- a/src/include/catalog/pg_aggregate.h
+++ b/src/include/catalog/pg_aggregate.h
@@ -287,6 +287,10 @@ DATA(insert ( 3545	n 0 bytea_string_agg_transfn	bytea_string_agg_finalfn	-				-
 DATA(insert ( 3175	n 0 json_agg_transfn	json_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3197	n 0 json_object_agg_transfn json_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
 
+/* jsonb */
+DATA(insert ( 3267	n 0 jsonb_agg_transfn	jsonb_agg_finalfn			-				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+DATA(insert ( 3270	n 0 jsonb_object_agg_transfn jsonb_object_agg_finalfn -				-				-				f f 0	2281	0	0		0	_null_ _null_ ));
+
 /* ordered-set and hypothetical-set aggregates */
 DATA(insert ( 3972	o 1 ordered_set_transition			percentile_disc_final					-		-		-		t f 0	2281	0	0		0	_null_ _null_ ));
 DATA(insert ( 3974	o 1 ordered_set_transition			percentile_cont_float8_final			-		-		-		f f 0	2281	0	0		0	_null_ _null_ ));
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index e7db60e..e5912ea 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4656,6 +4656,32 @@ DESCR("I/O");
 DATA(insert OID =  3803 (  jsonb_send		PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "3802" _null_ _null_ _null_ _null_	jsonb_send _null_ _null_ _null_ ));
 DESCR("I/O");
 
+DATA(insert OID = 3263 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 3802 "1009" _null_ _null_ _null_ _null_ jsonb_object _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3264 (  jsonb_object	 PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "1009 1009" _null_ _null_ _null_ _null_ jsonb_object_two_arg _null_ _null_ _null_ ));
+DESCR("map text array of key value pairs to jsonb object");
+DATA(insert OID = 3787 (  to_jsonb	   PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ to_jsonb _null_ _null_ _null_ ));
+DESCR("map input to jsonb");
+DATA(insert OID = 3265 (  jsonb_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ jsonb_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate transition function");
+DATA(insert OID = 3266 (  jsonb_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb aggregate final function");
+DATA(insert OID = 3267 (  jsonb_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f s 1 0 3802 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate input into jsonb");
+DATA(insert OID = 3268 (  jsonb_object_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 3 0 2281 "2281 2276 2276" _null_ _null_ _null_ _null_ jsonb_object_agg_transfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate transition function");
+DATA(insert OID = 3269 (  jsonb_object_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f s 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_object_agg_finalfn _null_ _null_ _null_ ));
+DESCR("jsonb object aggregate final function");
+DATA(insert OID = 3270 (  jsonb_object_agg		   PGNSP PGUID 12 1 0 0 0 t f f f f f i 2 0 3802 "2276 2276" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
+DESCR("aggregate inputs into jsonb object");
+DATA(insert OID = 3271 (  jsonb_build_array	   PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_array _null_ _null_ _null_ ));
+DESCR("build a jsonb array from any inputs");
+DATA(insert OID = 3272 (  jsonb_build_array	   PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_array_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb array");
+DATA(insert OID = 3273 (  jsonb_build_object    PGNSP PGUID 12 1 0 2276 0 f f f f f f s 1 0 3802 "2276" "{2276}" "{v}" _null_ _null_ jsonb_build_object _null_ _null_ _null_ ));
+DESCR("build a jsonb object from pairwise key/value inputs");
+DATA(insert OID = 3274 (  jsonb_build_object    PGNSP PGUID 12 1 0 0 0 f f f f f f s 0 0 3802  "" _null_ _null_ _null_ _null_ jsonb_build_object_noargs _null_ _null_ _null_ ));
+DESCR("build an empty jsonb object");
 DATA(insert OID = 3262 (  jsonb_strip_nulls	   PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 3802 "3802" _null_ _null_ _null_ _null_ jsonb_strip_nulls _null_ _null_ _null_ ));
 DESCR("remove object fields with null values from jsonb");
 
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index b89e4cb..d261aaa 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -350,6 +350,22 @@ extern Datum jsonb_recv(PG_FUNCTION_ARGS);
 extern Datum jsonb_send(PG_FUNCTION_ARGS);
 extern Datum jsonb_typeof(PG_FUNCTION_ARGS);
 
+/* generator routines */
+extern Datum to_jsonb(PG_FUNCTION_ARGS);
+
+extern Datum jsonb_build_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_object_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array(PG_FUNCTION_ARGS);
+extern Datum jsonb_build_array_noargs(PG_FUNCTION_ARGS);
+extern Datum jsonb_object(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_two_arg(PG_FUNCTION_ARGS);
+
+/* jsonb_agg, json_object_agg functions */
+extern Datum jsonb_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_agg_finalfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_transfn(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_agg_finalfn(PG_FUNCTION_ARGS);
+
 /* Indexing-related ops */
 extern Datum jsonb_exists(PG_FUNCTION_ARGS);
 extern Datum jsonb_exists_any(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index 3e1d769..aa5686f 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -301,6 +301,65 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+-- to_jsonb, timestamps
+select to_jsonb(timestamp '2014-05-28 12:22:35.614298');
+           to_jsonb           
+------------------------------
+ "2014-05-28T12:22:35.614298"
+(1 row)
+
+BEGIN;
+SET LOCAL TIME ZONE 10.5;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-29T02:52:35.614298+10:30"
+(1 row)
+
+SET LOCAL TIME ZONE -8;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-28T08:22:35.614298-08:00"
+(1 row)
+
+COMMIT;
+-- unicode escape - backslash is not escaped
+select to_jsonb(text '\uabcd');
+ to_jsonb 
+----------
+ "\uabcd"
+(1 row)
+
+-- any other backslash is escaped
+select to_jsonb(text '\abcd');
+ to_jsonb 
+----------
+ "\\abcd"
+(1 row)
+
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1256,6 +1315,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/expected/jsonb_1.out b/src/test/regress/expected/jsonb_1.out
index 35da6f4..687ae63 100644
--- a/src/test/regress/expected/jsonb_1.out
+++ b/src/test/regress/expected/jsonb_1.out
@@ -301,6 +301,65 @@ SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
  [{"a": 1},{"b": [2, 3]}]
 (1 row)
 
+-- to_jsonb, timestamps
+select to_jsonb(timestamp '2014-05-28 12:22:35.614298');
+           to_jsonb           
+------------------------------
+ "2014-05-28T12:22:35.614298"
+(1 row)
+
+BEGIN;
+SET LOCAL TIME ZONE 10.5;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-29T02:52:35.614298+10:30"
+(1 row)
+
+SET LOCAL TIME ZONE -8;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+              to_jsonb              
+------------------------------------
+ "2014-05-28T08:22:35.614298-08:00"
+(1 row)
+
+COMMIT;
+-- unicode escape - backslash is not escaped
+select to_jsonb(text '\uabcd');
+ to_jsonb 
+----------
+ "\uabcd"
+(1 row)
+
+-- any other backslash is escaped
+select to_jsonb(text '\abcd');
+ to_jsonb 
+----------
+ "\\abcd"
+(1 row)
+
+--jsonb_agg
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+                                                                                                                                                                    jsonb_agg                                                                                                                                                                     
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [{"b": "a1", "c": 4, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a1", "c": 5, "z": [{"f1": 1, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 4, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 4, "f2": [4, 5, 6]}]}, {"b": "a2", "c": 5, "z": [{"f1": 2, "f2": [1, 2, 3]}, {"f1": 5, "f2": [4, 5, 6]}]}]
+(1 row)
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+                               jsonb_agg                               
+-----------------------------------------------------------------------
+ [{"x": 1, "y": "txt1"}, {"x": 2, "y": "txt2"}, {"x": 3, "y": "txt3"}]
+(1 row)
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -1256,6 +1315,120 @@ SELECT jsonb_typeof('"1.0"') AS string;
  string
 (1 row)
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                            jsonb_build_array                            
+-------------------------------------------------------------------------
+ ["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1, 2, 3]}]
+(1 row)
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+                           jsonb_build_object                            
+-------------------------------------------------------------------------
+ {"a": 1, "b": 1.2, "c": true, "d": null, "e": {"x": 3, "y": [1, 2, 3]}}
+(1 row)
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+                                       jsonb_build_object                                       
+------------------------------------------------------------------------------------------------
+ {"a": {"b": false, "c": 99}, "d": {"e": [9, 8, 7], "f": {"name": "pg_class", "relkind": "r"}}}
+(1 row)
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+ jsonb_build_array 
+-------------------
+ []
+(1 row)
+
+SELECT jsonb_build_object();
+ jsonb_build_object 
+--------------------
+ {}
+(1 row)
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+ jsonb_build_object 
+--------------------
+ {"1": 2}
+(1 row)
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+ERROR:  arg 1: key cannot be null
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+ERROR:  key value must be scalar, not array, composite or json
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+ERROR:  key value must be scalar, not array, composite or json
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+                                                                     jsonb_build_object                                                                      
+-------------------------------------------------------------------------------------------------------------------------------------------------------------
+ {"turbines": {"847001": {"name": "t15", "type": "GE1043"}, "847002": {"name": "t16", "type": "GE1043"}, "847003": {"name": "sub-alpha", "type": "GESS90"}}}
+(1 row)
+
+-- jsonb_object
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+                   jsonb_object                    
+---------------------------------------------------
+ {"3": null, "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+ERROR:  array must have even number of elements
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+ERROR:  array must have two columns
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+ERROR:  array must have two columns
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+ERROR:  wrong number of array subscripts
+--two argument form of jsonb_object
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+                   jsonb_object                   
+--------------------------------------------------
+ {"a": "1", "b": "2", "c": "3", "d e f": "a b c"}
+(1 row)
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+ERROR:  wrong number of array subscripts
+-- mismatched dimensions
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+ERROR:  mismatched array dimensions
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+ERROR:  mismatched array dimensions
+-- null key error
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+ERROR:  null value not allowed for object key
+-- empty key is allowed
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+                  jsonb_object                   
+-------------------------------------------------
+ {"": "3", "a": "1", "b": "2", "d e f": "a b c"}
+(1 row)
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
  jsonb_extract_path 
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index e8e6117..a846103 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -62,6 +62,41 @@ SELECT '    '::jsonb;			-- ERROR, no value
 -- make sure jsonb is passed through json generators without being escaped
 SELECT array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
 
+-- to_jsonb, timestamps
+
+select to_jsonb(timestamp '2014-05-28 12:22:35.614298');
+
+BEGIN;
+SET LOCAL TIME ZONE 10.5;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+SET LOCAL TIME ZONE -8;
+select to_jsonb(timestamptz '2014-05-28 12:22:35.614298-04');
+COMMIT;
+
+-- unicode escape - backslash is not escaped
+
+select to_jsonb(text '\uabcd');
+
+-- any other backslash is escaped
+
+select to_jsonb(text '\abcd');
+
+--jsonb_agg
+
+CREATE TEMP TABLE rows AS
+SELECT x, 'txt' || x as y
+FROM generate_series(1,3) AS x;
+
+SELECT jsonb_agg(q)
+  FROM ( SELECT $$a$$ || x AS b, y AS c,
+               ARRAY[ROW(x.*,ARRAY[1,2,3]),
+               ROW(y.*,ARRAY[4,5,6])] AS z
+         FROM generate_series(1,2) x,
+              generate_series(4,5) y) q;
+
+SELECT jsonb_agg(q)
+  FROM rows q;
+
 -- jsonb extraction functions
 CREATE TEMP TABLE test_jsonb (
        json_type text,
@@ -263,6 +298,86 @@ SELECT jsonb_typeof('"hello"') AS string;
 SELECT jsonb_typeof('"true"') AS string;
 SELECT jsonb_typeof('"1.0"') AS string;
 
+-- jsonb_build_array, jsonb_build_object, jsonb_object_agg
+
+SELECT jsonb_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
+
+SELECT jsonb_build_object(
+       'a', jsonb_build_object('b',false,'c',99),
+       'd', jsonb_build_object('e',array[9,8,7]::int[],
+           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
+
+
+-- empty objects/arrays
+SELECT jsonb_build_array();
+
+SELECT jsonb_build_object();
+
+-- make sure keys are quoted
+SELECT jsonb_build_object(1,2);
+
+-- keys must be scalar and not null
+SELECT jsonb_build_object(null,2);
+
+SELECT jsonb_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
+
+SELECT jsonb_build_object(json '{"a":1,"b":2}', 3);
+
+SELECT jsonb_build_object('{1,2,3}'::int[], 3);
+
+CREATE TEMP TABLE foo (serial_num int, name text, type text);
+INSERT INTO foo VALUES (847001,'t15','GE1043');
+INSERT INTO foo VALUES (847002,'t16','GE1043');
+INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
+
+SELECT jsonb_build_object('turbines',jsonb_object_agg(serial_num,jsonb_build_object('name',name,'type',type)))
+FROM foo;
+
+-- jsonb_object
+
+-- one dimension
+SELECT jsonb_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
+
+-- same but with two dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- odd number error
+SELECT jsonb_object('{a,b,c}');
+
+-- one column error
+SELECT jsonb_object('{{a},{b}}');
+
+-- too many columns error
+SELECT jsonb_object('{{a,b,c},{b,c,d}}');
+
+-- too many dimensions error
+SELECT jsonb_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
+
+--two argument form of jsonb_object
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
+
+-- too many dimensions
+SELECT jsonb_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
+
+-- mismatched dimensions
+
+select jsonb_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
+
+select jsonb_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
+
+-- null key error
+
+select jsonb_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
+
+-- empty key is allowed
+
+select jsonb_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
+
+
+
 -- extract_path, extract_path_as_text
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
 SELECT jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
Attachment: json_categorize_type.patch (application/x-patch)
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 1d6b752..2a9aeec 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -15,7 +15,6 @@
 
 #include "access/htup_details.h"
 #include "access/transam.h"
-#include "catalog/pg_cast.h"
 #include "catalog/pg_type.h"
 #include "executor/spi.h"
 #include "lib/stringinfo.h"
@@ -1281,10 +1280,14 @@ json_categorize_type(Oid typoid,
 	/* Look through any domain */
 	typoid = getBaseType(typoid);
 
-	/* We'll usually need to return the type output function */
-	getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+	*outfuncoid = InvalidOid;
+
+	/*
+	 * We need to get the output function for everything except date and
+	 * timestamp types, array and composite types, booleans,
+	 * and non-builtin types  where there's a cast to json.
+	 */
 
-	/* Check for known types */
 	switch (typoid)
 	{
 		case BOOLOID:
@@ -1297,6 +1300,7 @@ json_categorize_type(Oid typoid,
 		case FLOAT4OID:
 		case FLOAT8OID:
 		case NUMERICOID:
+			getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
 			*tcategory = JSONTYPE_NUMERIC;
 			break;
 
@@ -1314,6 +1318,7 @@ json_categorize_type(Oid typoid,
 
 		case JSONOID:
 		case JSONBOID:
+			getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
 			*tcategory = JSONTYPE_JSON;
 			break;
 
@@ -1330,23 +1335,26 @@ json_categorize_type(Oid typoid,
 				/* but let's look for a cast to json, if it's not built-in */
 				if (typoid >= FirstNormalObjectId)
 				{
-					HeapTuple	tuple;
+					Oid castfunc;
+					CoercionPathType ctype;
 
-					tuple = SearchSysCache2(CASTSOURCETARGET,
-											ObjectIdGetDatum(typoid),
-											ObjectIdGetDatum(JSONOID));
-					if (HeapTupleIsValid(tuple))
+					ctype = find_coercion_pathway(JSONOID, typoid,
+												  COERCION_EXPLICIT, &castfunc);
+					if (ctype == COERCION_PATH_FUNC && OidIsValid(castfunc))
 					{
-						Form_pg_cast castForm = (Form_pg_cast) GETSTRUCT(tuple);
-
-						if (castForm->castmethod == COERCION_METHOD_FUNCTION)
-						{
-							*tcategory = JSONTYPE_CAST;
-							*outfuncoid = castForm->castfunc;
-						}
-
-						ReleaseSysCache(tuple);
+						*tcategory = JSONTYPE_CAST;
+						*outfuncoid = castfunc;
 					}
+					else
+					{
+						/* non builtin type with no cast */
+						getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
+					}
+				}
+				else
+				{
+					/* any other builtin type */
+					getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
 				}
 			}
 			break;
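
For context on the json.c hunk above: the code path it rewrites decides whether to_json() renders a non-built-in scalar type through a function-based cast to json (JSONTYPE_CAST) or through the type's ordinary output function. Here is a minimal sketch of that behavior, using a hypothetical enum type rainbow and a hand-written cast; neither is part of the patch or its regression tests:

-- Hypothetical setup, for illustration only.
CREATE TYPE rainbow AS ENUM ('red', 'green', 'blue');

CREATE FUNCTION rainbow_to_json(rainbow) RETURNS json
    LANGUAGE sql STABLE
    AS $$ SELECT to_json('color: ' || $1::text) $$;

-- A function-based cast like this is what the JSONTYPE_CAST branch detects.
CREATE CAST (rainbow AS json) WITH FUNCTION rainbow_to_json(rainbow);

-- With the cast installed, to_json() renders the value through the cast:
SELECT to_json('red'::rainbow);
--  "color: red"

Without the cast, the same call falls through to the type's output function and simply yields "red".
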
#21Tom Lane
tgl@sss.pgh.pa.us
In reply to: Andrew Dunstan (#20)
Re: jsonb generator functions

Andrew Dunstan <andrew@dunslane.net> writes:
> Also here is a patch factored out which applies the
> find_coercion_pathway change to json.c. I'm inclined to say we should
> backpatch this to 9.4 (and with a small change 9.3). Thoughts?

Meh. Maybe I'm just feeling gunshy because I broke something within
the past 24 hours, but at this point (with 9.4.0 wrap only 3 days away)
I'm inclined to avoid any 9.4 code churn that's not clearly necessary.
You argued upthread that this change would not result in any behavioral
changes in which cast method gets selected. If that's true, then we don't
really need to back-patch; while if it turns out not to be true, we
definitely don't want it in 9.3 and I'd argue it's too late for 9.4 also.

In short, I think it's fine for the 9.4 JSON code to start diverging
from HEAD at this point ...

regards, tom lane


#22Andrew Dunstan
andrew@dunslane.net
In reply to: Tom Lane (#21)
Re: jsonb generator functions

On 12/12/2014 01:55 PM, Tom Lane wrote:

> Andrew Dunstan <andrew@dunslane.net> writes:
>> Also here is a patch factored out which applies the
>> find_coercion_pathway change to json.c. I'm inclined to say we should
>> backpatch this to 9.4 (and with a small change 9.3). Thoughts?
>
> Meh. Maybe I'm just feeling gunshy because I broke something within
> the past 24 hours, but at this point (with 9.4.0 wrap only 3 days away)
> I'm inclined to avoid any 9.4 code churn that's not clearly necessary.
> You argued upthread that this change would not result in any behavioral
> changes in which cast method gets selected. If that's true, then we don't
> really need to back-patch; while if it turns out not to be true, we
> definitely don't want it in 9.3 and I'd argue it's too late for 9.4 also.
>
> In short, I think it's fine for the 9.4 JSON code to start diverging
> from HEAD at this point ...

Ok

cheers

andrew
