PATCH: recursive json_populate_record()
Hi.
The first attached patch implements recursive processing of nested
objects and arrays in json[b]_populate_record[set]() and
json[b]_to_record[set](). See the regression tests for examples.
It also fixes the following errors/inconsistencies caused by the loss
of quoting around string JSON values:
[master]=# select * from json_to_record('{"js": "a"}') as rec(js json);
ERROR: invalid input syntax for type json
DETAIL: Token "a" is invalid.
CONTEXT: JSON data, line 1: a
[master]=# select * from json_to_record('{"js": "true"}') as rec(js json);
js
------
true
[patched]=# select * from json_to_record('{"js": "a"}') as rec(js json);
js
-----
"a"
[patched]=# select * from json_to_record('{"js": "true"}') as rec(js json);
js
--------
"true"
The second patch adds assignment of the correct ndims to array fields of
RECORD function result types.
Without this patch, attndims in the tuple descriptors of RECORD types is
always 0, and the corresponding assertion fails in the following test:
[patched]=# select json_to_record('{"a": [1, 2, 3]}') as rec(a int[]);
Should I submit these patches to commitfest?
--
Nikita Glukhov
Postgres Professional: http://www.postgrespro.com
The Russian Postgres Company
Attachments:
0001_recursive_json_populate_record_v01.patchtext/x-patch; name=0001_recursive_json_populate_record_v01.patchDownload
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index eca98df..12049a4 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -11249,12 +11249,12 @@ table2-mapping
whose columns match the record type defined by <replaceable>base</>
(see note below).
</entry>
- <entry><literal>select * from json_populate_record(null::myrowtype, '{"a":1,"b":2}')</literal></entry>
+ <entry><literal>select * from json_populate_record(null::myrowtype, '{"a": 1, "b": ["2", "a b"], "c": {"d": 4, "e": "a b c"}}')</literal></entry>
<entry>
<programlisting>
- a | b
----+---
- 1 | 2
+ a | b | c
+---+-----------+-------------
+ 1 | {2,"a b"} | (4,"a b c")
</programlisting>
</entry>
</row>
@@ -11343,12 +11343,12 @@ table2-mapping
explicitly define the structure of the record with an <literal>AS</>
clause.
</entry>
- <entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":"bar"}') as x(a int, b text, d text) </literal></entry>
+ <entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":[1,2,3],"e":"bar","r": {"a": 123, "b": "a b c"}}') as x(a int, b text, c int[], d text, r myrowtype) </literal></entry>
<entry>
<programlisting>
- a | b | d
----+---------+---
- 1 | [1,2,3] |
+ a | b | c | d | r
+---+---------+---------+---+---------------
+ 1 | [1,2,3] | {1,2,3} | | (123,"a b c")
</programlisting>
</entry>
</row>
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index 17ee4e4..729826c 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -93,7 +93,7 @@ static void elements_array_element_end(void *state, bool isnull);
static void elements_scalar(void *state, char *token, JsonTokenType tokentype);
/* turn a json object into a hash table */
-static HTAB *get_json_object_as_hash(text *json, const char *funcname);
+static HTAB *get_json_object_as_hash(char *json, int len, const char *funcname);
/* common worker for populate_record and to_record */
static Datum populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
@@ -149,6 +149,33 @@ static void setPathArray(JsonbIterator **it, Datum *path_elems,
int level, Jsonb *newval, uint32 nelems, int op_type);
static void addJsonbToParseState(JsonbParseState **jbps, Jsonb *jb);
+/* helper functions for populate_record[set] */
+typedef struct ColumnIOData ColumnIOData;
+typedef struct RecordIOData RecordIOData;
+
+static HeapTupleHeader
+populate_record(TupleDesc tupdesc,
+ RecordIOData **record_info,
+ HeapTupleHeader template,
+ MemoryContext mcxt,
+ Oid jtype,
+ HTAB *json_hash,
+ JsonbContainer *cont);
+
+static Datum
+populate_record_field(ColumnIOData *col,
+ Oid type,
+ int32 typmod,
+ int32 ndims,
+ const char *colname,
+ MemoryContext mcxt,
+ Datum defaultval,
+ Oid jtype,
+ char *json,
+ bool json_is_string,
+ JsonbValue *jval,
+ bool *isnull);
+
/* state for json_object_keys */
typedef struct OkeysState
{
@@ -216,6 +243,7 @@ typedef struct JhashState
HTAB *hash;
char *saved_scalar;
char *save_json_start;
+ bool saved_scalar_is_string;
} JHashState;
/* hashtable element */
@@ -223,26 +251,55 @@ typedef struct JsonHashEntry
{
char fname[NAMEDATALEN]; /* hash key (MUST BE FIRST) */
char *val;
- char *json;
bool isnull;
+ bool isstring;
} JsonHashEntry;
-/* these two are stolen from hstore / record_out, used in populate_record* */
-typedef struct ColumnIOData
+typedef struct ScalarIOData
{
- Oid column_type;
Oid typiofunc;
Oid typioparam;
FmgrInfo proc;
-} ColumnIOData;
+} ScalarIOData;
-typedef struct RecordIOData
+typedef struct ArrayIOData
+{
+ Oid element_type;
+ int32 element_typmod;
+ ColumnIOData *element_info;
+ int ndims;
+} ArrayIOData;
+
+/*
+ * CompositeIOData is a pointer to a RecordIOData because variable-length
+ * struct RecordIOData can't be used directly in ColumnIOData.io union
+ */
+typedef RecordIOData *CompositeIOData;
+
+/* these two are stolen from hstore / record_out, used in populate_record* */
+struct ColumnIOData
+{
+ Oid typid;
+ int32 typmod;
+ int32 ndims;
+ char type_category; /* 's' - scalar, 'a' - array, 'c' - composite */
+ bool type_is_domain;
+ void *domain_info;
+ union
+ {
+ ScalarIOData scalar;
+ ArrayIOData array;
+ CompositeIOData composite;
+ } io;
+};
+
+struct RecordIOData
{
Oid record_type;
int32 record_typmod;
int ncolumns;
ColumnIOData columns[FLEXIBLE_ARRAY_MEMBER];
-} RecordIOData;
+};
/* state for populate_recordset */
typedef struct PopulateRecordsetState
@@ -252,13 +309,30 @@ typedef struct PopulateRecordsetState
HTAB *json_hash;
char *saved_scalar;
char *save_json_start;
+ bool saved_scalar_is_string;
Tuplestorestate *tuple_store;
TupleDesc ret_tdesc;
HeapTupleHeader rec;
- RecordIOData *my_extra;
+ RecordIOData **my_extra;
MemoryContext fn_mcxt; /* used to stash IO funcs */
} PopulateRecordsetState;
+/* state for populate_array_json */
+typedef struct PopulateArrayState
+{
+ JsonLexContext *lex;
+ ArrayBuildState *astate;
+ ArrayIOData *aio;
+ MemoryContext mcxt;
+ const char *colname;
+ int *sizes;
+ int *dims;
+ int ndims;
+ char *element_start;
+ char *element_scalar;
+ bool element_scalar_is_string;
+} PopulateArrayState;
+
/* state for json_strip_nulls */
typedef struct StripnullState
{
@@ -267,10 +341,6 @@ typedef struct StripnullState
bool skip_next_null;
} StripnullState;
-/* Turn a jsonb object into a record */
-static void make_row_from_rec_and_jsonb(Jsonb *element,
- PopulateRecordsetState *state);
-
/*
* SQL function json_object_keys
*
@@ -2098,158 +2168,580 @@ json_to_record(PG_FUNCTION_ARGS)
return populate_record_worker(fcinfo, "json_to_record", false);
}
-static Datum
-populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
- bool have_record_arg)
+static void
+populate_array_object_start(void *_state)
{
- int json_arg_num = have_record_arg ? 1 : 0;
- Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
- text *json;
- Jsonb *jb = NULL;
- HTAB *json_hash = NULL;
- HeapTupleHeader rec = NULL;
- Oid tupType = InvalidOid;
- int32 tupTypmod = -1;
- TupleDesc tupdesc;
- HeapTupleData tuple;
- HeapTuple rettuple;
- RecordIOData *my_extra;
- int ncolumns;
- int i;
- Datum *values;
- bool *nulls;
+ PopulateArrayState *state = (PopulateArrayState *) _state;
- Assert(jtype == JSONOID || jtype == JSONBOID);
+ if (state->lex->lex_level < state->ndims)
+ elog(ERROR,
+ state->colname ? "expected json array in value of key \"%s\""
+ : "expected json array", state->colname);
+}
- if (have_record_arg)
+static void
+populate_array_array_start(void *_state)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (ndim < state->ndims)
+ state->sizes[ndim] = 0;
+}
+
+static void
+populate_array_array_end(void *_state)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (ndim < state->ndims)
{
- Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+ int dim = state->sizes[ndim];
- if (!type_is_rowtype(argtype))
+ if (state->dims[ndim] == -1)
+ state->dims[ndim] = dim;
+ else if (state->dims[ndim] != dim)
ereport(ERROR,
- (errcode(ERRCODE_DATATYPE_MISMATCH),
- errmsg("first argument of %s must be a row type",
- funcname)));
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("malformed json array: \"%s\"", state->lex->input),
+ errdetail("Multidimensional arrays must have "
+ "sub-arrays with matching dimensions.")));
- if (PG_ARGISNULL(0))
+ if (ndim > 0)
+ state->sizes[ndim - 1]++;
+ }
+}
+
+static void
+populate_array_element_start(void *_state, bool isnull)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (ndim == state->ndims)
+ {
+ state->element_start = state->lex->token_start;
+ state->element_scalar = NULL;
+ }
+}
+
+static void
+populate_array_element_end(void *_state, bool isnull)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (ndim == state->ndims)
+ {
+ size_t len = state->lex->prev_token_terminator - state->element_start;
+ char *str;
+ Datum element;
+ bool element_isnull;
+
+ if (isnull)
+ str = NULL;
+ else if (state->element_scalar)
+ str = state->element_scalar;
+ else
{
- if (PG_ARGISNULL(1))
- PG_RETURN_NULL();
+ str = palloc(len + 1);
+ memcpy(str, state->element_start, len);
+ str[len] = 0;
+ }
- /*
- * have no tuple to look at, so the only source of type info is
- * the argtype. The lookup_rowtype_tupdesc call below will error
- * out if we don't have a known composite type oid here.
- */
- tupType = argtype;
- tupTypmod = -1;
+ element = populate_record_field(state->aio->element_info,
+ state->aio->element_type,
+ state->aio->element_typmod,
+ 0, NULL, state->mcxt,
+ PointerGetDatum(NULL),
+ JSONOID, str,
+ state->element_scalar &&
+ state->element_scalar_is_string,
+ NULL, &element_isnull);
+
+ accumArrayResult(state->astate, element, element_isnull,
+ state->aio->element_type, NULL);
+
+ Assert(ndim > 0);
+ state->sizes[ndim - 1]++;
+ }
+}
+
+static void
+populate_array_scalar(void *_state, char *token, JsonTokenType tokentype)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+
+ if (state->lex->lex_level < state->ndims)
+ elog(ERROR,
+ state->colname ? "expected json array in value of key \"%s\""
+ : "expected json array", state->colname);
+
+ if (state->lex->lex_level == state->ndims)
+ {
+ state->element_scalar = token;
+ state->element_scalar_is_string = tokentype == JSON_TOKEN_STRING;
+ }
+}
+
+static void
+populate_array_json(ArrayBuildState *astate,
+ char *json,
+ const char *colname,
+ ArrayIOData *aio,
+ MemoryContext mcxt,
+ int *dims,
+ int ndims)
+{
+ PopulateArrayState state;
+ JsonSemAction sem;
+
+ state.lex = makeJsonLexContextCstringLen(json, strlen(json), true);
+ state.astate = astate;
+ state.aio = aio;
+ state.mcxt = mcxt;
+ state.colname = colname;
+ state.dims = dims;
+ state.ndims = ndims;
+ state.sizes = palloc0(sizeof(int) * ndims);
+
+ memset(&sem, 0, sizeof(sem));
+ sem.semstate = (void *) &state;
+ sem.object_start = populate_array_object_start;
+ sem.array_start = populate_array_array_start;
+ sem.array_end = populate_array_array_end;
+ sem.array_element_start = populate_array_element_start;
+ sem.array_element_end = populate_array_element_end;
+ sem.scalar = populate_array_scalar;
+
+ pg_parse_json(state.lex, &sem);
+
+ pfree(state.lex);
+ pfree(state.sizes);
+}
+
+static int
+populate_array_dim_jsonb(ArrayBuildState *astate,
+ JsonbValue *jbv,
+ const char *column_name,
+ ArrayIOData *aio,
+ MemoryContext mcxt,
+ int *dims,
+ int ndims,
+ int ndim)
+{
+ JsonbContainer *jbc = jbv->val.binary.data;
+ JsonbIterator *it;
+ JsonbIteratorToken tok;
+ JsonbValue val;
+ int size = 0;
+
+ check_stack_depth();
+
+ if (jbv->type != jbvBinary || !JsonContainerIsArray(jbc))
+ elog(ERROR,
+ column_name ? "expected json array in value of key \"%s\""
+ : "expected json array", column_name);
+
+ Assert(!JsonContainerIsScalar(jbc));
+
+ it = JsonbIteratorInit(jbc);
+
+ while ((tok = JsonbIteratorNext(&it, &val, true)) != WJB_DONE)
+ {
+ if (tok != WJB_ELEM)
+ continue;
+
+ if (ndim < ndims)
+ {
+ int dim = populate_array_dim_jsonb(astate, &val, column_name, aio,
+ mcxt, dims, ndims, ndim + 1);
+ if (dims[ndim] == -1)
+ dims[ndim] = dim;
+ else if (dims[ndim] != dim)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("malformed json array: \"%s\"",
+ JsonbToCString(NULL, jbc, jbv->val.binary.len)),
+ errdetail("Multidimensional arrays must have "
+ "sub-arrays with matching dimensions.")));
}
else
{
- rec = PG_GETARG_HEAPTUPLEHEADER(0);
+ bool isnull;
+ Datum elem = populate_record_field(aio->element_info,
+ aio->element_type,
+ aio->element_typmod,
+ 0, NULL, mcxt,
+ PointerGetDatum(NULL),
+ JSONBOID, NULL, false,
+ &val, &isnull);
+
+ accumArrayResult(astate, elem, isnull, aio->element_type, NULL);
+ }
- if (PG_ARGISNULL(1))
- PG_RETURN_POINTER(rec);
+ size++;
+ }
- /* Extract type info from the tuple itself */
- tupType = HeapTupleHeaderGetTypeId(rec);
- tupTypmod = HeapTupleHeaderGetTypMod(rec);
- }
+ return size;
+}
- tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+static Datum
+populate_array(ArrayIOData *aio,
+ const char *colname,
+ MemoryContext mcxt,
+ Oid jtype,
+ char *json,
+ JsonbValue *jbv)
+{
+ ArrayBuildState *astate;
+ Datum result;
+ MemoryContext acxt = CurrentMemoryContext;
+ int *dims;
+ int *lbs;
+ int ndims = aio->ndims;
+ int i;
+
+ Assert(ndims > 0);
+
+ dims = palloc(sizeof(int) * ndims);
+ lbs = palloc(sizeof(int) * ndims);
+
+ for (i = 0; i < ndims; i++)
+ {
+ dims[i] = -1;
+ lbs[i] = 1;
}
+
+ astate = initArrayResult(aio->element_type, acxt, true);
+
+ if (jtype == JSONOID)
+ populate_array_json(astate, json, colname, aio, mcxt, dims, ndims);
else
- {
- /* json{b}_to_record case */
- if (PG_ARGISNULL(0))
- PG_RETURN_NULL();
+ dims[0] = populate_array_dim_jsonb(astate, jbv, colname, aio, mcxt,
+ dims, ndims, 1);
- if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
- ereport(ERROR,
- (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("function returning record called in context "
- "that cannot accept type record"),
- errhint("Try calling the function in the FROM clause "
- "using a column definition list.")));
- }
+ result = makeMdArrayResult(astate, ndims, dims, lbs, acxt, true);
+
+ pfree(dims);
+ pfree(lbs);
+
+ return result;
+}
+
+static Datum
+populate_composite(CompositeIOData *io,
+ Oid type,
+ int32 typmod,
+ const char *colname,
+ MemoryContext mcxt,
+ HeapTupleHeader defaultval,
+ Oid jtype,
+ char *json,
+ JsonbValue *jbv)
+{
+ HeapTupleHeader tuple;
+ TupleDesc tupdesc;
+ HTAB *json_hash = NULL;
+ JsonbContainer *container = NULL;
+
+ tupdesc = lookup_rowtype_tupdesc(type, typmod); /* FIXME cache */
if (jtype == JSONOID)
+ json_hash = get_json_object_as_hash(json, strlen(json),
+ "populate_composite");
+ else
{
- /* just get the text */
- json = PG_GETARG_TEXT_P(json_arg_num);
+ if (jbv->type != jbvBinary ||
+ !JsonContainerIsObject(jbv->val.binary.data))
+ elog(ERROR,
+ colname ? "expected json object in value of key \"%s\""
+ : "expected json object", colname);
+ container = jbv->val.binary.data;
+ }
- json_hash = get_json_object_as_hash(json, funcname);
+ tuple = populate_record(tupdesc, io, defaultval, mcxt,
+ jtype, json_hash, container);
- /*
- * if the input json is empty, we can only skip the rest if we were
- * passed in a non-null record, since otherwise there may be issues
- * with domain nulls.
- */
- if (hash_get_num_entries(json_hash) == 0 && rec)
+ if (jtype == JSONOID)
+ hash_destroy(json_hash);
+
+ ReleaseTupleDesc(tupdesc);
+
+ return HeapTupleHeaderGetDatum(tuple);
+}
+
+static Datum
+populate_scalar(ScalarIOData *io,
+ Oid typid,
+ int32 typmod,
+ Oid jtype,
+ char *json,
+ bool json_is_string,
+ JsonbValue *jbv)
+{
+ Datum res;
+ char *str = NULL;
+
+ if (jtype == JSONOID)
+ {
+ /* already done the hard work in the json case */
+ if ((typid == JSONOID || typid == JSONBOID) && json_is_string)
{
- hash_destroy(json_hash);
- ReleaseTupleDesc(tupdesc);
- PG_RETURN_POINTER(rec);
+ /*
+ * Add quotes around string value (should be already escaped)
+ * if converting to json or jsonb.
+ */
+ size_t len = strlen(json);
+ str = palloc(len + 3);
+ str[0] = '"';
+ memcpy(&str[1], json, len);
+ str[len + 1] = '"';
+ str[len + 2] = 0;
}
+ else
+ str = json;
}
else
{
- jb = PG_GETARG_JSONB(json_arg_num);
-
- /* same logic as for json */
- if (JB_ROOT_COUNT(jb) == 0 && rec)
+ if (typid == JSONBOID)
+ {
+ Jsonb *jsonb = JsonbValueToJsonb(jbv);
+ return JsonbGetDatum(jsonb);
+ }
+ else if (typid == JSONOID && jbv->type != jbvBinary)
{
- ReleaseTupleDesc(tupdesc);
- PG_RETURN_POINTER(rec);
+ Jsonb *jsonb = JsonbValueToJsonb(jbv);
+ str = JsonbToCString(NULL, &jsonb->root, VARSIZE(jsonb));
}
+ else if (jbv->type == jbvString)
+ str = pnstrdup(jbv->val.string.val, jbv->val.string.len);
+ else if (jbv->type == jbvBool)
+ str = pstrdup(jbv->val.boolean ? "true" : "false");
+ else if (jbv->type == jbvNumeric)
+ str = DatumGetCString(DirectFunctionCall1(numeric_out,
+ PointerGetDatum(jbv->val.numeric)));
+ else if (jbv->type == jbvBinary)
+ str = JsonbToCString(NULL, jbv->val.binary.data,
+ jbv->val.binary.len);
+ else
+ elog(ERROR, "unrecognized jsonb type: %d", (int) jbv->type);
}
- ncolumns = tupdesc->natts;
+ res = InputFunctionCall(&io->proc, str, io->typioparam, typmod);
+
+ if (str != json)
+ pfree(str);
+
+ return res;
+}
- if (rec)
+static void
+prepare_column_info(ColumnIOData *column,
+ Oid typid,
+ int32 typmod,
+ int32 ndims,
+ MemoryContext mcxt,
+ Oid jtype)
+{
+ int32 base_typmod = typmod;
+ Oid base_typid = getBaseTypeAndTypmod(typid, &base_typmod);
+
+ if (type_is_rowtype(base_typid))
{
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = rec;
+ column->type_category = 'c'; /* composite */
+ column->io.composite = NULL;
}
+ else
+ {
+ Oid element_type = get_element_type(base_typid);
+
+ if (OidIsValid(element_type))
+ {
+ int32 base_ndims = ndims;
+
+ if (base_typid != typid)
+ {
+ Assert(ndims == 0);
+ base_ndims = get_type_ndims(typid);
+ }
+
+ Assert(base_ndims > 0);
+
+ column->type_category = 'a'; /* array */
+ column->io.array.element_info =
+ MemoryContextAllocZero(mcxt, sizeof(ColumnIOData));
+ column->io.array.element_type = element_type;
+ column->io.array.element_typmod = base_typmod;
+ column->io.array.ndims = base_ndims;
+ }
+ else
+ {
+ column->type_category = 's'; /* scalar */
+
+ /* don't need input function when converting from jsonb to jsonb */
+ if (jtype != JSONBOID || base_typid != JSONBOID)
+ {
+ getTypeInputInfo(base_typid,
+ &column->io.scalar.typiofunc,
+ &column->io.scalar.typioparam);
+ fmgr_info_cxt(column->io.scalar.typiofunc,
+ &column->io.scalar.proc, mcxt);
+
+ }
+ }
+ }
+
+ column->typid = typid;
+ column->typmod = typmod;
+ column->ndims = ndims;
+ column->type_is_domain = base_typid != typid;
+ column->domain_info = NULL;
+}
+
+static Datum
+populate_record_field(ColumnIOData *col,
+ Oid type,
+ int32 typmod,
+ int32 ndims,
+ const char *colname,
+ MemoryContext mcxt,
+ Datum defaultval,
+ Oid jtype,
+ char *json,
+ bool json_is_string,
+ JsonbValue *jval,
+ bool *isnull)
+{
+ Datum res;
+
+ check_stack_depth();
/*
- * We arrange to look up the needed I/O info just once per series of
- * calls, assuming the record type doesn't change underneath us.
+ * Prepare to convert the column value from text
*/
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- if (my_extra == NULL ||
- my_extra->ncolumns != ncolumns)
+ if (col->typid != type || col->typmod != typmod || col->ndims != ndims)
+ prepare_column_info(col, type, typmod, ndims, mcxt, jtype);
+
+ if (jtype == JSONOID ? json == NULL : jval == NULL || jval->type == jbvNull)
+ {
+ *isnull = true;
+ res = PointerGetDatum(NULL);
+ }
+ else if (jtype == JSONOID && json_is_string && col->type_category != 's')
{
- fcinfo->flinfo->fn_extra =
- MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- my_extra->record_type = InvalidOid;
- my_extra->record_typmod = 0;
- my_extra->ncolumns = ncolumns;
- MemSet(my_extra->columns, 0, sizeof(ColumnIOData) * ncolumns);
+ /* don't convert string json value to non-scalar */
+ elog(ERROR, colname ? "expected json %s in value of key \"%s\""
+ : "expected json %s",
+ col->type_category == 'a' ? "array" : "object", colname);
}
+ else
+ {
+ *isnull = false;
- if (have_record_arg && (my_extra->record_type != tupType ||
- my_extra->record_typmod != tupTypmod))
+ switch (col->type_category)
+ {
+ case 's':
+ res = populate_scalar(&col->io.scalar, type, typmod,
+ jtype, json, json_is_string, jval);
+ break;
+
+ case 'a':
+ res = populate_array(&col->io.array, colname, mcxt,
+ jtype, json, jval);
+ break;
+
+ case 'c':
+ res = populate_composite(&col->io.composite, type, typmod,
+ colname, mcxt,
+ DatumGetPointer(defaultval)
+ ? DatumGetHeapTupleHeader(defaultval)
+ : NULL,
+ jtype, json, jval);
+ break;
+
+ default:
+ elog(ERROR, "unrecognized type category '%c'",
+ col->type_category);
+ break;
+ }
+ }
+
+ if (col->type_is_domain)
+ domain_check(res, *isnull, col->typid, &col->domain_info, mcxt);
+
+ return res;
+}
+
+static RecordIOData *
+alloc_record_info(MemoryContext mcxt, int ncolumns)
+{
+ RecordIOData *data = (RecordIOData *)
+ MemoryContextAlloc(mcxt,
+ offsetof(RecordIOData, columns) +
+ ncolumns * sizeof(ColumnIOData));
+
+ data->record_type = InvalidOid;
+ data->record_typmod = 0;
+ data->ncolumns = ncolumns;
+ MemSet(data->columns, 0, sizeof(ColumnIOData) * ncolumns);
+
+ return data;
+}
+
+static HeapTupleHeader
+populate_record(TupleDesc tupdesc,
+ RecordIOData **precord,
+ HeapTupleHeader defaultval,
+ MemoryContext mcxt,
+ Oid jtype,
+ HTAB *json_hash,
+ JsonbContainer *cont)
+{
+ RecordIOData *record = *precord;
+ Datum *values;
+ bool *nulls;
+ HeapTuple res;
+ int ncolumns = tupdesc->natts;
+ int i;
+
+ /*
+ * if the input json is empty, we can only skip the rest if we were
+ * passed in a non-null record, since otherwise there may be issues
+ * with domain nulls.
+ */
+ if ((jtype == JSONOID ? hash_get_num_entries(json_hash) == 0
+ : JsonContainerSize(cont) == 0) && defaultval)
+ return defaultval;
+
+ if (record == NULL ||
+ record->ncolumns != ncolumns)
+ *precord = record = alloc_record_info(mcxt, ncolumns);
+
+ if (record->record_type != tupdesc->tdtypeid ||
+ record->record_typmod != tupdesc->tdtypmod)
{
- MemSet(my_extra, 0,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra->record_type = tupType;
- my_extra->record_typmod = tupTypmod;
- my_extra->ncolumns = ncolumns;
+ MemSet(record, 0, offsetof(RecordIOData, columns) +
+ ncolumns * sizeof(ColumnIOData));
+ record->record_type = tupdesc->tdtypeid;
+ record->record_typmod = tupdesc->tdtypmod;
+ record->ncolumns = ncolumns;
}
values = (Datum *) palloc(ncolumns * sizeof(Datum));
nulls = (bool *) palloc(ncolumns * sizeof(bool));
- if (rec)
+ if (defaultval)
{
+ HeapTupleData tuple;
+
+ /* Build a temporary HeapTuple control structure */
+ tuple.t_len = HeapTupleHeaderGetDatumLength(defaultval);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = defaultval;
+
/* Break down the tuple into fields */
heap_deform_tuple(&tuple, tupdesc, values, nulls);
}
@@ -2264,31 +2756,23 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
for (i = 0; i < ncolumns; ++i)
{
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- JsonbValue *v = NULL;
- JsonHashEntry *hashentry = NULL;
+ Form_pg_attribute att = tupdesc->attrs[i];
+ char *colname = NameStr(att->attname);
+ JsonbValue *val = NULL;
+ JsonHashEntry *hashentry = NULL;
/* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
+ if (att->attisdropped)
{
nulls[i] = true;
continue;
}
if (jtype == JSONOID)
- {
- hashentry = hash_search(json_hash,
- NameStr(tupdesc->attrs[i]->attname),
- HASH_FIND, NULL);
- }
+ hashentry = hash_search(json_hash, colname, HASH_FIND, NULL);
else
- {
- char *key = NameStr(tupdesc->attrs[i]->attname);
-
- v = findJsonbValueFromContainerLen(&jb->root, JB_FOBJECT, key,
- strlen(key));
- }
+ val = findJsonbValueFromContainerLen(cont, JB_FOBJECT, colname,
+ strlen(colname));
/*
* we can't just skip here if the key wasn't found since we might have
@@ -2298,73 +2782,133 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
* then every field which we don't populate needs to be run through
* the input function just in case it's a domain type.
*/
- if (((jtype == JSONOID && hashentry == NULL) ||
- (jtype == JSONBOID && v == NULL)) && rec)
+ if ((jtype == JSONOID ? hashentry == NULL : val == NULL) && defaultval)
continue;
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- fcinfo->flinfo->fn_mcxt);
- column_info->column_type = column_type;
- }
- if ((jtype == JSONOID && (hashentry == NULL || hashentry->isnull)) ||
- (jtype == JSONBOID && (v == NULL || v->type == jbvNull)))
+ values[i] = populate_record_field(&record->columns[i],
+ att->atttypid,
+ att->atttypmod,
+ att->attndims,
+ colname,
+ mcxt,
+ nulls[i] ? PointerGetDatum(NULL)
+ : values[i],
+ jtype,
+ hashentry && !hashentry->isnull ?
+ hashentry->val : NULL,
+ hashentry && hashentry->isstring,
+ val,
+ &nulls[i]);
+ }
+
+ res = heap_form_tuple(tupdesc, values, nulls);
+
+ pfree(values);
+ pfree(nulls);
+
+ return res->t_data;
+}
+
+static Datum
+populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
+ bool have_record_arg)
+{
+ int json_arg_num = have_record_arg ? 1 : 0;
+ Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
+ text *json;
+ Jsonb *jb = NULL;
+ HTAB *json_hash = NULL;
+ HeapTupleHeader rec = NULL;
+ Oid tupType = InvalidOid;
+ int32 tupTypmod = -1;
+ TupleDesc tupdesc;
+ HeapTupleHeader rettuple;
+
+ Assert(jtype == JSONOID || jtype == JSONBOID);
+
+ if (have_record_arg)
+ {
+ Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+
+ if (!type_is_rowtype(argtype))
+ ereport(ERROR,
+ (errcode(ERRCODE_DATATYPE_MISMATCH),
+ errmsg("first argument of %s must be a row type",
+ funcname)));
+
+ if (PG_ARGISNULL(0))
{
+ if (PG_ARGISNULL(1))
+ PG_RETURN_NULL();
+
/*
- * need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
+ * have no tuple to look at, so the only source of type info is
+ * the argtype. The lookup_rowtype_tupdesc call below will error
+ * out if we don't have a known composite type oid here.
*/
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
+ tupType = argtype;
+ tupTypmod = -1;
}
else
{
- char *s = NULL;
+ rec = PG_GETARG_HEAPTUPLEHEADER(0);
- if (jtype == JSONOID)
- {
- /* already done the hard work in the json case */
- s = hashentry->val;
- }
- else
- {
- if (v->type == jbvString)
- s = pnstrdup(v->val.string.val, v->val.string.len);
- else if (v->type == jbvBool)
- s = pnstrdup((v->val.boolean) ? "t" : "f", 1);
- else if (v->type == jbvNumeric)
- s = DatumGetCString(DirectFunctionCall1(numeric_out,
- PointerGetDatum(v->val.numeric)));
- else if (v->type == jbvBinary)
- s = JsonbToCString(NULL, (JsonbContainer *) v->val.binary.data, v->val.binary.len);
- else
- elog(ERROR, "unrecognized jsonb type: %d", (int) v->type);
- }
+ if (PG_ARGISNULL(1))
+ PG_RETURN_POINTER(rec);
- values[i] = InputFunctionCall(&column_info->proc, s,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
+ /* Extract type info from the tuple itself */
+ tupType = HeapTupleHeaderGetTypeId(rec);
+ tupTypmod = HeapTupleHeaderGetTypMod(rec);
}
+
+ tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
}
+ else
+ {
+ /* json{b}_to_record case */
+ if (PG_ARGISNULL(0))
+ PG_RETURN_NULL();
+
+ if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
+ ereport(ERROR,
+ (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+ errmsg("function returning record called in context "
+ "that cannot accept type record"),
+ errhint("Try calling the function in the FROM clause "
+ "using a column definition list.")));
- rettuple = heap_form_tuple(tupdesc, values, nulls);
+ Assert(tupdesc);
+ }
- ReleaseTupleDesc(tupdesc);
+ if (jtype == JSONOID)
+ {
+ /* just get the text */
+ json = PG_GETARG_TEXT_P(json_arg_num);
+ json_hash = get_json_object_as_hash(VARDATA(json),
+ VARSIZE(json) - VARHDRSZ,
+ funcname);
+ }
+ else
+ jb = PG_GETARG_JSONB(json_arg_num);
+
+ /*
+ * We arrange to look up the needed I/O info just once per series of
+ * calls, assuming the record type doesn't change underneath us.
+ */
+ rettuple = populate_record(tupdesc,
+ (RecordIOData **) &fcinfo->flinfo->fn_extra,
+ rec,
+ fcinfo->flinfo->fn_mcxt,
+ jtype,
+ json_hash,
+ &jb->root);
if (json_hash)
hash_destroy(json_hash);
- PG_RETURN_DATUM(HeapTupleGetDatum(rettuple));
+ ReleaseTupleDesc(tupdesc);
+
+ PG_RETURN_HEAPTUPLEHEADER(rettuple);
}
/*
@@ -2373,12 +2917,12 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
* decompose a json object into a hash table.
*/
static HTAB *
-get_json_object_as_hash(text *json, const char *funcname)
+get_json_object_as_hash(char *json, int len, const char *funcname)
{
HASHCTL ctl;
HTAB *tab;
JHashState *state;
- JsonLexContext *lex = makeJsonLexContext(json, true);
+ JsonLexContext *lex = makeJsonLexContextCstringLen(json, len, true);
JsonSemAction *sem;
memset(&ctl, 0, sizeof(ctl));
@@ -2460,6 +3004,7 @@ hash_object_field_end(void *state, char *fname, bool isnull)
*/
hashentry->isnull = isnull;
+
if (_state->save_json_start != NULL)
{
int len = _state->lex->prev_token_terminator - _state->save_json_start;
@@ -2468,11 +3013,13 @@ hash_object_field_end(void *state, char *fname, bool isnull)
memcpy(val, _state->save_json_start, len);
val[len] = '\0';
hashentry->val = val;
+ hashentry->isstring = false;
}
else
{
/* must have had a scalar instead */
hashentry->val = _state->saved_scalar;
+ hashentry->isstring = _state->saved_scalar_is_string;
}
}
@@ -2498,7 +3045,10 @@ hash_scalar(void *state, char *token, JsonTokenType tokentype)
errmsg("cannot call %s on a scalar", _state->function_name)));
if (_state->lex->lex_level == 1)
+ {
_state->saved_scalar = token;
+ _state->saved_scalar_is_string = tokentype == JSON_TOKEN_STRING;
+ }
}
@@ -2537,121 +3087,24 @@ json_to_recordset(PG_FUNCTION_ARGS)
}
static void
-make_row_from_rec_and_jsonb(Jsonb *element, PopulateRecordsetState *state)
+populate_recordset_record(PopulateRecordsetState *state,
+ Oid jtype, HTAB *json_hash, JsonbContainer *jsonb)
{
- Datum *values;
- bool *nulls;
- int i;
- RecordIOData *my_extra = state->my_extra;
- int ncolumns = my_extra->ncolumns;
- TupleDesc tupdesc = state->ret_tdesc;
- HeapTupleHeader rec = state->rec;
- HeapTuple rettuple;
-
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
-
- if (state->rec)
- {
- HeapTupleData tuple;
-
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(state->rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = state->rec;
-
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
- }
- else
- {
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
- }
-
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- JsonbValue *v = NULL;
- char *key;
-
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
-
- key = NameStr(tupdesc->attrs[i]->attname);
-
- v = findJsonbValueFromContainerLen(&element->root, JB_FOBJECT,
- key, strlen(key));
-
- /*
- * We can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (v == NULL && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- state->fn_mcxt);
- column_info->column_type = column_type;
- }
- if (v == NULL || v->type == jbvNull)
- {
- /*
- * Need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
- */
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
- }
- else
- {
- char *s = NULL;
-
- if (v->type == jbvString)
- s = pnstrdup(v->val.string.val, v->val.string.len);
- else if (v->type == jbvBool)
- s = pnstrdup((v->val.boolean) ? "t" : "f", 1);
- else if (v->type == jbvNumeric)
- s = DatumGetCString(DirectFunctionCall1(numeric_out,
- PointerGetDatum(v->val.numeric)));
- else if (v->type == jbvBinary)
- s = JsonbToCString(NULL, (JsonbContainer *) v->val.binary.data, v->val.binary.len);
- else
- elog(ERROR, "unrecognized jsonb type: %d", (int) v->type);
-
- values[i] = InputFunctionCall(&column_info->proc, s,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
- }
- }
-
- rettuple = heap_form_tuple(tupdesc, values, nulls);
-
- tuplestore_puttuple(state->tuple_store, rettuple);
+ HeapTupleData tuple;
+ HeapTupleHeader tuphead = populate_record(state->ret_tdesc,
+ state->my_extra,
+ state->rec,
+ state->fn_mcxt,
+ jtype,
+ json_hash,
+ jsonb);
+
+ tuple.t_len = HeapTupleHeaderGetDatumLength(tuphead);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = tuphead;
+
+ tuplestore_puttuple(state->tuple_store, &tuple);
}
/*
@@ -2665,12 +3118,8 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
ReturnSetInfo *rsi;
MemoryContext old_cxt;
- Oid tupType;
- int32 tupTypmod;
HeapTupleHeader rec;
TupleDesc tupdesc;
- RecordIOData *my_extra;
- int ncolumns;
PopulateRecordsetState *state;
if (have_record_arg)
@@ -2716,38 +3165,6 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
else
rec = PG_GETARG_HEAPTUPLEHEADER(0);
- tupType = tupdesc->tdtypeid;
- tupTypmod = tupdesc->tdtypmod;
- ncolumns = tupdesc->natts;
-
- /*
- * We arrange to look up the needed I/O info just once per series of
- * calls, assuming the record type doesn't change underneath us.
- */
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- if (my_extra == NULL ||
- my_extra->ncolumns != ncolumns)
- {
- fcinfo->flinfo->fn_extra =
- MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- my_extra->record_type = InvalidOid;
- my_extra->record_typmod = 0;
- }
-
- if (my_extra->record_type != tupType ||
- my_extra->record_typmod != tupTypmod)
- {
- MemSet(my_extra, 0,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra->record_type = tupType;
- my_extra->record_typmod = tupTypmod;
- my_extra->ncolumns = ncolumns;
- }
-
state = palloc0(sizeof(PopulateRecordsetState));
/* make these in a sufficiently long-lived memory context */
@@ -2760,7 +3177,7 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
MemoryContextSwitchTo(old_cxt);
state->function_name = funcname;
- state->my_extra = my_extra;
+ state->my_extra = (RecordIOData **) &fcinfo->flinfo->fn_extra;
state->rec = rec;
state->fn_mcxt = fcinfo->flinfo->fn_mcxt;
@@ -2811,14 +3228,15 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
if (r == WJB_ELEM)
{
- Jsonb *element = JsonbValueToJsonb(&v);
-
- if (!JB_ROOT_IS_OBJECT(element))
+ if (v.type != jbvBinary ||
+ !(JsonContainerIsObject(v.val.binary.data)))
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("argument of %s must be an array of objects",
funcname)));
- make_row_from_rec_and_jsonb(element, state);
+
+ populate_recordset_record(state, JSONBOID, NULL,
+ v.val.binary.data);
}
}
}
@@ -2862,115 +3280,16 @@ static void
populate_recordset_object_end(void *state)
{
PopulateRecordsetState *_state = (PopulateRecordsetState *) state;
- HTAB *json_hash = _state->json_hash;
- Datum *values;
- bool *nulls;
- int i;
- RecordIOData *my_extra = _state->my_extra;
- int ncolumns = my_extra->ncolumns;
- TupleDesc tupdesc = _state->ret_tdesc;
- JsonHashEntry *hashentry;
- HeapTupleHeader rec = _state->rec;
- HeapTuple rettuple;
/* Nested objects require no special processing */
if (_state->lex->lex_level > 1)
return;
/* Otherwise, construct and return a tuple based on this level-1 object */
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
-
- if (_state->rec)
- {
- HeapTupleData tuple;
-
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(_state->rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = _state->rec;
-
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
- }
- else
- {
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
- }
-
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- char *value;
-
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
-
- hashentry = hash_search(json_hash,
- NameStr(tupdesc->attrs[i]->attname),
- HASH_FIND, NULL);
-
- /*
- * we can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (hashentry == NULL && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- _state->fn_mcxt);
- column_info->column_type = column_type;
- }
- if (hashentry == NULL || hashentry->isnull)
- {
- /*
- * need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
- */
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
- }
- else
- {
- value = hashentry->val;
-
- values[i] = InputFunctionCall(&column_info->proc, value,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
- }
- }
-
- rettuple = heap_form_tuple(tupdesc, values, nulls);
-
- tuplestore_puttuple(_state->tuple_store, rettuple);
+ populate_recordset_record(_state, JSONOID, _state->json_hash, NULL);
/* Done with hash for this object */
- hash_destroy(json_hash);
+ hash_destroy(_state->json_hash);
_state->json_hash = NULL;
}
@@ -3066,11 +3385,13 @@ populate_recordset_object_field_end(void *state, char *fname, bool isnull)
memcpy(val, _state->save_json_start, len);
val[len] = '\0';
hashentry->val = val;
+ hashentry->isstring = false;
}
else
{
/* must have had a scalar instead */
hashentry->val = _state->saved_scalar;
+ hashentry->isstring = _state->saved_scalar_is_string;
}
}
diff --git a/src/backend/utils/cache/lsyscache.c b/src/backend/utils/cache/lsyscache.c
index 13ae6ad..641005b 100644
--- a/src/backend/utils/cache/lsyscache.c
+++ b/src/backend/utils/cache/lsyscache.c
@@ -2492,6 +2492,26 @@ get_array_type(Oid typid)
}
/*
+ * get_type_ndims
+ *
+ * Given the type OID, get the typndims.
+ */
+int32
+get_type_ndims(Oid typid)
+{
+ HeapTuple tp;
+ int32 result = -1;
+
+ tp = SearchSysCache1(TYPEOID, ObjectIdGetDatum(typid));
+ if (HeapTupleIsValid(tp))
+ {
+ result = ((Form_pg_type) GETSTRUCT(tp))->typndims;
+ ReleaseSysCache(tp);
+ }
+ return result;
+}
+
+/*
* get_promoted_array_type
*
* The "promoted" type is what you'd get from an ARRAY(SELECT ...)
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index 470d5b1..850a825 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -218,6 +218,11 @@ typedef struct
#define JB_ROOT_IS_OBJECT(jbp_) ( *(uint32*) VARDATA(jbp_) & JB_FOBJECT)
#define JB_ROOT_IS_ARRAY(jbp_) ( *(uint32*) VARDATA(jbp_) & JB_FARRAY)
+#define JsonContainerSize(jc) ((jc)->header & JB_CMASK)
+#define JsonContainerIsObject(jc) ((jc)->header & JB_FOBJECT)
+#define JsonContainerIsArray(jc) ((jc)->header & JB_FARRAY)
+#define JsonContainerIsScalar(jc) ((jc)->header & JB_FSCALAR)
+
enum jbvType
{
diff --git a/src/include/utils/lsyscache.h b/src/include/utils/lsyscache.h
index dcb8980..d25cac7 100644
--- a/src/include/utils/lsyscache.h
+++ b/src/include/utils/lsyscache.h
@@ -136,6 +136,7 @@ extern Oid get_element_type(Oid typid);
extern Oid get_array_type(Oid typid);
extern Oid get_promoted_array_type(Oid typid);
extern Oid get_base_element_type(Oid typid);
+extern int32 get_type_ndims(Oid typid);
extern void getTypeInputInfo(Oid type, Oid *typInput, Oid *typIOParam);
extern void getTypeOutputInfo(Oid type, Oid *typOutput, bool *typIsVarlena);
extern void getTypeBinaryInputInfo(Oid type, Oid *typReceive, Oid *typIOParam);
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index efcdc41..3edc398 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -1304,6 +1304,30 @@ select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8
-- populate_record
create type jpop as (a text, b int, c timestamp);
+CREATE DOMAIN js_int_not_null AS int NOT NULL;
+CREATE DOMAIN js_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN js_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+CREATE TYPE jsrec AS (
+ i int,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d js_int_array_1d,
+ ia2d js_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jpop,
+ reca jpop[]
+);
+CREATE TYPE jsrec_i_not_null AS (
+ i js_int_not_null
+);
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
@@ -1342,6 +1366,302 @@ select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100,200,false]"
+select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{}') q;
+ a | b | c
+---+---+--------------------------
+ x | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"x": 43.2}') q;
+ERROR: domain js_int_not_null does not allow null values
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": null}') q;
+ERROR: domain js_int_not_null does not allow null values
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": 12345}') q;
+ i
+-------
+ 12345
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": null}') q;
+ ia1
+-----
+
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": 123}') q;
+ERROR: expected json array in value of key "ia1"
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [1, "2", null, 4]}') q;
+ ia1
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [[1, 2, 3]]}') q;
+ERROR: invalid input syntax for integer: "[1, 2, 3]"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": null}') q;
+ ia1d
+------
+
+(1 row)
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": 123}') q;
+ERROR: expected json array in value of key "ia1d"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null, 4]}') q;
+ERROR: value for domain js_int_array_1d violates check constraint "js_int_array_1d_check"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null]}') q;
+ ia1d
+------------
+ {1,2,NULL}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [1, "2", null, 4]}') q;
+ERROR: expected json array in value of key "ia2"
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+ ia2
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[], []]}') q;
+ ia2
+-----
+ {}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [3]]}') q;
+ERROR: malformed json array: "[[1, 2], [3]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], 3, 4]}') q;
+ERROR: expected json array in value of key "ia2"
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+ERROR: value for domain js_int_array_2d violates check constraint "js_int_array_2d_check"
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+ ia2d
+----------------------
+ {{1,2,3},{NULL,5,6}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [1, "2", null, 4]}') q;
+ERROR: expected json array in value of key "ia3"
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+ERROR: expected json array in value of key "ia3"
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+ ia3
+-----
+ {}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+ ia3
+-------------------
+ {{{1,2}},{{3,4}}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+ ia3
+-------------------------------
+ {{{1,2},{3,4}},{{5,6},{7,8}}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+ERROR: malformed json array: "[ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": null}') q;
+ ta
+----
+
+(1 row)
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": 123}') q;
+ERROR: expected json array in value of key "ta"
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [1, "2", null, 4]}') q;
+ ta
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+ ta
+--------------------------------
+ {"[1, 2, 3]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": null}') q;
+ c
+---
+
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaa"}') q;
+ c
+------------
+ aaa
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaa"}') q;
+ c
+------------
+ aaaaaaaaaa
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaaaaa"}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": null}') q;
+ ca
+----
+
+(1 row)
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": 123}') q;
+ERROR: expected json array in value of key "ca"
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [1, "2", null, 4]}') q;
+ ca
+-----------------------------------------------
+ {"1 ","2 ",NULL,"4 "}
+(1 row)
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+ ca
+---------------------------------
+ {"[1, 2, 3] ","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": null}') q;
+ js
+----
+
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": true}') q;
+ js
+------
+ true
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": 123.45}') q;
+ js
+--------
+ 123.45
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "123.45"}') q;
+ js
+----------
+ "123.45"
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "abc"}') q;
+ js
+-------
+ "abc"
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+ js
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ js
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": null}') q;
+ jsb
+-----
+
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": true}') q;
+ jsb
+------
+ true
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": 123.45}') q;
+ jsb
+--------
+ 123.45
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "123.45"}') q;
+ jsb
+----------
+ "123.45"
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "abc"}') q;
+ jsb
+-------
+ "abc"
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+ jsb
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ jsb
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": null}') q;
+ jsa
+-----
+
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": 123}') q;
+ERROR: expected json array in value of key "jsa"
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": [1, "2", null, 4]}') q;
+ jsa
+--------------------
+ {1,"\"2\"",NULL,4}
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+ jsa
+----------------------------------------------------------
+ {"\"aaa\"",NULL,"[1, 2, \"3\", {}]","{ \"k\" : \"v\" }"}
+(1 row)
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": 123}') q;
+ERROR: cannot call populate_composite on a scalar
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": [1, 2]}') q;
+ERROR: cannot call populate_composite on an array
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+ rec
+-----------------------------------
+ (abc,,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": 123}') q;
+ERROR: expected json array in value of key "reca"
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [1, 2]}') q;
+ERROR: cannot call populate_composite on a scalar
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+ reca
+--------------------------------------------------------
+ {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT rec FROM json_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jpop,NULL)::jsrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+ rec
+------------------------------------
+ (abc,3,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
@@ -1408,6 +1728,28 @@ select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,3
{"z":true} | 3 | Fri Jan 20 10:42:53 2012
(2 rows)
+-- test type info caching in json_populate_record()
+CREATE TEMP TABLE jspoptest (js json);
+INSERT INTO jspoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::json
+FROM generate_series(1, 3);
+SELECT (json_populate_record(NULL::jsrec, js)).* FROM jspoptest;
+ i | ia1 | ia2 | ia3 | ia1d | ia2d | t | ta | c | ca | ts | js | jsb | jsa | rec | reca
+---+-----+-----+-----+------+------+---+----+---+----+----+----+-----+--------------------+-----------------------------------+--------------------------------------------------------
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(3 rows)
+
+DROP TYPE jsrec;
+DROP TYPE jsrec_i_not_null;
+DROP DOMAIN js_int_not_null;
+DROP DOMAIN js_int_array_1d;
+DROP DOMAIN js_int_array_2d;
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
@@ -1600,11 +1942,11 @@ select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":fa
(2 rows)
select *, c is null as c_is_null
-from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::json)
- as t(a int, b json, c text, x int);
- a | b | c | x | c_is_null
----+-----------------+---+---+-----------
- 1 | {"c":16, "d":2} | | 8 | t
+from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::json)
+ as t(a int, b json, c text, x int, ca char(5)[], ia int[][], r jpop);
+ a | b | c | x | ca | ia | r | c_is_null
+---+-----------------+---+---+-------------------+---------------+------------+-----------
+ 1 | {"c":16, "d":2} | | 8 | {"1 2 ","3 "} | {{1,2},{3,4}} | (aaa,123,) | t
(1 row)
select *, c is null as c_is_null
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index e2cb08a..4c7727e 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -1888,6 +1888,30 @@ SELECT * FROM jsonb_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,
-- populate_record
CREATE TYPE jbpop AS (a text, b int, c timestamp);
+CREATE DOMAIN jsb_int_not_null AS int NOT NULL;
+CREATE DOMAIN jsb_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN jsb_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+CREATE TYPE jsbrec AS (
+ i int,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d jsb_int_array_1d,
+ ia2d jsb_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jbpop,
+ reca jbpop[]
+);
+CREATE TYPE jsbrec_i_not_null AS (
+ i jsb_int_not_null
+);
SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
@@ -1926,6 +1950,302 @@ SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100, 200, false]"
+SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop, '{}') q;
+ a | b | c
+---+---+--------------------------
+ x | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"x": 43.2}') q;
+ERROR: domain jsb_int_not_null does not allow null values
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": null}') q;
+ERROR: domain jsb_int_not_null does not allow null values
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": 12345}') q;
+ i
+-------
+ 12345
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": null}') q;
+ ia1
+-----
+
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": 123}') q;
+ERROR: expected json array in value of key "ia1"
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [1, "2", null, 4]}') q;
+ ia1
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [[1, 2, 3]]}') q;
+ERROR: invalid input syntax for integer: "[1, 2, 3]"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": null}') q;
+ ia1d
+------
+
+(1 row)
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": 123}') q;
+ERROR: expected json array in value of key "ia1d"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null, 4]}') q;
+ERROR: value for domain jsb_int_array_1d violates check constraint "jsb_int_array_1d_check"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null]}') q;
+ ia1d
+------------
+ {1,2,NULL}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [1, "2", null, 4]}') q;
+ERROR: expected json array in value of key "ia2"
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+ ia2
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[], []]}') q;
+ ia2
+-----
+ {}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [3]]}') q;
+ERROR: malformed json array: "[[1, 2], [3]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], 3, 4]}') q;
+ERROR: expected json array in value of key "ia2"
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+ERROR: value for domain jsb_int_array_2d violates check constraint "jsb_int_array_2d_check"
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+ ia2d
+----------------------
+ {{1,2,3},{NULL,5,6}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [1, "2", null, 4]}') q;
+ERROR: expected json array in value of key "ia3"
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+ERROR: expected json array in value of key "ia3"
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+ ia3
+-----
+ {}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+ ia3
+-------------------
+ {{{1,2}},{{3,4}}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+ ia3
+-------------------------------
+ {{{1,2},{3,4}},{{5,6},{7,8}}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+ERROR: malformed json array: "[[[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": null}') q;
+ ta
+----
+
+(1 row)
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": 123}') q;
+ERROR: expected json array in value of key "ta"
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [1, "2", null, 4]}') q;
+ ta
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+ ta
+--------------------------------
+ {"[1, 2, 3]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": null}') q;
+ c
+---
+
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaa"}') q;
+ c
+------------
+ aaa
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaa"}') q;
+ c
+------------
+ aaaaaaaaaa
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaaaaa"}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": null}') q;
+ ca
+----
+
+(1 row)
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": 123}') q;
+ERROR: expected json array in value of key "ca"
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [1, "2", null, 4]}') q;
+ ca
+-----------------------------------------------
+ {"1 ","2 ",NULL,"4 "}
+(1 row)
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+ ca
+---------------------------------
+ {"[1, 2, 3] ","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": null}') q;
+ js
+----
+
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": true}') q;
+ js
+------
+ true
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": 123.45}') q;
+ js
+--------
+ 123.45
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "123.45"}') q;
+ js
+----------
+ "123.45"
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "abc"}') q;
+ js
+-------
+ "abc"
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+ js
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ js
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": null}') q;
+ jsb
+-----
+
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": true}') q;
+ jsb
+------
+ true
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": 123.45}') q;
+ jsb
+--------
+ 123.45
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "123.45"}') q;
+ jsb
+----------
+ "123.45"
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "abc"}') q;
+ jsb
+-------
+ "abc"
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+ jsb
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ jsb
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": null}') q;
+ jsa
+-----
+
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": 123}') q;
+ERROR: expected json array in value of key "jsa"
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": [1, "2", null, 4]}') q;
+ jsa
+--------------------
+ {1,"\"2\"",NULL,4}
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+ jsa
+-------------------------------------------------------
+ {"\"aaa\"",NULL,"[1, 2, \"3\", {}]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
+ERROR: expected json object in value of key "rec"
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
+ERROR: expected json object in value of key "rec"
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+ rec
+-----------------------------------
+ (abc,,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
+ERROR: expected json array in value of key "reca"
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
+ERROR: expected json object
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+ reca
+--------------------------------------------------------
+ {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jbpop,NULL)::jsbrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+ rec
+------------------------------------
+ (abc,3,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
-- populate_recordset
SELECT * FROM jsonb_populate_recordset(NULL::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
@@ -2002,11 +2322,11 @@ select * from jsonb_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar",
(2 rows)
select *, c is null as c_is_null
-from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::jsonb)
- as t(a int, b jsonb, c text, x int);
- a | b | c | x | c_is_null
----+-------------------+---+---+-----------
- 1 | {"c": 16, "d": 2} | | 8 | t
+from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::jsonb)
+ as t(a int, b jsonb, c text, x int, ca char(5)[], ia int[][], r jbpop);
+ a | b | c | x | ca | ia | r | c_is_null
+---+-------------------+---+---+-------------------+---------------+------------+-----------
+ 1 | {"c": 16, "d": 2} | | 8 | {"1 2 ","3 "} | {{1,2},{3,4}} | (aaa,123,) | t
(1 row)
select *, c is null as c_is_null
@@ -2017,6 +2337,28 @@ from jsonb_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::jsonb)
1 | {"c": 16, "d": 2} | | 8 | t
(1 row)
+-- test type info caching in jsonb_populate_record()
+CREATE TEMP TABLE jsbpoptest (js jsonb);
+INSERT INTO jsbpoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::jsonb
+FROM generate_series(1, 3);
+SELECT (jsonb_populate_record(NULL::jsbrec, js)).* FROM jsbpoptest;
+ i | ia1 | ia2 | ia3 | ia1d | ia2d | t | ta | c | ca | ts | js | jsb | jsa | rec | reca
+---+-----+-----+-----+------+------+---+----+---+----+----+----+-----+--------------------+-----------------------------------+--------------------------------------------------------
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(3 rows)
+
+DROP TYPE jsbrec;
+DROP TYPE jsbrec_i_not_null;
+DROP DOMAIN jsb_int_not_null;
+DROP DOMAIN jsb_int_array_1d;
+DROP DOMAIN jsb_int_array_2d;
-- indexing
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}';
count
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index 603288b..965a0e0 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -378,6 +378,33 @@ select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8
-- populate_record
create type jpop as (a text, b int, c timestamp);
+CREATE DOMAIN js_int_not_null AS int NOT NULL;
+CREATE DOMAIN js_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN js_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+
+CREATE TYPE jsrec AS (
+ i int,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d js_int_array_1d,
+ ia2d js_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jpop,
+ reca jpop[]
+);
+
+CREATE TYPE jsrec_i_not_null AS (
+ i js_int_not_null
+);
+
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":"blurfl","x":43.2}') q;
@@ -388,6 +415,89 @@ select * from json_populate_record(null::jpop,'{"a":[100,200,false],"x":43.2}')
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":[100,200,false],"x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
+select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{}') q;
+
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"x": 43.2}') q;
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": null}') q;
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": 12345}') q;
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": null}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": 123}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [1, "2", null, 4]}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [[1, 2, 3]]}') q;
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": null}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": 123}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null, 4]}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null]}') q;
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [1, "2", null, 4]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[], []]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [3]]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], 3, 4]}') q;
+
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [1, "2", null, 4]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": null}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": 123}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [1, "2", null, 4]}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": null}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaa"}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaa"}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaaaaa"}') q;
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": null}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": 123}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [1, "2", null, 4]}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": null}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": true}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": 123.45}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "123.45"}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "abc"}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": null}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": true}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": 123.45}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "123.45"}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "abc"}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": null}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": 123}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": [1, "2", null, 4]}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": 123}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": [1, 2]}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": 123}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [1, 2]}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+
+SELECT rec FROM json_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jpop,NULL)::jsrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
@@ -404,6 +514,25 @@ select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+-- test type info caching in json_populate_record()
+CREATE TEMP TABLE jspoptest (js json);
+
+INSERT INTO jspoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::json
+FROM generate_series(1, 3);
+
+SELECT (json_populate_record(NULL::jsrec, js)).* FROM jspoptest;
+
+DROP TYPE jsrec;
+DROP TYPE jsrec_i_not_null;
+DROP DOMAIN js_int_not_null;
+DROP DOMAIN js_int_array_1d;
+DROP DOMAIN js_int_array_2d;
+
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
@@ -520,8 +649,8 @@ select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":fa
as x(a int, b json, c boolean);
select *, c is null as c_is_null
-from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::json)
- as t(a int, b json, c text, x int);
+from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::json)
+ as t(a int, b json, c text, x int, ca char(5)[], ia int[][], r jpop);
select *, c is null as c_is_null
from json_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::json)
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index 6b4c796..4fde752 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -478,6 +478,33 @@ SELECT * FROM jsonb_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,
-- populate_record
CREATE TYPE jbpop AS (a text, b int, c timestamp);
+CREATE DOMAIN jsb_int_not_null AS int NOT NULL;
+CREATE DOMAIN jsb_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN jsb_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+
+CREATE TYPE jsbrec AS (
+ i int,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d jsb_int_array_1d,
+ ia2d jsb_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jbpop,
+ reca jbpop[]
+);
+
+CREATE TYPE jsbrec_i_not_null AS (
+ i jsb_int_not_null
+);
+
SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":"blurfl","x":43.2}') q;
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}') q;
@@ -488,6 +515,89 @@ SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":[100,200,false],"x":43.2}'
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":[100,200,false],"x":43.2}') q;
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}') q;
+SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop, '{}') q;
+
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"x": 43.2}') q;
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": null}') q;
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": 12345}') q;
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": null}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": 123}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [1, "2", null, 4]}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [[1, 2, 3]]}') q;
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": null}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": 123}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null, 4]}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null]}') q;
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [1, "2", null, 4]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[], []]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [3]]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], 3, 4]}') q;
+
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [1, "2", null, 4]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": null}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": 123}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [1, "2", null, 4]}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": null}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaa"}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaa"}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaaaaa"}') q;
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": null}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": 123}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [1, "2", null, 4]}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": null}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": true}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": 123.45}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "123.45"}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "abc"}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": null}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": true}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": 123.45}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "123.45"}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "abc"}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": null}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": 123}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": [1, "2", null, 4]}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+
+SELECT rec FROM jsonb_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jbpop,NULL)::jsbrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+
-- populate_recordset
SELECT * FROM jsonb_populate_recordset(NULL::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
SELECT * FROM jsonb_populate_recordset(row('def',99,NULL)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
@@ -509,13 +619,32 @@ select * from jsonb_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar",
as x(a int, b text, c boolean);
select *, c is null as c_is_null
-from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::jsonb)
- as t(a int, b jsonb, c text, x int);
+from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::jsonb)
+ as t(a int, b jsonb, c text, x int, ca char(5)[], ia int[][], r jbpop);
select *, c is null as c_is_null
from jsonb_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::jsonb)
as t(a int, b jsonb, c text, x int);
+-- test type info caching in jsonb_populate_record()
+CREATE TEMP TABLE jsbpoptest (js jsonb);
+
+INSERT INTO jsbpoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::jsonb
+FROM generate_series(1, 3);
+
+SELECT (jsonb_populate_record(NULL::jsbrec, js)).* FROM jsbpoptest;
+
+DROP TYPE jsbrec;
+DROP TYPE jsbrec_i_not_null;
+DROP DOMAIN jsb_int_not_null;
+DROP DOMAIN jsb_int_array_1d;
+DROP DOMAIN jsb_int_array_2d;
+
-- indexing
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}';
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":"CC"}';
0002_assign_ndims_to_record_function_result_types_v01.patchtext/x-patch; name=0002_assign_ndims_to_record_function_result_types_v01.patchDownload
diff --git a/src/backend/access/common/tupdesc.c b/src/backend/access/common/tupdesc.c
index b56d0e3..60510c3 100644
--- a/src/backend/access/common/tupdesc.c
+++ b/src/backend/access/common/tupdesc.c
@@ -673,7 +673,7 @@ BuildDescForRelation(List *schema)
* BuildDescFromLists
*
* Build a TupleDesc given lists of column names (as String nodes),
- * column type OIDs, typmods, and collation OIDs.
+ * column type OIDs, typmods, collation OIDs and ndims.
*
* No constraints are generated.
*
@@ -681,7 +681,8 @@ BuildDescForRelation(List *schema)
* with functions returning RECORD.
*/
TupleDesc
-BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
+BuildDescFromLists(List *names, List *types, List *typmods, List *collations,
+ List *ndims)
{
int natts;
AttrNumber attnum;
@@ -689,12 +690,14 @@ BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
ListCell *l2;
ListCell *l3;
ListCell *l4;
+ ListCell *l5;
TupleDesc desc;
natts = list_length(names);
Assert(natts == list_length(types));
Assert(natts == list_length(typmods));
Assert(natts == list_length(collations));
+ Assert(natts == list_length(ndims));
/*
* allocate a new tuple descriptor
@@ -706,12 +709,15 @@ BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
l2 = list_head(types);
l3 = list_head(typmods);
l4 = list_head(collations);
+ l5 = list_head(ndims);
+
foreach(l1, names)
{
char *attname = strVal(lfirst(l1));
Oid atttypid;
int32 atttypmod;
Oid attcollation;
+ int32 attndims;
atttypid = lfirst_oid(l2);
l2 = lnext(l2);
@@ -719,10 +725,12 @@ BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
l3 = lnext(l3);
attcollation = lfirst_oid(l4);
l4 = lnext(l4);
+ attndims = lfirst_int(l5);
+ l5 = lnext(l5);
attnum++;
- TupleDescInitEntry(desc, attnum, attname, atttypid, atttypmod, 0);
+ TupleDescInitEntry(desc, attnum, attname, atttypid, atttypmod, attndims);
TupleDescInitEntryCollation(desc, attnum, attcollation);
}
diff --git a/src/backend/executor/nodeFunctionscan.c b/src/backend/executor/nodeFunctionscan.c
index 5a0f324..6d93cb3 100644
--- a/src/backend/executor/nodeFunctionscan.c
+++ b/src/backend/executor/nodeFunctionscan.c
@@ -410,7 +410,8 @@ ExecInitFunctionScan(FunctionScan *node, EState *estate, int eflags)
tupdesc = BuildDescFromLists(rtfunc->funccolnames,
rtfunc->funccoltypes,
rtfunc->funccoltypmods,
- rtfunc->funccolcollations);
+ rtfunc->funccolcollations,
+ rtfunc->funccolndims);
/*
* For RECORD results, make sure a typmod has been assigned. (The
diff --git a/src/backend/nodes/copyfuncs.c b/src/backend/nodes/copyfuncs.c
index d973225..08f5ce5 100644
--- a/src/backend/nodes/copyfuncs.c
+++ b/src/backend/nodes/copyfuncs.c
@@ -2181,6 +2181,7 @@ _copyRangeTblFunction(const RangeTblFunction *from)
COPY_NODE_FIELD(funccoltypes);
COPY_NODE_FIELD(funccoltypmods);
COPY_NODE_FIELD(funccolcollations);
+ COPY_NODE_FIELD(funccolndims);
COPY_BITMAPSET_FIELD(funcparams);
return newnode;
diff --git a/src/backend/nodes/equalfuncs.c b/src/backend/nodes/equalfuncs.c
index edc1797..d7efd00 100644
--- a/src/backend/nodes/equalfuncs.c
+++ b/src/backend/nodes/equalfuncs.c
@@ -2490,6 +2490,7 @@ _equalRangeTblFunction(const RangeTblFunction *a, const RangeTblFunction *b)
COMPARE_NODE_FIELD(funccoltypes);
COMPARE_NODE_FIELD(funccoltypmods);
COMPARE_NODE_FIELD(funccolcollations);
+ COMPARE_NODE_FIELD(funccolndims);
COMPARE_BITMAPSET_FIELD(funcparams);
return true;
diff --git a/src/backend/nodes/outfuncs.c b/src/backend/nodes/outfuncs.c
index 7258c03..97d4439 100644
--- a/src/backend/nodes/outfuncs.c
+++ b/src/backend/nodes/outfuncs.c
@@ -2880,6 +2880,7 @@ _outRangeTblFunction(StringInfo str, const RangeTblFunction *node)
WRITE_NODE_FIELD(funccoltypes);
WRITE_NODE_FIELD(funccoltypmods);
WRITE_NODE_FIELD(funccolcollations);
+ WRITE_NODE_FIELD(funccolndims);
WRITE_BITMAPSET_FIELD(funcparams);
}
diff --git a/src/backend/nodes/readfuncs.c b/src/backend/nodes/readfuncs.c
index d608530..ac3d141 100644
--- a/src/backend/nodes/readfuncs.c
+++ b/src/backend/nodes/readfuncs.c
@@ -1359,6 +1359,7 @@ _readRangeTblFunction(void)
READ_NODE_FIELD(funccoltypes);
READ_NODE_FIELD(funccoltypmods);
READ_NODE_FIELD(funccolcollations);
+ READ_NODE_FIELD(funccolndims);
READ_BITMAPSET_FIELD(funcparams);
READ_DONE();
diff --git a/src/backend/parser/parse_relation.c b/src/backend/parser/parse_relation.c
index 58f7050..19f5fe2 100644
--- a/src/backend/parser/parse_relation.c
+++ b/src/backend/parser/parse_relation.c
@@ -1444,6 +1444,7 @@ addRangeTableEntryForFunction(ParseState *pstate,
rtfunc->funccoltypes = NIL;
rtfunc->funccoltypmods = NIL;
rtfunc->funccolcollations = NIL;
+ rtfunc->funccolndims = NIL;
rtfunc->funcparams = NULL; /* not set until planning */
/*
@@ -1509,6 +1510,7 @@ addRangeTableEntryForFunction(ParseState *pstate,
Oid attrtype;
int32 attrtypmod;
Oid attrcollation;
+ int32 attrndims;
attrname = n->colname;
if (n->typeName->setof)
@@ -1520,12 +1522,13 @@ addRangeTableEntryForFunction(ParseState *pstate,
typenameTypeIdAndMod(pstate, n->typeName,
&attrtype, &attrtypmod);
attrcollation = GetColumnDefCollation(pstate, n, attrtype);
+ attrndims = list_length(n->typeName->arrayBounds);
TupleDescInitEntry(tupdesc,
(AttrNumber) i,
attrname,
attrtype,
attrtypmod,
- 0);
+ attrndims);
TupleDescInitEntryCollation(tupdesc,
(AttrNumber) i,
attrcollation);
@@ -1537,6 +1540,8 @@ addRangeTableEntryForFunction(ParseState *pstate,
attrtypmod);
rtfunc->funccolcollations = lappend_oid(rtfunc->funccolcollations,
attrcollation);
+ rtfunc->funccolndims = lappend_int(rtfunc->funccolndims,
+ attrndims);
i++;
}
diff --git a/src/include/access/tupdesc.h b/src/include/access/tupdesc.h
index de18f74..9ed44fd 100644
--- a/src/include/access/tupdesc.h
+++ b/src/include/access/tupdesc.h
@@ -125,6 +125,7 @@ extern void TupleDescInitEntryCollation(TupleDesc desc,
extern TupleDesc BuildDescForRelation(List *schema);
-extern TupleDesc BuildDescFromLists(List *names, List *types, List *typmods, List *collations);
+extern TupleDesc BuildDescFromLists(List *names, List *types, List *typmods,
+ List *collations, List *ndims);
#endif /* TUPDESC_H */
diff --git a/src/include/nodes/parsenodes.h b/src/include/nodes/parsenodes.h
index fc532fb..d27db6e 100644
--- a/src/include/nodes/parsenodes.h
+++ b/src/include/nodes/parsenodes.h
@@ -969,7 +969,8 @@ typedef struct RangeTblEntry
* If the function had a column definition list (required for an
* otherwise-unspecified RECORD result), funccolnames lists the names given
* in the definition list, funccoltypes lists their declared column types,
- * funccoltypmods lists their typmods, funccolcollations their collations.
+ * funccoltypmods lists their typmods, funccolcollations their collations,
+ * funccolndims lists their numbers of array dimensions.
* Otherwise, those fields are NIL.
*
* Notice we don't attempt to store info about the results of functions
@@ -989,6 +990,7 @@ typedef struct RangeTblFunction
List *funccoltypes; /* OID list of column type OIDs */
List *funccoltypmods; /* integer list of column typmods */
List *funccolcollations; /* OID list of column collation OIDs */
+ List *funccolndims; /* integer list of column array ndims */
/* This is set during planning for use by the executor: */
Bitmapset *funcparams; /* PARAM_EXEC Param IDs affecting this func */
} RangeTblFunction;
On Tue, Dec 13, 2016 at 9:38 AM, Nikita Glukhov <n.gluhov@postgrespro.ru> wrote:
It also fixes the following errors/inconsistencies caused by lost quoting of
string json values:[master]=# select * from json_to_record('{"js": "a"}') as rec(js json);
ERROR: invalid input syntax for type json
DETAIL: Token "a" is invalid.
CONTEXT: JSON data, line 1: a
The docs mention that this is based on a best effort now. It may be
interesting to improve that.
[master]=# select * from json_to_record('{"js": "true"}') as rec(js json);
js
------
true[patched]=# select * from json_to_record('{"js": "a"}') as rec(js json);
js
-----
"a"
That's indeed valid JSON.
[patched]=# select * from json_to_record('{"js": "true"}') as rec(js json);
js
--------
"true"
And so is that.
The second patch adds assignment of correct ndims to array fields of RECORD
function result types.
Without this patch, attndims in tuple descriptors of RECORD types is always
0 and the corresponding assertion fails in the next test:[patched]=# select json_to_record('{"a": [1, 2, 3]}') as rec(a int[]);
Should I submit these patches to commitfest?
It seems to me that it would be a good idea to add them.
--
Michael
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
Hello.
I've noticed that this patch is on CF and needs a reviewer so I decided
to take a look. Code looks good to me in general, it's well covered by
tests and passes `make check-world`.
However, it would be nice to have a few more comments. In my opinion,
every procedure has to have at least a short description - what it
does, what arguments it receives and what it returns, even if it's a
static procedure. The same applies to structures and their fields.
Another thing that bothers me is a FIXME comment:
```
tupdesc = lookup_rowtype_tupdesc(type, typmod); /* FIXME cache */
```
I suggest remove it or implement a caching here as planned.
On Tue, Dec 13, 2016 at 10:07:46AM +0900, Michael Paquier wrote:
On Tue, Dec 13, 2016 at 9:38 AM, Nikita Glukhov <n.gluhov@postgrespro.ru> wrote:
It also fixes the following errors/inconsistencies caused by lost quoting of
string json values:[master]=# select * from json_to_record('{"js": "a"}') as rec(js json);
ERROR: invalid input syntax for type json
DETAIL: Token "a" is invalid.
CONTEXT: JSON data, line 1: aThe docs mention that this is based on a best effort now. It may be
interesting to improve that.[master]=# select * from json_to_record('{"js": "true"}') as rec(js json);
js
------
true[patched]=# select * from json_to_record('{"js": "a"}') as rec(js json);
js
-----
"a"That's indeed valid JSON.
[patched]=# select * from json_to_record('{"js": "true"}') as rec(js json);
js
--------
"true"And so is that.
The second patch adds assignment of correct ndims to array fields of RECORD
function result types.
Without this patch, attndims in tuple descriptors of RECORD types is always
0 and the corresponding assertion fails in the next test:[patched]=# select json_to_record('{"a": [1, 2, 3]}') as rec(a int[]);
Should I submit these patches to commitfest?
It seems to me that it would be a good idea to add them.
--
Michael--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
--
Best regards,
Aleksander Alekseev
I've noticed that this patch is on CF and needs a reviewer so I decided
to take a look. Code looks good to me in general, it's well covered by
tests and passes `make check-world`.
Thanks for your review.
However, it would be nice to have a few more comments. In my opinion,
every procedure has to have at least a short description - what it
does, what arguments it receives and what it returns, even if it's a
static procedure. The same applies to structures and their fields.
I have added some comments for functions and structures in the second
version of this patch.
Another thing that bothers me is a FIXME comment:
```
tupdesc = lookup_rowtype_tupdesc(type, typmod); /* FIXME cache */
```I suggest remove it or implement a caching here as planned.
I have implemented tuple descriptor caching here in populate_composite()
and also in populate_record_worker() (by using populate_composite()
instead of populate_record()). These improvements can speed up bulk
jsonb conversion by 15-20% in a simple test with two nested records.
--
Nikita Glukhov
Postgres Professional: http://www.postgrespro.com
The Russian Postgres Company
Attachments:
0001_recursive_json_populate_record_v02.patchtext/x-patch; name=0001_recursive_json_populate_record_v02.patchDownload
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index 10e3186..55cacfb 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -11249,12 +11249,12 @@ table2-mapping
whose columns match the record type defined by <replaceable>base</>
(see note below).
</entry>
- <entry><literal>select * from json_populate_record(null::myrowtype, '{"a":1,"b":2}')</literal></entry>
+ <entry><literal>select * from json_populate_record(null::myrowtype, '{"a": 1, "b": ["2", "a b"], "c": {"d": 4, "e": "a b c"}}')</literal></entry>
<entry>
<programlisting>
- a | b
----+---
- 1 | 2
+ a | b | c
+---+-----------+-------------
+ 1 | {2,"a b"} | (4,"a b c")
</programlisting>
</entry>
</row>
@@ -11343,12 +11343,12 @@ table2-mapping
explicitly define the structure of the record with an <literal>AS</>
clause.
</entry>
- <entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":"bar"}') as x(a int, b text, d text) </literal></entry>
+ <entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":[1,2,3],"e":"bar","r": {"a": 123, "b": "a b c"}}') as x(a int, b text, c int[], d text, r myrowtype) </literal></entry>
<entry>
<programlisting>
- a | b | d
----+---------+---
- 1 | [1,2,3] |
+ a | b | c | d | r
+---+---------+---------+---+---------------
+ 1 | [1,2,3] | {1,2,3} | | (123,"a b c")
</programlisting>
</entry>
</row>
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index 17ee4e4..bb72b8e 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -93,7 +93,7 @@ static void elements_array_element_end(void *state, bool isnull);
static void elements_scalar(void *state, char *token, JsonTokenType tokentype);
/* turn a json object into a hash table */
-static HTAB *get_json_object_as_hash(text *json, const char *funcname);
+static HTAB *get_json_object_as_hash(char *json, int len, const char *funcname);
/* common worker for populate_record and to_record */
static Datum populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
@@ -149,6 +149,33 @@ static void setPathArray(JsonbIterator **it, Datum *path_elems,
int level, Jsonb *newval, uint32 nelems, int op_type);
static void addJsonbToParseState(JsonbParseState **jbps, Jsonb *jb);
+/* helper functions for populate_record[set] */
+typedef struct ColumnIOData ColumnIOData;
+typedef struct RecordIOData RecordIOData;
+
+static HeapTupleHeader
+populate_record(TupleDesc tupdesc,
+ RecordIOData **record_info,
+ HeapTupleHeader template,
+ MemoryContext mcxt,
+ Oid jtype,
+ HTAB *json_hash,
+ JsonbContainer *cont);
+
+static Datum
+populate_record_field(ColumnIOData *col,
+ Oid typid,
+ int32 typmod,
+ int32 ndims,
+ const char *colname,
+ MemoryContext mcxt,
+ Datum defaultval,
+ Oid jtype,
+ char *json,
+ bool json_is_string,
+ JsonbValue *jval,
+ bool *isnull);
+
/* state for json_object_keys */
typedef struct OkeysState
{
@@ -216,6 +243,7 @@ typedef struct JhashState
HTAB *hash;
char *saved_scalar;
char *save_json_start;
+ bool saved_scalar_is_string;
} JHashState;
/* hashtable element */
@@ -223,26 +251,67 @@ typedef struct JsonHashEntry
{
char fname[NAMEDATALEN]; /* hash key (MUST BE FIRST) */
char *val;
- char *json;
bool isnull;
+ bool isstring;
} JsonHashEntry;
-/* these two are stolen from hstore / record_out, used in populate_record* */
-typedef struct ColumnIOData
+/* structure to cache type I/O metadata needed for populate_scalar() */
+typedef struct ScalarIOData
{
- Oid column_type;
- Oid typiofunc;
Oid typioparam;
- FmgrInfo proc;
-} ColumnIOData;
+ FmgrInfo typiofunc;
+} ScalarIOData;
-typedef struct RecordIOData
+/* structure to cache metadata needed for populate_array() */
+typedef struct ArrayIOData
+{
+ Oid element_type; /* array element type id */
+ int32 element_typmod; /* array element type modifier */
+ ColumnIOData *element_info; /* metadata cache */
+ int ndims; /* number of dimensions of array */
+} ArrayIOData;
+
+/* structure to cache metadata needed for populate_composite() */
+typedef struct CompositeIOData
+{
+ /*
+ * We use pointer to a RecordIOData here because variable-length
+ * struct RecordIOData can't be used directly in ColumnIOData.io union
+ */
+ RecordIOData *recordio; /* metadata cache for populate_record() */
+ TupleDesc tupdesc; /* cached tuple descriptor */
+} CompositeIOData;
+
+/* these two are stolen from hstore / record_out, used in populate_record* */
+
+/* structure to cache record metadata needed for populate_record_field() */
+struct ColumnIOData
+{
+ Oid typid; /* column type id */
+ int32 typmod; /* column type modifier */
+ int32 ndims; /* number of array dimensions if column is array */
+ char type_category; /* column type category:
+ * 's' - scalar,
+ * 'a' - array,
+ * 'c' - composite */
+ bool type_is_domain; /* column type is domain */
+ void *domain_info; /* opaque cache for domain checks */
+ union
+ {
+ ScalarIOData scalar;
+ ArrayIOData array;
+ CompositeIOData composite;
+ } io; /* metadata cache for various column type categories */
+};
+
+/* structure to cache record metadata needed for populate_record() */
+struct RecordIOData
{
Oid record_type;
int32 record_typmod;
int ncolumns;
ColumnIOData columns[FLEXIBLE_ARRAY_MEMBER];
-} RecordIOData;
+};
/* state for populate_recordset */
typedef struct PopulateRecordsetState
@@ -252,13 +321,30 @@ typedef struct PopulateRecordsetState
HTAB *json_hash;
char *saved_scalar;
char *save_json_start;
+ bool saved_scalar_is_string;
Tuplestorestate *tuple_store;
TupleDesc ret_tdesc;
HeapTupleHeader rec;
- RecordIOData *my_extra;
+ RecordIOData **my_extra;
MemoryContext fn_mcxt; /* used to stash IO funcs */
} PopulateRecordsetState;
+/* state for populate_array_json() */
+typedef struct PopulateArrayState
+{
+ JsonLexContext *lex; /* json lexer */
+ ArrayBuildState *astate; /* array build state */
+ ArrayIOData *aio; /* metadata cache */
+ MemoryContext mcxt; /* metadata cache memory context */
+ const char *colname; /* column name (for diagnostics only) */
+ int *sizes; /* current dimension size counters */
+ int *dims; /* array dimensions */
+ int ndims; /* number of dimensions in array */
+ char *element_start; /* start of the current array element */
+ char *element_scalar; /* current array element token if it is a scalar */
+ bool element_scalar_is_string; /* is current scalar array element string */
+} PopulateArrayState;
+
/* state for json_strip_nulls */
typedef struct StripnullState
{
@@ -267,10 +353,6 @@ typedef struct StripnullState
bool skip_next_null;
} StripnullState;
-/* Turn a jsonb object into a record */
-static void make_row_from_rec_and_jsonb(Jsonb *element,
- PopulateRecordsetState *state);
-
/*
* SQL function json_object_keys
*
@@ -2098,158 +2180,653 @@ json_to_record(PG_FUNCTION_ARGS)
return populate_record_worker(fcinfo, "json_to_record", false);
}
-static Datum
-populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
- bool have_record_arg)
+/* json object start handler for populate_array_json() */
+static void
+populate_array_object_start(void *_state)
{
- int json_arg_num = have_record_arg ? 1 : 0;
- Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
- text *json;
- Jsonb *jb = NULL;
- HTAB *json_hash = NULL;
- HeapTupleHeader rec = NULL;
- Oid tupType = InvalidOid;
- int32 tupTypmod = -1;
- TupleDesc tupdesc;
- HeapTupleData tuple;
- HeapTuple rettuple;
- RecordIOData *my_extra;
- int ncolumns;
- int i;
- Datum *values;
- bool *nulls;
+ PopulateArrayState *state = (PopulateArrayState *) _state;
- Assert(jtype == JSONOID || jtype == JSONBOID);
+ if (state->lex->lex_level < state->ndims)
+ elog(ERROR,
+ state->colname ? "expected json array in value of key \"%s\""
+ : "expected json array", state->colname);
+}
- if (have_record_arg)
+/* json array start handler for populate_array_json() */
+static void
+populate_array_array_start(void *_state)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ /* reset current array dimension size counter */
+ if (ndim < state->ndims)
+ state->sizes[ndim] = 0;
+}
+
+/* json array end handler for populate_array_json() */
+static void
+populate_array_array_end(void *_state)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (ndim < state->ndims)
{
- Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+ /* check sub-arrays dimensions */
+ int dim = state->sizes[ndim]; /* current dimension counter */
- if (!type_is_rowtype(argtype))
+ if (state->dims[ndim] == -1)
+ state->dims[ndim] = dim; /* assign dimension if it is unknown yet */
+ else if (state->dims[ndim] != dim)
ereport(ERROR,
- (errcode(ERRCODE_DATATYPE_MISMATCH),
- errmsg("first argument of %s must be a row type",
- funcname)));
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("malformed json array: \"%s\"", state->lex->input),
+ errdetail("Multidimensional arrays must have "
+ "sub-arrays with matching dimensions.")));
- if (PG_ARGISNULL(0))
+ /* increment parent dimension counter if it is a nested sub-array */
+ if (ndim > 0)
+ state->sizes[ndim - 1]++;
+ }
+}
+
+/* json array element start handler for populate_array_json() */
+static void
+populate_array_element_start(void *_state, bool isnull)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (ndim == state->ndims)
+ {
+ /* remember current array element start */
+ state->element_start = state->lex->token_start;
+ state->element_scalar = NULL;
+ }
+}
+
+/* json array element end handler for populate_array_json() */
+static void
+populate_array_element_end(void *_state, bool isnull)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (ndim == state->ndims)
+ {
+ char *str;
+ Datum element;
+ bool element_isnull;
+
+ if (isnull)
+ str = NULL;
+ else if (state->element_scalar)
+ str = state->element_scalar;
+ else
{
- if (PG_ARGISNULL(1))
- PG_RETURN_NULL();
+ size_t len = state->lex->prev_token_terminator -
+ state->element_start;
- /*
- * have no tuple to look at, so the only source of type info is
- * the argtype. The lookup_rowtype_tupdesc call below will error
- * out if we don't have a known composite type oid here.
- */
- tupType = argtype;
- tupTypmod = -1;
+ str = palloc(len + 1);
+ memcpy(str, state->element_start, len);
+ str[len] = 0;
+ }
+
+ /* populate array element */
+ element = populate_record_field(state->aio->element_info,
+ state->aio->element_type,
+ state->aio->element_typmod,
+ 0, NULL, state->mcxt,
+ PointerGetDatum(NULL),
+ JSONOID, str,
+ state->element_scalar &&
+ state->element_scalar_is_string,
+ NULL, &element_isnull);
+
+ accumArrayResult(state->astate, element, element_isnull,
+ state->aio->element_type, NULL);
+
+ Assert(ndim > 0);
+ state->sizes[ndim - 1]++; /* increment current dimension counter */
+ }
+}
+
+/* json scalar handler for populate_array_json() */
+static void
+populate_array_scalar(void *_state, char *token, JsonTokenType tokentype)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+
+ if (state->lex->lex_level < state->ndims)
+ elog(ERROR,
+ state->colname ? "expected json array in value of key \"%s\""
+ : "expected json array", state->colname);
+
+ if (state->lex->lex_level == state->ndims)
+ {
+ /* remeber scalar element token */
+ state->element_scalar = token;
+ state->element_scalar_is_string = tokentype == JSON_TOKEN_STRING;
+ }
+}
+
+/* parse json array and populate array using given ArrayBuildState */
+static void
+populate_array_json(ArrayBuildState *astate, /* array build state */
+ char *json, /* json array */
+ const char *colname, /* for diagnostics only */
+ ArrayIOData *aio, /* metadata cache */
+ MemoryContext mcxt, /* cache memory context */
+ int ndims, /* number of dimensions */
+ int *dims) /* dimensions (output) */
+{
+ PopulateArrayState state;
+ JsonSemAction sem;
+
+ state.lex = makeJsonLexContextCstringLen(json, strlen(json), true);
+ state.astate = astate;
+ state.aio = aio;
+ state.mcxt = mcxt;
+ state.colname = colname;
+ state.dims = dims;
+ state.ndims = ndims;
+ state.sizes = palloc0(sizeof(int) * ndims);
+
+ memset(&sem, 0, sizeof(sem));
+ sem.semstate = (void *) &state;
+ sem.object_start = populate_array_object_start;
+ sem.array_start = populate_array_array_start;
+ sem.array_end = populate_array_array_end;
+ sem.array_element_start = populate_array_element_start;
+ sem.array_element_end = populate_array_element_end;
+ sem.scalar = populate_array_scalar;
+
+ pg_parse_json(state.lex, &sem);
+
+ pfree(state.lex);
+ pfree(state.sizes);
+}
+
+/*
+ * populate_array_dim_jsonb() -- Iterate recursively through jsonb sub-array
+ * elements and accumulate result using given ArrayBuildState.
+ *
+ * This function computes child sub-array dimensions and place it
+ * into dims[ndim..ndims-1].
+ *
+ * Returns computed sub-array dimension (placed by caller into dims[ndim - 1]).
+ */
+static int
+populate_array_dim_jsonb(ArrayBuildState *astate, /* array build state */
+ JsonbValue *jbv, /* jsonb sub-array */
+ const char *colname,/* for diagnostics only */
+ ArrayIOData *aio, /* metadata cache */
+ MemoryContext mcxt, /* cache memory context */
+ int ndims, /* number of dimensions */
+ int ndim, /* curr. dimension number */
+ int *dims) /* dimensions (output) */
+{
+ JsonbContainer *jbc = jbv->val.binary.data;
+ JsonbIterator *it;
+ JsonbIteratorToken tok;
+ JsonbValue val;
+ int size = 0; /* current dimension size counter */
+
+ check_stack_depth();
+
+ if (jbv->type != jbvBinary || !JsonContainerIsArray(jbc))
+ elog(ERROR,
+ colname ? "expected json array in value of key \"%s\""
+ : "expected json array", colname);
+
+ Assert(!JsonContainerIsScalar(jbc));
+
+ it = JsonbIteratorInit(jbc);
+
+ while ((tok = JsonbIteratorNext(&it, &val, true)) != WJB_DONE)
+ {
+ if (tok != WJB_ELEM)
+ continue;
+
+ if (ndim < ndims)
+ {
+ /* populate child sub-array */
+ int dim = populate_array_dim_jsonb(astate, &val, colname, aio,
+ mcxt, ndims, ndim + 1, dims);
+
+ /* check sub-array dimension */
+ if (dims[ndim] == -1)
+ dims[ndim] = dim; /* assign dimension if it is unknown yet */
+ else if (dims[ndim] != dim)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("malformed json array: \"%s\"",
+ JsonbToCString(NULL, jbc, jbv->val.binary.len)),
+ errdetail("Multidimensional arrays must have "
+ "sub-arrays with matching dimensions.")));
}
else
{
- rec = PG_GETARG_HEAPTUPLEHEADER(0);
+ /* populate current array element */
+ bool isnull;
+ Datum elem = populate_record_field(aio->element_info,
+ aio->element_type,
+ aio->element_typmod,
+ 0, NULL, mcxt,
+ PointerGetDatum(NULL),
+ JSONBOID, NULL, false,
+ &val, &isnull);
+
+ accumArrayResult(astate, elem, isnull, aio->element_type, NULL);
+ }
- if (PG_ARGISNULL(1))
- PG_RETURN_POINTER(rec);
+ size++;
+ }
- /* Extract type info from the tuple itself */
- tupType = HeapTupleHeaderGetTypeId(rec);
- tupTypmod = HeapTupleHeaderGetTypMod(rec);
- }
+ return size;
+}
- tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+/* populate recursively json/jsonb array */
+static Datum
+populate_array(ArrayIOData *aio, /* metadata cache */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ Oid jtype, /* json/jsonb */
+ char *json, /* valid only if json */
+ JsonbValue *jbv) /* valid only if jsonb */
+{
+ ArrayBuildState *astate;
+ Datum result;
+ MemoryContext acxt = CurrentMemoryContext;
+ int *dims;
+ int *lbs;
+ int ndims = aio->ndims;
+ int i;
+
+ Assert(ndims > 0);
+
+ dims = palloc(sizeof(int) * ndims);
+ lbs = palloc(sizeof(int) * ndims);
+
+ for (i = 0; i < ndims; i++)
+ {
+ dims[i] = -1; /* uknown yet */
+ lbs[i] = 1;
}
+
+ astate = initArrayResult(aio->element_type, acxt, true);
+
+ if (jtype == JSONOID)
+ populate_array_json(astate, json, colname, aio, mcxt, ndims, dims);
else
+ dims[0] = populate_array_dim_jsonb(astate, jbv, colname, aio, mcxt,
+ ndims, 1, dims);
+
+ result = makeMdArrayResult(astate, ndims, dims, lbs, acxt, true);
+
+ pfree(dims);
+ pfree(lbs);
+
+ return result;
+}
+
+/* populate recursively composite (row type) value from json/jsonb */
+static Datum
+populate_composite(CompositeIOData *io, /* metadata cache */
+ Oid typeid, /* row type id */
+ int32 typmod, /* row type modifier */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ HeapTupleHeader defaultval, /* default row value if any */
+ Oid jtype, /* json/jsonb */
+ char *json, /* valid only if json */
+ int jsonlen, /* valid only if json */
+ JsonbValue *jbv) /* valid only if jsonb only */
+{
+ HeapTupleHeader tuple;
+ HTAB *json_hash = NULL;
+ JsonbContainer *container = NULL;
+
+ /* acquire cached tuple descriptor */
+ if (!io->tupdesc ||
+ io->tupdesc->tdtypeid != typeid ||
+ io->tupdesc->tdtypmod != typmod)
{
- /* json{b}_to_record case */
- if (PG_ARGISNULL(0))
- PG_RETURN_NULL();
+ TupleDesc tupdesc = lookup_rowtype_tupdesc(typeid, typmod);
+ MemoryContext oldcxt;
- if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
- ereport(ERROR,
- (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("function returning record called in context "
- "that cannot accept type record"),
- errhint("Try calling the function in the FROM clause "
- "using a column definition list.")));
+ if (io->tupdesc)
+ FreeTupleDesc(io->tupdesc);
+
+ /* copy tuple desc without constraints into cache memory context */
+ oldcxt = MemoryContextSwitchTo(mcxt);
+ io->tupdesc = CreateTupleDescCopy(tupdesc);
+ MemoryContextSwitchTo(oldcxt);
+
+ ReleaseTupleDesc(tupdesc);
}
+ /* prepare input value */
if (jtype == JSONOID)
{
- /* just get the text */
- json = PG_GETARG_TEXT_P(json_arg_num);
+ /* convert plain-text json into a hash table */
+ json_hash = get_json_object_as_hash(json, jsonlen,
+ "populate_composite");
+ }
+ else
+ {
+ if (jbv->type != jbvBinary ||
+ !JsonContainerIsObject(jbv->val.binary.data))
+ elog(ERROR,
+ colname ? "expected json object in value of key \"%s\""
+ : "expected json object", colname);
+ container = jbv->val.binary.data;
+ }
- json_hash = get_json_object_as_hash(json, funcname);
+ /* populate resulting record tuple */
+ tuple = populate_record(io->tupdesc, &io->recordio, defaultval, mcxt,
+ jtype, json_hash, container);
- /*
- * if the input json is empty, we can only skip the rest if we were
- * passed in a non-null record, since otherwise there may be issues
- * with domain nulls.
- */
- if (hash_get_num_entries(json_hash) == 0 && rec)
+ if (jtype == JSONOID)
+ hash_destroy(json_hash);
+
+ return HeapTupleHeaderGetDatum(tuple);
+}
+
+/* populate non-null scalar value from json/jsonb value */
+static Datum
+populate_scalar(ScalarIOData *io, /* metadata cache */
+ Oid typid, /* scalar type id */
+ int32 typmod, /* scalar type modifier */
+ Oid jtype, /* json/jsonb */
+ char *json, /* valid only if json */
+ bool json_is_string, /* valid only if json */
+ JsonbValue *jbv) /* valid only if jsonb */
+{
+ Datum res;
+ char *str = NULL;
+
+ if (jtype == JSONOID)
+ {
+ /* already done the hard work in the json case */
+ if ((typid == JSONOID || typid == JSONBOID) && json_is_string)
{
- hash_destroy(json_hash);
- ReleaseTupleDesc(tupdesc);
- PG_RETURN_POINTER(rec);
+ /*
+ * Add quotes around string value (should be already escaped)
+ * if converting to json or jsonb.
+ */
+ size_t len = strlen(json);
+
+ str = palloc(len + 3);
+ str[0] = '"';
+ memcpy(&str[1], json, len);
+ str[len + 1] = '"';
+ str[len + 2] = 0;
+ }
+ else
+ {
+ str = json;
}
}
else
{
- jb = PG_GETARG_JSONB(json_arg_num);
-
- /* same logic as for json */
- if (JB_ROOT_COUNT(jb) == 0 && rec)
+ if (typid == JSONBOID)
{
- ReleaseTupleDesc(tupdesc);
- PG_RETURN_POINTER(rec);
+ Jsonb *jsonb = JsonbValueToJsonb(jbv); /* directly use jsonb */
+ return JsonbGetDatum(jsonb);
}
+ /* convert jsonb to string for typio call */
+ else if (typid == JSONOID && jbv->type != jbvBinary)
+ {
+ /*
+ * Convert scalar jsonb (non-scalars are passed here as jbvBinary)
+ * to json string, preserving quotes around top-level strings.
+ */
+ Jsonb *jsonb = JsonbValueToJsonb(jbv);
+ str = JsonbToCString(NULL, &jsonb->root, VARSIZE(jsonb));
+ }
+ else if (jbv->type == jbvString) /* quotes are stripped */
+ str = pnstrdup(jbv->val.string.val, jbv->val.string.len);
+ else if (jbv->type == jbvBool)
+ str = pstrdup(jbv->val.boolean ? "true" : "false");
+ else if (jbv->type == jbvNumeric)
+ str = DatumGetCString(DirectFunctionCall1(numeric_out,
+ PointerGetDatum(jbv->val.numeric)));
+ else if (jbv->type == jbvBinary)
+ str = JsonbToCString(NULL, jbv->val.binary.data,
+ jbv->val.binary.len);
+ else
+ elog(ERROR, "unrecognized jsonb type: %d", (int) jbv->type);
}
- ncolumns = tupdesc->natts;
+ res = InputFunctionCall(&io->typiofunc, str, io->typioparam, typmod);
+
+ /* free temporary buffer */
+ if (str != json)
+ pfree(str);
- if (rec)
+ return res;
+}
+
+/* prepare column metadata cache for the given type */
+static void
+prepare_column_cache(ColumnIOData *column, /* metadata cache */
+ Oid typid, /* column type id */
+ int32 typmod, /* column type modifier */
+ int32 ndims, /* number of array dimensions */
+ MemoryContext mcxt, /* cache memory context */
+ Oid jtype) /* json/jsonb */
+{
+ /* if the type is a domain, get its base type id and type modifier */
+ int32 base_typmod = typmod;
+ Oid base_typid = getBaseTypeAndTypmod(typid, &base_typmod);
+ bool type_is_domain = base_typid != typid;
+
+ if (type_is_rowtype(base_typid))
{
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = rec;
+ column->type_category = 'c'; /* composite */
+ column->io.composite.recordio = NULL;
+ column->io.composite.tupdesc = NULL;
}
+ else
+ {
+ Oid element_type = get_element_type(base_typid);
- /*
- * We arrange to look up the needed I/O info just once per series of
- * calls, assuming the record type doesn't change underneath us.
- */
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- if (my_extra == NULL ||
- my_extra->ncolumns != ncolumns)
+ if (OidIsValid(element_type))
+ {
+ /* type is an array */
+ int32 base_ndims = ndims;
+
+ if (type_is_domain)
+ {
+ Assert(ndims == 0); /* arrays of domains are not supported */
+
+ /* get number of dimensions of domain array */
+ base_ndims = get_type_ndims(typid);
+ }
+
+ Assert(base_ndims > 0);
+
+ column->type_category = 'a'; /* array */
+ column->io.array.element_info =
+ MemoryContextAllocZero(mcxt, sizeof(ColumnIOData));
+ column->io.array.element_type = element_type;
+ /* array element typemod stored in attribute typmod */
+ column->io.array.element_typmod = base_typmod;
+ column->io.array.ndims = base_ndims;
+ }
+ else
+ {
+ /* type is a scalar */
+ column->type_category = 's'; /* scalar */
+
+ /* don't need input function when converting from jsonb to jsonb */
+ if (jtype != JSONBOID || base_typid != JSONBOID)
+ {
+ Oid typioproc;
+
+ getTypeInputInfo(base_typid, &typioproc,
+ &column->io.scalar.typioparam);
+ fmgr_info_cxt(typioproc, &column->io.scalar.typiofunc, mcxt);
+ }
+ }
+ }
+
+ column->typid = typid;
+ column->typmod = typmod;
+ column->ndims = ndims;
+ column->type_is_domain = type_is_domain;
+ column->domain_info = NULL;
+}
+
+/* populate recursively a record field or an array element from json/jsonb value */
+static Datum
+populate_record_field(ColumnIOData *col, /* metadata cache */
+ Oid typid, /* field type id */
+ int32 typmod, /* field type modifier */
+ int32 ndims, /* number of array dimensions */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ Datum defaultval, /* default record value if any */
+ Oid jtype, /* json/jsonb */
+ char *json, /* valid only if json */
+ bool json_is_string, /* valid only if json */
+ JsonbValue *jval, /* valid only if jsonb */
+ bool *isnull) /* value is NULL */
+{
+ Datum res;
+
+ check_stack_depth();
+
+ /* prepare column metadata cache for the given type */
+ if (col->typid != typid || col->typmod != typmod || col->ndims != ndims)
+ prepare_column_cache(col, typid, typmod, ndims, mcxt, jtype);
+
+ if (jtype == JSONOID ? json == NULL : jval == NULL || jval->type == jbvNull)
+ {
+ *isnull = true;
+ res = PointerGetDatum(NULL);
+ }
+ else if (jtype == JSONOID && json_is_string && col->type_category != 's')
+ {
+ /* don't convert a string json value to a non-scalar */
+ elog(ERROR, colname ? "expected json %s in value of key \"%s\""
+ : "expected json %s",
+ col->type_category == 'a' ? "array" : "object", colname);
+ }
+ else
{
- fcinfo->flinfo->fn_extra =
- MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- my_extra->record_type = InvalidOid;
- my_extra->record_typmod = 0;
- my_extra->ncolumns = ncolumns;
- MemSet(my_extra->columns, 0, sizeof(ColumnIOData) * ncolumns);
+ *isnull = false;
+
+ switch (col->type_category)
+ {
+ case 's':
+ res = populate_scalar(&col->io.scalar, typid, typmod,
+ jtype, json, json_is_string, jval);
+ break;
+
+ case 'a':
+ res = populate_array(&col->io.array, colname, mcxt,
+ jtype, json, jval);
+ break;
+
+ case 'c':
+ res = populate_composite(&col->io.composite, typid, typmod,
+ colname, mcxt,
+ DatumGetPointer(defaultval)
+ ? DatumGetHeapTupleHeader(defaultval)
+ : NULL,
+ jtype,
+ json,
+ jtype == JSONOID ? strlen(json) : 0,
+ jval);
+ break;
+
+ default:
+ elog(ERROR, "unrecognized type category '%c'",
+ col->type_category);
+ break;
+ }
}
- if (have_record_arg && (my_extra->record_type != tupType ||
- my_extra->record_typmod != tupTypmod))
+ if (col->type_is_domain)
+ domain_check(res, *isnull, col->typid, &col->domain_info, mcxt);
+
+ return res;
+}
+
+static RecordIOData *
+allocate_record_info(MemoryContext mcxt, int ncolumns)
+{
+ RecordIOData *data = (RecordIOData *)
+ MemoryContextAlloc(mcxt,
+ offsetof(RecordIOData, columns) +
+ ncolumns * sizeof(ColumnIOData));
+
+ data->record_type = InvalidOid;
+ data->record_typmod = 0;
+ data->ncolumns = ncolumns;
+ MemSet(data->columns, 0, sizeof(ColumnIOData) * ncolumns);
+
+ return data;
+}
+
+/* populate record tuple from json/jsonb value */
+static HeapTupleHeader
+populate_record(TupleDesc tupdesc, /* record tuple descriptor */
+ RecordIOData **precord, /* metadata cache location */
+ HeapTupleHeader defaultval, /* default record value if any */
+ MemoryContext mcxt, /* cache memory context */
+ Oid jtype, /* json/jsonb */
+ HTAB *json_hash, /* valid only if json */
+ JsonbContainer *cont) /* valid only if jsonb */
+{
+ RecordIOData *record = *precord;
+ Datum *values;
+ bool *nulls;
+ HeapTuple res;
+ int ncolumns = tupdesc->natts;
+ int i;
+
+ /*
+ * if the input json is empty, we can only skip the rest if we were
+ * passed in a non-null record, since otherwise there may be issues
+ * with domain nulls.
+ */
+ if ((jtype == JSONOID ? hash_get_num_entries(json_hash) == 0
+ : JsonContainerSize(cont) == 0) && defaultval)
+ return defaultval;
+
+ /* (re)allocate metadata cache */
+ if (record == NULL ||
+ record->ncolumns != ncolumns)
+ *precord = record = allocate_record_info(mcxt, ncolumns);
+
+ /* invalidate metadata cache if the record type has changed */
+ if (record->record_type != tupdesc->tdtypeid ||
+ record->record_typmod != tupdesc->tdtypmod)
{
- MemSet(my_extra, 0,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra->record_type = tupType;
- my_extra->record_typmod = tupTypmod;
- my_extra->ncolumns = ncolumns;
+ MemSet(record, 0, offsetof(RecordIOData, columns) +
+ ncolumns * sizeof(ColumnIOData));
+ record->record_type = tupdesc->tdtypeid;
+ record->record_typmod = tupdesc->tdtypmod;
+ record->ncolumns = ncolumns;
}
values = (Datum *) palloc(ncolumns * sizeof(Datum));
nulls = (bool *) palloc(ncolumns * sizeof(bool));
- if (rec)
+ if (defaultval)
{
+ HeapTupleData tuple;
+
+ /* Build a temporary HeapTuple control structure */
+ tuple.t_len = HeapTupleHeaderGetDatumLength(defaultval);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = defaultval;
+
/* Break down the tuple into fields */
heap_deform_tuple(&tuple, tupdesc, values, nulls);
}
@@ -2264,31 +2841,23 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
for (i = 0; i < ncolumns; ++i)
{
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- JsonbValue *v = NULL;
- JsonHashEntry *hashentry = NULL;
+ Form_pg_attribute att = tupdesc->attrs[i];
+ char *colname = NameStr(att->attname);
+ JsonbValue *val = NULL;
+ JsonHashEntry *hashentry = NULL;
/* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
+ if (att->attisdropped)
{
nulls[i] = true;
continue;
}
if (jtype == JSONOID)
- {
- hashentry = hash_search(json_hash,
- NameStr(tupdesc->attrs[i]->attname),
- HASH_FIND, NULL);
- }
+ hashentry = hash_search(json_hash, colname, HASH_FIND, NULL);
else
- {
- char *key = NameStr(tupdesc->attrs[i]->attname);
-
- v = findJsonbValueFromContainerLen(&jb->root, JB_FOBJECT, key,
- strlen(key));
- }
+ val = findJsonbValueFromContainerLen(cont, JB_FOBJECT, colname,
+ strlen(colname));
/*
* we can't just skip here if the key wasn't found since we might have
@@ -2298,73 +2867,165 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
* then every field which we don't populate needs to be run through
* the input function just in case it's a domain type.
*/
- if (((jtype == JSONOID && hashentry == NULL) ||
- (jtype == JSONBOID && v == NULL)) && rec)
+ if ((jtype == JSONOID ? hashentry == NULL : val == NULL) && defaultval)
continue;
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
+ values[i] = populate_record_field(&record->columns[i],
+ att->atttypid,
+ att->atttypmod,
+ att->attndims,
+ colname,
+ mcxt,
+ nulls[i] ? PointerGetDatum(NULL)
+ : values[i],
+ jtype,
+ hashentry && !hashentry->isnull ?
+ hashentry->val : NULL,
+ hashentry && hashentry->isstring,
+ val,
+ &nulls[i]);
+ }
+
+ res = heap_form_tuple(tupdesc, values, nulls);
+
+ pfree(values);
+ pfree(nulls);
+
+ return res->t_data;
+}
+
+/* structure to cache metadata needed for populate_record_worker() */
+typedef struct PopulateRecordCache
+{
+ Oid argtype; /* verified row type of the first argument */
+ CompositeIOData io; /* metadata cache for populate_composite() */
+} PopulateRecordCache;
+
+static Datum
+populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
+ bool have_record_arg)
+{
+ int json_arg_num = have_record_arg ? 1 : 0;
+ Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
+ text *json;
+ Jsonb *jb;
+ JsonbValue jbv;
+ HeapTupleHeader rec = NULL;
+ Oid tupType;
+ int32 tupTypmod;
+ TupleDesc tupdesc = NULL;
+ Datum rettuple;
+ MemoryContext fnmcxt = fcinfo->flinfo->fn_mcxt;
+ PopulateRecordCache *cache = fcinfo->flinfo->fn_extra;
+
+ Assert(jtype == JSONOID || jtype == JSONBOID);
+
+ /*
+ * We arrange to look up the needed I/O info just once per series of
+ * calls, assuming the record type doesn't change underneath us.
+ */
+ if (!cache)
+ fcinfo->flinfo->fn_extra = cache =
+ MemoryContextAllocZero(fnmcxt, sizeof(*cache));
+
+ if (have_record_arg)
+ {
+ Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+
+ if (cache->argtype != argtype)
{
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- fcinfo->flinfo->fn_mcxt);
- column_info->column_type = column_type;
+ if (!type_is_rowtype(argtype))
+ ereport(ERROR,
+ (errcode(ERRCODE_DATATYPE_MISMATCH),
+ errmsg("first argument of %s must be a row type",
+ funcname)));
+
+ cache->argtype = argtype;
}
- if ((jtype == JSONOID && (hashentry == NULL || hashentry->isnull)) ||
- (jtype == JSONBOID && (v == NULL || v->type == jbvNull)))
+
+ if (PG_ARGISNULL(0))
{
+ if (PG_ARGISNULL(1))
+ PG_RETURN_NULL();
+
/*
- * need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
+ * have no tuple to look at, so the only source of type info is
+ * the argtype. The lookup_rowtype_tupdesc call below will error
+ * out if we don't have a known composite type oid here.
*/
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
+ tupType = argtype;
+ tupTypmod = -1;
}
else
{
- char *s = NULL;
+ rec = PG_GETARG_HEAPTUPLEHEADER(0);
- if (jtype == JSONOID)
- {
- /* already done the hard work in the json case */
- s = hashentry->val;
- }
- else
- {
- if (v->type == jbvString)
- s = pnstrdup(v->val.string.val, v->val.string.len);
- else if (v->type == jbvBool)
- s = pnstrdup((v->val.boolean) ? "t" : "f", 1);
- else if (v->type == jbvNumeric)
- s = DatumGetCString(DirectFunctionCall1(numeric_out,
- PointerGetDatum(v->val.numeric)));
- else if (v->type == jbvBinary)
- s = JsonbToCString(NULL, (JsonbContainer *) v->val.binary.data, v->val.binary.len);
- else
- elog(ERROR, "unrecognized jsonb type: %d", (int) v->type);
- }
+ if (PG_ARGISNULL(1))
+ PG_RETURN_POINTER(rec);
- values[i] = InputFunctionCall(&column_info->proc, s,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
+ /* Extract type info from the tuple itself */
+ tupType = HeapTupleHeaderGetTypeId(rec);
+ tupTypmod = HeapTupleHeaderGetTypMod(rec);
}
}
+ else
+ {
+ /* json{b}_to_record case */
+ if (PG_ARGISNULL(0))
+ PG_RETURN_NULL();
+
+ if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
+ ereport(ERROR,
+ (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+ errmsg("function returning record called in context "
+ "that cannot accept type record"),
+ errhint("Try calling the function in the FROM clause "
+ "using a column definition list.")));
- rettuple = heap_form_tuple(tupdesc, values, nulls);
+ Assert(tupdesc);
- ReleaseTupleDesc(tupdesc);
+ /*
+ * Add tupdesc to the cache and set the appropriate values of
+ * tupType/tupTypmod for proper cache usage in populate_composite().
+ */
+ cache->io.tupdesc = tupdesc;
- if (json_hash)
- hash_destroy(json_hash);
+ tupType = tupdesc->tdtypeid;
+ tupTypmod = tupdesc->tdtypmod;
+ }
+
+ if (jtype == JSONOID)
+ {
+ json = PG_GETARG_TEXT_P(json_arg_num);
+ }
+ else
+ {
+ jb = PG_GETARG_JSONB(json_arg_num);
+
+ /* fill binary jsonb value pointing to jb */
+ jbv.type = jbvBinary;
+ jbv.val.binary.data = &jb->root;
+ jbv.val.binary.len = VARSIZE(jb) - VARHDRSZ;
+ }
+
+ rettuple = populate_composite(&cache->io,
+ tupType,
+ tupTypmod,
+ NULL,
+ fnmcxt,
+ rec,
+ jtype,
+ jtype == JSONOID ? VARDATA(json) : NULL,
+ jtype == JSONOID ? VARSIZE(json) - VARHDRSZ : 0,
+ jtype == JSONOID ? NULL : &jbv);
+
+ if (tupdesc)
+ {
+ cache->io.tupdesc = NULL;
+ ReleaseTupleDesc(tupdesc);
+ }
- PG_RETURN_DATUM(HeapTupleGetDatum(rettuple));
+ PG_RETURN_DATUM(rettuple);
}
/*
@@ -2373,12 +3034,12 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
* decompose a json object into a hash table.
*/
static HTAB *
-get_json_object_as_hash(text *json, const char *funcname)
+get_json_object_as_hash(char *json, int len, const char *funcname)
{
HASHCTL ctl;
HTAB *tab;
JHashState *state;
- JsonLexContext *lex = makeJsonLexContext(json, true);
+ JsonLexContext *lex = makeJsonLexContextCstringLen(json, len, true);
JsonSemAction *sem;
memset(&ctl, 0, sizeof(ctl));
@@ -2460,6 +3121,7 @@ hash_object_field_end(void *state, char *fname, bool isnull)
*/
hashentry->isnull = isnull;
+
if (_state->save_json_start != NULL)
{
int len = _state->lex->prev_token_terminator - _state->save_json_start;
@@ -2468,11 +3130,13 @@ hash_object_field_end(void *state, char *fname, bool isnull)
memcpy(val, _state->save_json_start, len);
val[len] = '\0';
hashentry->val = val;
+ hashentry->isstring = false;
}
else
{
/* must have had a scalar instead */
hashentry->val = _state->saved_scalar;
+ hashentry->isstring = _state->saved_scalar_is_string;
}
}
@@ -2498,7 +3162,10 @@ hash_scalar(void *state, char *token, JsonTokenType tokentype)
errmsg("cannot call %s on a scalar", _state->function_name)));
if (_state->lex->lex_level == 1)
+ {
_state->saved_scalar = token;
+ _state->saved_scalar_is_string = tokentype == JSON_TOKEN_STRING;
+ }
}
@@ -2537,121 +3204,24 @@ json_to_recordset(PG_FUNCTION_ARGS)
}
static void
-make_row_from_rec_and_jsonb(Jsonb *element, PopulateRecordsetState *state)
+populate_recordset_record(PopulateRecordsetState *state,
+ Oid jtype, HTAB *json_hash, JsonbContainer *jsonb)
{
- Datum *values;
- bool *nulls;
- int i;
- RecordIOData *my_extra = state->my_extra;
- int ncolumns = my_extra->ncolumns;
- TupleDesc tupdesc = state->ret_tdesc;
- HeapTupleHeader rec = state->rec;
- HeapTuple rettuple;
-
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
-
- if (state->rec)
- {
- HeapTupleData tuple;
-
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(state->rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = state->rec;
-
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
- }
- else
- {
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
- }
-
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- JsonbValue *v = NULL;
- char *key;
-
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
-
- key = NameStr(tupdesc->attrs[i]->attname);
-
- v = findJsonbValueFromContainerLen(&element->root, JB_FOBJECT,
- key, strlen(key));
-
- /*
- * We can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (v == NULL && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- state->fn_mcxt);
- column_info->column_type = column_type;
- }
- if (v == NULL || v->type == jbvNull)
- {
- /*
- * Need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
- */
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
- }
- else
- {
- char *s = NULL;
-
- if (v->type == jbvString)
- s = pnstrdup(v->val.string.val, v->val.string.len);
- else if (v->type == jbvBool)
- s = pnstrdup((v->val.boolean) ? "t" : "f", 1);
- else if (v->type == jbvNumeric)
- s = DatumGetCString(DirectFunctionCall1(numeric_out,
- PointerGetDatum(v->val.numeric)));
- else if (v->type == jbvBinary)
- s = JsonbToCString(NULL, (JsonbContainer *) v->val.binary.data, v->val.binary.len);
- else
- elog(ERROR, "unrecognized jsonb type: %d", (int) v->type);
-
- values[i] = InputFunctionCall(&column_info->proc, s,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
- }
- }
-
- rettuple = heap_form_tuple(tupdesc, values, nulls);
-
- tuplestore_puttuple(state->tuple_store, rettuple);
+ HeapTupleData tuple;
+ HeapTupleHeader tuphead = populate_record(state->ret_tdesc,
+ state->my_extra,
+ state->rec,
+ state->fn_mcxt,
+ jtype,
+ json_hash,
+ jsonb);
+
+ tuple.t_len = HeapTupleHeaderGetDatumLength(tuphead);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = tuphead;
+
+ tuplestore_puttuple(state->tuple_store, &tuple);
}
/*
@@ -2665,12 +3235,8 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
ReturnSetInfo *rsi;
MemoryContext old_cxt;
- Oid tupType;
- int32 tupTypmod;
HeapTupleHeader rec;
TupleDesc tupdesc;
- RecordIOData *my_extra;
- int ncolumns;
PopulateRecordsetState *state;
if (have_record_arg)
@@ -2716,38 +3282,6 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
else
rec = PG_GETARG_HEAPTUPLEHEADER(0);
- tupType = tupdesc->tdtypeid;
- tupTypmod = tupdesc->tdtypmod;
- ncolumns = tupdesc->natts;
-
- /*
- * We arrange to look up the needed I/O info just once per series of
- * calls, assuming the record type doesn't change underneath us.
- */
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- if (my_extra == NULL ||
- my_extra->ncolumns != ncolumns)
- {
- fcinfo->flinfo->fn_extra =
- MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- my_extra->record_type = InvalidOid;
- my_extra->record_typmod = 0;
- }
-
- if (my_extra->record_type != tupType ||
- my_extra->record_typmod != tupTypmod)
- {
- MemSet(my_extra, 0,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra->record_type = tupType;
- my_extra->record_typmod = tupTypmod;
- my_extra->ncolumns = ncolumns;
- }
-
state = palloc0(sizeof(PopulateRecordsetState));
/* make these in a sufficiently long-lived memory context */
@@ -2760,7 +3294,7 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
MemoryContextSwitchTo(old_cxt);
state->function_name = funcname;
- state->my_extra = my_extra;
+ state->my_extra = (RecordIOData **) &fcinfo->flinfo->fn_extra;
state->rec = rec;
state->fn_mcxt = fcinfo->flinfo->fn_mcxt;
@@ -2811,14 +3345,15 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
if (r == WJB_ELEM)
{
- Jsonb *element = JsonbValueToJsonb(&v);
-
- if (!JB_ROOT_IS_OBJECT(element))
+ if (v.type != jbvBinary ||
+ !JsonContainerIsObject(v.val.binary.data))
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("argument of %s must be an array of objects",
funcname)));
- make_row_from_rec_and_jsonb(element, state);
+
+ populate_recordset_record(state, JSONBOID, NULL,
+ v.val.binary.data);
}
}
}
@@ -2862,115 +3397,16 @@ static void
populate_recordset_object_end(void *state)
{
PopulateRecordsetState *_state = (PopulateRecordsetState *) state;
- HTAB *json_hash = _state->json_hash;
- Datum *values;
- bool *nulls;
- int i;
- RecordIOData *my_extra = _state->my_extra;
- int ncolumns = my_extra->ncolumns;
- TupleDesc tupdesc = _state->ret_tdesc;
- JsonHashEntry *hashentry;
- HeapTupleHeader rec = _state->rec;
- HeapTuple rettuple;
/* Nested objects require no special processing */
if (_state->lex->lex_level > 1)
return;
/* Otherwise, construct and return a tuple based on this level-1 object */
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
-
- if (_state->rec)
- {
- HeapTupleData tuple;
-
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(_state->rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = _state->rec;
-
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
- }
- else
- {
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
- }
-
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- char *value;
-
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
-
- hashentry = hash_search(json_hash,
- NameStr(tupdesc->attrs[i]->attname),
- HASH_FIND, NULL);
-
- /*
- * we can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (hashentry == NULL && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- _state->fn_mcxt);
- column_info->column_type = column_type;
- }
- if (hashentry == NULL || hashentry->isnull)
- {
- /*
- * need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
- */
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
- }
- else
- {
- value = hashentry->val;
-
- values[i] = InputFunctionCall(&column_info->proc, value,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
- }
- }
-
- rettuple = heap_form_tuple(tupdesc, values, nulls);
-
- tuplestore_puttuple(_state->tuple_store, rettuple);
+ populate_recordset_record(_state, JSONOID, _state->json_hash, NULL);
/* Done with hash for this object */
- hash_destroy(json_hash);
+ hash_destroy(_state->json_hash);
_state->json_hash = NULL;
}
@@ -3066,11 +3502,13 @@ populate_recordset_object_field_end(void *state, char *fname, bool isnull)
memcpy(val, _state->save_json_start, len);
val[len] = '\0';
hashentry->val = val;
+ hashentry->isstring = false;
}
else
{
/* must have had a scalar instead */
hashentry->val = _state->saved_scalar;
+ hashentry->isstring = _state->saved_scalar_is_string;
}
}
diff --git a/src/backend/utils/cache/lsyscache.c b/src/backend/utils/cache/lsyscache.c
index 13ae6ad..641005b 100644
--- a/src/backend/utils/cache/lsyscache.c
+++ b/src/backend/utils/cache/lsyscache.c
@@ -2492,6 +2492,26 @@ get_array_type(Oid typid)
}
/*
+ * get_type_ndims
+ *
+ * Given the type OID, get the typndims.
+ */
+int32
+get_type_ndims(Oid typid)
+{
+ HeapTuple tp;
+ int32 result = -1;
+
+ tp = SearchSysCache1(TYPEOID, ObjectIdGetDatum(typid));
+ if (HeapTupleIsValid(tp))
+ {
+ result = ((Form_pg_type) GETSTRUCT(tp))->typndims;
+ ReleaseSysCache(tp);
+ }
+ return result;
+}
+
+/*
* get_promoted_array_type
*
* The "promoted" type is what you'd get from an ARRAY(SELECT ...)
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index 470d5b1..850a825 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -218,6 +218,11 @@ typedef struct
#define JB_ROOT_IS_OBJECT(jbp_) ( *(uint32*) VARDATA(jbp_) & JB_FOBJECT)
#define JB_ROOT_IS_ARRAY(jbp_) ( *(uint32*) VARDATA(jbp_) & JB_FARRAY)
+#define JsonContainerSize(jc) ((jc)->header & JB_CMASK)
+#define JsonContainerIsObject(jc) ((jc)->header & JB_FOBJECT)
+#define JsonContainerIsArray(jc) ((jc)->header & JB_FARRAY)
+#define JsonContainerIsScalar(jc) ((jc)->header & JB_FSCALAR)
+
enum jbvType
{
diff --git a/src/include/utils/lsyscache.h b/src/include/utils/lsyscache.h
index dcb8980..d25cac7 100644
--- a/src/include/utils/lsyscache.h
+++ b/src/include/utils/lsyscache.h
@@ -136,6 +136,7 @@ extern Oid get_element_type(Oid typid);
extern Oid get_array_type(Oid typid);
extern Oid get_promoted_array_type(Oid typid);
extern Oid get_base_element_type(Oid typid);
+extern int32 get_type_ndims(Oid typid);
extern void getTypeInputInfo(Oid type, Oid *typInput, Oid *typIOParam);
extern void getTypeOutputInfo(Oid type, Oid *typOutput, bool *typIsVarlena);
extern void getTypeBinaryInputInfo(Oid type, Oid *typReceive, Oid *typIOParam);
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index efcdc41..3edc398 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -1304,6 +1304,30 @@ select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8
-- populate_record
create type jpop as (a text, b int, c timestamp);
+CREATE DOMAIN js_int_not_null AS int NOT NULL;
+CREATE DOMAIN js_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN js_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+CREATE TYPE jsrec AS (
+ i int,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d js_int_array_1d,
+ ia2d js_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jpop,
+ reca jpop[]
+);
+CREATE TYPE jsrec_i_not_null AS (
+ i js_int_not_null
+);
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
@@ -1342,6 +1366,302 @@ select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100,200,false]"
+select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{}') q;
+ a | b | c
+---+---+--------------------------
+ x | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"x": 43.2}') q;
+ERROR: domain js_int_not_null does not allow null values
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": null}') q;
+ERROR: domain js_int_not_null does not allow null values
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": 12345}') q;
+ i
+-------
+ 12345
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": null}') q;
+ ia1
+-----
+
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": 123}') q;
+ERROR: expected json array in value of key "ia1"
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [1, "2", null, 4]}') q;
+ ia1
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [[1, 2, 3]]}') q;
+ERROR: invalid input syntax for integer: "[1, 2, 3]"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": null}') q;
+ ia1d
+------
+
+(1 row)
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": 123}') q;
+ERROR: expected json array in value of key "ia1d"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null, 4]}') q;
+ERROR: value for domain js_int_array_1d violates check constraint "js_int_array_1d_check"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null]}') q;
+ ia1d
+------------
+ {1,2,NULL}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [1, "2", null, 4]}') q;
+ERROR: expected json array in value of key "ia2"
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+ ia2
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[], []]}') q;
+ ia2
+-----
+ {}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [3]]}') q;
+ERROR: malformed json array: "[[1, 2], [3]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], 3, 4]}') q;
+ERROR: expected json array in value of key "ia2"
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+ERROR: value for domain js_int_array_2d violates check constraint "js_int_array_2d_check"
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+ ia2d
+----------------------
+ {{1,2,3},{NULL,5,6}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [1, "2", null, 4]}') q;
+ERROR: expected json array in value of key "ia3"
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+ERROR: expected json array in value of key "ia3"
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+ ia3
+-----
+ {}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+ ia3
+-------------------
+ {{{1,2}},{{3,4}}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+ ia3
+-------------------------------
+ {{{1,2},{3,4}},{{5,6},{7,8}}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+ERROR: malformed json array: "[ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": null}') q;
+ ta
+----
+
+(1 row)
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": 123}') q;
+ERROR: expected json array in value of key "ta"
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [1, "2", null, 4]}') q;
+ ta
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+ ta
+--------------------------------
+ {"[1, 2, 3]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": null}') q;
+ c
+---
+
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaa"}') q;
+ c
+------------
+ aaa
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaa"}') q;
+ c
+------------
+ aaaaaaaaaa
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaaaaa"}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": null}') q;
+ ca
+----
+
+(1 row)
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": 123}') q;
+ERROR: expected json array in value of key "ca"
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [1, "2", null, 4]}') q;
+ ca
+-----------------------------------------------
+ {"1 ","2 ",NULL,"4 "}
+(1 row)
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+ ca
+---------------------------------
+ {"[1, 2, 3] ","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": null}') q;
+ js
+----
+
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": true}') q;
+ js
+------
+ true
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": 123.45}') q;
+ js
+--------
+ 123.45
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "123.45"}') q;
+ js
+----------
+ "123.45"
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "abc"}') q;
+ js
+-------
+ "abc"
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+ js
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ js
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": null}') q;
+ jsb
+-----
+
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": true}') q;
+ jsb
+------
+ true
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": 123.45}') q;
+ jsb
+--------
+ 123.45
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "123.45"}') q;
+ jsb
+----------
+ "123.45"
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "abc"}') q;
+ jsb
+-------
+ "abc"
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+ jsb
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ jsb
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": null}') q;
+ jsa
+-----
+
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": 123}') q;
+ERROR: expected json array in value of key "jsa"
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": [1, "2", null, 4]}') q;
+ jsa
+--------------------
+ {1,"\"2\"",NULL,4}
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+ jsa
+----------------------------------------------------------
+ {"\"aaa\"",NULL,"[1, 2, \"3\", {}]","{ \"k\" : \"v\" }"}
+(1 row)
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": 123}') q;
+ERROR: cannot call populate_composite on a scalar
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": [1, 2]}') q;
+ERROR: cannot call populate_composite on an array
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+ rec
+-----------------------------------
+ (abc,,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": 123}') q;
+ERROR: expected json array in value of key "reca"
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [1, 2]}') q;
+ERROR: cannot call populate_composite on a scalar
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+ reca
+--------------------------------------------------------
+ {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT rec FROM json_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jpop,NULL)::jsrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+ rec
+------------------------------------
+ (abc,3,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
@@ -1408,6 +1728,28 @@ select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,3
{"z":true} | 3 | Fri Jan 20 10:42:53 2012
(2 rows)
+-- test type info caching in json_populate_record()
+CREATE TEMP TABLE jspoptest (js json);
+INSERT INTO jspoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::json
+FROM generate_series(1, 3);
+SELECT (json_populate_record(NULL::jsrec, js)).* FROM jspoptest;
+ i | ia1 | ia2 | ia3 | ia1d | ia2d | t | ta | c | ca | ts | js | jsb | jsa | rec | reca
+---+-----+-----+-----+------+------+---+----+---+----+----+----+-----+--------------------+-----------------------------------+--------------------------------------------------------
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(3 rows)
+
+DROP TYPE jsrec;
+DROP TYPE jsrec_i_not_null;
+DROP DOMAIN js_int_not_null;
+DROP DOMAIN js_int_array_1d;
+DROP DOMAIN js_int_array_2d;
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
@@ -1600,11 +1942,11 @@ select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":fa
(2 rows)
select *, c is null as c_is_null
-from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::json)
- as t(a int, b json, c text, x int);
- a | b | c | x | c_is_null
----+-----------------+---+---+-----------
- 1 | {"c":16, "d":2} | | 8 | t
+from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::json)
+ as t(a int, b json, c text, x int, ca char(5)[], ia int[][], r jpop);
+ a | b | c | x | ca | ia | r | c_is_null
+---+-----------------+---+---+-------------------+---------------+------------+-----------
+ 1 | {"c":16, "d":2} | | 8 | {"1 2 ","3 "} | {{1,2},{3,4}} | (aaa,123,) | t
(1 row)
select *, c is null as c_is_null
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index e2cb08a..4c7727e 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -1888,6 +1888,30 @@ SELECT * FROM jsonb_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,
-- populate_record
CREATE TYPE jbpop AS (a text, b int, c timestamp);
+CREATE DOMAIN jsb_int_not_null AS int NOT NULL;
+CREATE DOMAIN jsb_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN jsb_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+CREATE TYPE jsbrec AS (
+ i int,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d jsb_int_array_1d,
+ ia2d jsb_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jbpop,
+ reca jbpop[]
+);
+CREATE TYPE jsbrec_i_not_null AS (
+ i jsb_int_not_null
+);
SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
@@ -1926,6 +1950,302 @@ SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100, 200, false]"
+SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop, '{}') q;
+ a | b | c
+---+---+--------------------------
+ x | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"x": 43.2}') q;
+ERROR: domain jsb_int_not_null does not allow null values
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": null}') q;
+ERROR: domain jsb_int_not_null does not allow null values
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": 12345}') q;
+ i
+-------
+ 12345
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": null}') q;
+ ia1
+-----
+
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": 123}') q;
+ERROR: expected json array in value of key "ia1"
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [1, "2", null, 4]}') q;
+ ia1
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [[1, 2, 3]]}') q;
+ERROR: invalid input syntax for integer: "[1, 2, 3]"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": null}') q;
+ ia1d
+------
+
+(1 row)
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": 123}') q;
+ERROR: expected json array in value of key "ia1d"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null, 4]}') q;
+ERROR: value for domain jsb_int_array_1d violates check constraint "jsb_int_array_1d_check"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null]}') q;
+ ia1d
+------------
+ {1,2,NULL}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [1, "2", null, 4]}') q;
+ERROR: expected json array in value of key "ia2"
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+ ia2
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[], []]}') q;
+ ia2
+-----
+ {}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [3]]}') q;
+ERROR: malformed json array: "[[1, 2], [3]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], 3, 4]}') q;
+ERROR: expected json array in value of key "ia2"
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+ERROR: value for domain jsb_int_array_2d violates check constraint "jsb_int_array_2d_check"
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+ ia2d
+----------------------
+ {{1,2,3},{NULL,5,6}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [1, "2", null, 4]}') q;
+ERROR: expected json array in value of key "ia3"
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+ERROR: expected json array in value of key "ia3"
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+ ia3
+-----
+ {}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+ ia3
+-------------------
+ {{{1,2}},{{3,4}}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+ ia3
+-------------------------------
+ {{{1,2},{3,4}},{{5,6},{7,8}}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+ERROR: malformed json array: "[[[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": null}') q;
+ ta
+----
+
+(1 row)
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": 123}') q;
+ERROR: expected json array in value of key "ta"
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [1, "2", null, 4]}') q;
+ ta
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+ ta
+--------------------------------
+ {"[1, 2, 3]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": null}') q;
+ c
+---
+
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaa"}') q;
+ c
+------------
+ aaa
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaa"}') q;
+ c
+------------
+ aaaaaaaaaa
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaaaaa"}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": null}') q;
+ ca
+----
+
+(1 row)
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": 123}') q;
+ERROR: expected json array in value of key "ca"
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [1, "2", null, 4]}') q;
+ ca
+-----------------------------------------------
+ {"1 ","2 ",NULL,"4 "}
+(1 row)
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+ ca
+---------------------------------
+ {"[1, 2, 3] ","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": null}') q;
+ js
+----
+
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": true}') q;
+ js
+------
+ true
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": 123.45}') q;
+ js
+--------
+ 123.45
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "123.45"}') q;
+ js
+----------
+ "123.45"
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "abc"}') q;
+ js
+-------
+ "abc"
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+ js
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ js
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": null}') q;
+ jsb
+-----
+
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": true}') q;
+ jsb
+------
+ true
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": 123.45}') q;
+ jsb
+--------
+ 123.45
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "123.45"}') q;
+ jsb
+----------
+ "123.45"
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "abc"}') q;
+ jsb
+-------
+ "abc"
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+ jsb
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ jsb
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": null}') q;
+ jsa
+-----
+
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": 123}') q;
+ERROR: expected json array in value of key "jsa"
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": [1, "2", null, 4]}') q;
+ jsa
+--------------------
+ {1,"\"2\"",NULL,4}
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+ jsa
+-------------------------------------------------------
+ {"\"aaa\"",NULL,"[1, 2, \"3\", {}]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
+ERROR: expected json object in value of key "rec"
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
+ERROR: expected json object in value of key "rec"
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+ rec
+-----------------------------------
+ (abc,,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
+ERROR: expected json array in value of key "reca"
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
+ERROR: expected json object
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+ reca
+--------------------------------------------------------
+ {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jbpop,NULL)::jsbrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+ rec
+------------------------------------
+ (abc,3,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
-- populate_recordset
SELECT * FROM jsonb_populate_recordset(NULL::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
@@ -2002,11 +2322,11 @@ select * from jsonb_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar",
(2 rows)
select *, c is null as c_is_null
-from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::jsonb)
- as t(a int, b jsonb, c text, x int);
- a | b | c | x | c_is_null
----+-------------------+---+---+-----------
- 1 | {"c": 16, "d": 2} | | 8 | t
+from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::jsonb)
+ as t(a int, b jsonb, c text, x int, ca char(5)[], ia int[][], r jbpop);
+ a | b | c | x | ca | ia | r | c_is_null
+---+-------------------+---+---+-------------------+---------------+------------+-----------
+ 1 | {"c": 16, "d": 2} | | 8 | {"1 2 ","3 "} | {{1,2},{3,4}} | (aaa,123,) | t
(1 row)
select *, c is null as c_is_null
@@ -2017,6 +2337,28 @@ from jsonb_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::jsonb)
1 | {"c": 16, "d": 2} | | 8 | t
(1 row)
+-- test type info caching in jsonb_populate_record()
+CREATE TEMP TABLE jsbpoptest (js jsonb);
+INSERT INTO jsbpoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::jsonb
+FROM generate_series(1, 3);
+SELECT (jsonb_populate_record(NULL::jsbrec, js)).* FROM jsbpoptest;
+ i | ia1 | ia2 | ia3 | ia1d | ia2d | t | ta | c | ca | ts | js | jsb | jsa | rec | reca
+---+-----+-----+-----+------+------+---+----+---+----+----+----+-----+--------------------+-----------------------------------+--------------------------------------------------------
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(3 rows)
+
+DROP TYPE jsbrec;
+DROP TYPE jsbrec_i_not_null;
+DROP DOMAIN jsb_int_not_null;
+DROP DOMAIN jsb_int_array_1d;
+DROP DOMAIN jsb_int_array_2d;
-- indexing
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}';
count
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index 603288b..965a0e0 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -378,6 +378,33 @@ select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8
-- populate_record
create type jpop as (a text, b int, c timestamp);
+CREATE DOMAIN js_int_not_null AS int NOT NULL;
+CREATE DOMAIN js_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN js_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+
+CREATE TYPE jsrec AS (
+ i int,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d js_int_array_1d,
+ ia2d js_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jpop,
+ reca jpop[]
+);
+
+CREATE TYPE jsrec_i_not_null AS (
+ i js_int_not_null
+);
+
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":"blurfl","x":43.2}') q;
@@ -388,6 +415,89 @@ select * from json_populate_record(null::jpop,'{"a":[100,200,false],"x":43.2}')
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":[100,200,false],"x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
+select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{}') q;
+
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"x": 43.2}') q;
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": null}') q;
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": 12345}') q;
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": null}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": 123}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [1, "2", null, 4]}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [[1, 2, 3]]}') q;
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": null}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": 123}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null, 4]}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null]}') q;
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [1, "2", null, 4]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[], []]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [3]]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], 3, 4]}') q;
+
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [1, "2", null, 4]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": null}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": 123}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [1, "2", null, 4]}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": null}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaa"}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaa"}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaaaaa"}') q;
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": null}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": 123}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [1, "2", null, 4]}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": null}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": true}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": 123.45}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "123.45"}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "abc"}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": null}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": true}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": 123.45}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "123.45"}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "abc"}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": null}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": 123}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": [1, "2", null, 4]}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": 123}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": [1, 2]}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": 123}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [1, 2]}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+
+SELECT rec FROM json_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jpop,NULL)::jsrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
@@ -404,6 +514,25 @@ select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+-- test type info caching in json_populate_record()
+CREATE TEMP TABLE jspoptest (js json);
+
+INSERT INTO jspoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::json
+FROM generate_series(1, 3);
+
+SELECT (json_populate_record(NULL::jsrec, js)).* FROM jspoptest;
+
+DROP TYPE jsrec;
+DROP TYPE jsrec_i_not_null;
+DROP DOMAIN js_int_not_null;
+DROP DOMAIN js_int_array_1d;
+DROP DOMAIN js_int_array_2d;
+
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
@@ -520,8 +649,8 @@ select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":fa
as x(a int, b json, c boolean);
select *, c is null as c_is_null
-from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::json)
- as t(a int, b json, c text, x int);
+from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::json)
+ as t(a int, b json, c text, x int, ca char(5)[], ia int[][], r jpop);
select *, c is null as c_is_null
from json_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::json)
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index 6b4c796..4fde752 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -478,6 +478,33 @@ SELECT * FROM jsonb_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,
-- populate_record
CREATE TYPE jbpop AS (a text, b int, c timestamp);
+CREATE DOMAIN jsb_int_not_null AS int NOT NULL;
+CREATE DOMAIN jsb_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN jsb_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+
+CREATE TYPE jsbrec AS (
+ i int,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d jsb_int_array_1d,
+ ia2d jsb_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jbpop,
+ reca jbpop[]
+);
+
+CREATE TYPE jsbrec_i_not_null AS (
+ i jsb_int_not_null
+);
+
SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":"blurfl","x":43.2}') q;
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}') q;
@@ -488,6 +515,89 @@ SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":[100,200,false],"x":43.2}'
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":[100,200,false],"x":43.2}') q;
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}') q;
+SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop, '{}') q;
+
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"x": 43.2}') q;
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": null}') q;
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": 12345}') q;
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": null}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": 123}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [1, "2", null, 4]}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [[1, 2, 3]]}') q;
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": null}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": 123}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null, 4]}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null]}') q;
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [1, "2", null, 4]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[], []]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [3]]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], 3, 4]}') q;
+
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [1, "2", null, 4]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": null}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": 123}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [1, "2", null, 4]}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": null}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaa"}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaa"}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaaaaa"}') q;
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": null}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": 123}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [1, "2", null, 4]}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": null}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": true}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": 123.45}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "123.45"}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "abc"}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": null}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": true}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": 123.45}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "123.45"}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "abc"}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": null}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": 123}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": [1, "2", null, 4]}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+
+SELECT rec FROM jsonb_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jbpop,NULL)::jsbrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+
-- populate_recordset
SELECT * FROM jsonb_populate_recordset(NULL::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
SELECT * FROM jsonb_populate_recordset(row('def',99,NULL)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
@@ -509,13 +619,32 @@ select * from jsonb_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar",
as x(a int, b text, c boolean);
select *, c is null as c_is_null
-from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::jsonb)
- as t(a int, b jsonb, c text, x int);
+from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::jsonb)
+ as t(a int, b jsonb, c text, x int, ca char(5)[], ia int[][], r jbpop);
select *, c is null as c_is_null
from jsonb_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::jsonb)
as t(a int, b jsonb, c text, x int);
+-- test type info caching in jsonb_populate_record()
+CREATE TEMP TABLE jsbpoptest (js jsonb);
+
+INSERT INTO jsbpoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::jsonb
+FROM generate_series(1, 3);
+
+SELECT (jsonb_populate_record(NULL::jsbrec, js)).* FROM jsbpoptest;
+
+DROP TYPE jsbrec;
+DROP TYPE jsbrec_i_not_null;
+DROP DOMAIN jsb_int_not_null;
+DROP DOMAIN jsb_int_array_1d;
+DROP DOMAIN jsb_int_array_2d;
+
-- indexing
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}';
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":"CC"}';
0002_assign_ndims_to_record_function_result_types_v02.patchtext/x-patch; name=0002_assign_ndims_to_record_function_result_types_v02.patchDownload
diff --git a/src/backend/access/common/tupdesc.c b/src/backend/access/common/tupdesc.c
index b56d0e3..60510c3 100644
--- a/src/backend/access/common/tupdesc.c
+++ b/src/backend/access/common/tupdesc.c
@@ -673,7 +673,7 @@ BuildDescForRelation(List *schema)
* BuildDescFromLists
*
* Build a TupleDesc given lists of column names (as String nodes),
- * column type OIDs, typmods, and collation OIDs.
+ * column type OIDs, typmods, collation OIDs and ndims.
*
* No constraints are generated.
*
@@ -681,7 +681,8 @@ BuildDescForRelation(List *schema)
* with functions returning RECORD.
*/
TupleDesc
-BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
+BuildDescFromLists(List *names, List *types, List *typmods, List *collations,
+ List *ndims)
{
int natts;
AttrNumber attnum;
@@ -689,12 +690,14 @@ BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
ListCell *l2;
ListCell *l3;
ListCell *l4;
+ ListCell *l5;
TupleDesc desc;
natts = list_length(names);
Assert(natts == list_length(types));
Assert(natts == list_length(typmods));
Assert(natts == list_length(collations));
+ Assert(natts == list_length(ndims));
/*
* allocate a new tuple descriptor
@@ -706,12 +709,15 @@ BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
l2 = list_head(types);
l3 = list_head(typmods);
l4 = list_head(collations);
+ l5 = list_head(ndims);
+
foreach(l1, names)
{
char *attname = strVal(lfirst(l1));
Oid atttypid;
int32 atttypmod;
Oid attcollation;
+ int32 attndims;
atttypid = lfirst_oid(l2);
l2 = lnext(l2);
@@ -719,10 +725,12 @@ BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
l3 = lnext(l3);
attcollation = lfirst_oid(l4);
l4 = lnext(l4);
+ attndims = lfirst_oid(l5);
+ l5 = lnext(l5);
attnum++;
- TupleDescInitEntry(desc, attnum, attname, atttypid, atttypmod, 0);
+ TupleDescInitEntry(desc, attnum, attname, atttypid, atttypmod, attndims);
TupleDescInitEntryCollation(desc, attnum, attcollation);
}
diff --git a/src/backend/executor/nodeFunctionscan.c b/src/backend/executor/nodeFunctionscan.c
index 5a0f324..6d93cb3 100644
--- a/src/backend/executor/nodeFunctionscan.c
+++ b/src/backend/executor/nodeFunctionscan.c
@@ -410,7 +410,8 @@ ExecInitFunctionScan(FunctionScan *node, EState *estate, int eflags)
tupdesc = BuildDescFromLists(rtfunc->funccolnames,
rtfunc->funccoltypes,
rtfunc->funccoltypmods,
- rtfunc->funccolcollations);
+ rtfunc->funccolcollations,
+ rtfunc->funccolndims);
/*
* For RECORD results, make sure a typmod has been assigned. (The
diff --git a/src/backend/nodes/copyfuncs.c b/src/backend/nodes/copyfuncs.c
index 6955298..6ca0c91 100644
--- a/src/backend/nodes/copyfuncs.c
+++ b/src/backend/nodes/copyfuncs.c
@@ -2181,6 +2181,7 @@ _copyRangeTblFunction(const RangeTblFunction *from)
COPY_NODE_FIELD(funccoltypes);
COPY_NODE_FIELD(funccoltypmods);
COPY_NODE_FIELD(funccolcollations);
+ COPY_NODE_FIELD(funccolndims);
COPY_BITMAPSET_FIELD(funcparams);
return newnode;
diff --git a/src/backend/nodes/equalfuncs.c b/src/backend/nodes/equalfuncs.c
index 548a2aa..b0e0851 100644
--- a/src/backend/nodes/equalfuncs.c
+++ b/src/backend/nodes/equalfuncs.c
@@ -2490,6 +2490,7 @@ _equalRangeTblFunction(const RangeTblFunction *a, const RangeTblFunction *b)
COMPARE_NODE_FIELD(funccoltypes);
COMPARE_NODE_FIELD(funccoltypmods);
COMPARE_NODE_FIELD(funccolcollations);
+ COMPARE_NODE_FIELD(funccolndims);
COMPARE_BITMAPSET_FIELD(funcparams);
return true;
diff --git a/src/backend/nodes/outfuncs.c b/src/backend/nodes/outfuncs.c
index 9fe9873..3b9439c 100644
--- a/src/backend/nodes/outfuncs.c
+++ b/src/backend/nodes/outfuncs.c
@@ -2880,6 +2880,7 @@ _outRangeTblFunction(StringInfo str, const RangeTblFunction *node)
WRITE_NODE_FIELD(funccoltypes);
WRITE_NODE_FIELD(funccoltypmods);
WRITE_NODE_FIELD(funccolcollations);
+ WRITE_NODE_FIELD(funccolndims);
WRITE_BITMAPSET_FIELD(funcparams);
}
diff --git a/src/backend/nodes/readfuncs.c b/src/backend/nodes/readfuncs.c
index 63f6336..4525298 100644
--- a/src/backend/nodes/readfuncs.c
+++ b/src/backend/nodes/readfuncs.c
@@ -1359,6 +1359,7 @@ _readRangeTblFunction(void)
READ_NODE_FIELD(funccoltypes);
READ_NODE_FIELD(funccoltypmods);
READ_NODE_FIELD(funccolcollations);
+ READ_NODE_FIELD(funccolndims);
READ_BITMAPSET_FIELD(funcparams);
READ_DONE();
diff --git a/src/backend/parser/parse_relation.c b/src/backend/parser/parse_relation.c
index 58f7050..19f5fe2 100644
--- a/src/backend/parser/parse_relation.c
+++ b/src/backend/parser/parse_relation.c
@@ -1444,6 +1444,7 @@ addRangeTableEntryForFunction(ParseState *pstate,
rtfunc->funccoltypes = NIL;
rtfunc->funccoltypmods = NIL;
rtfunc->funccolcollations = NIL;
+ rtfunc->funccolndims = NIL;
rtfunc->funcparams = NULL; /* not set until planning */
/*
@@ -1509,6 +1510,7 @@ addRangeTableEntryForFunction(ParseState *pstate,
Oid attrtype;
int32 attrtypmod;
Oid attrcollation;
+ int32 attrndims;
attrname = n->colname;
if (n->typeName->setof)
@@ -1520,12 +1522,13 @@ addRangeTableEntryForFunction(ParseState *pstate,
typenameTypeIdAndMod(pstate, n->typeName,
&attrtype, &attrtypmod);
attrcollation = GetColumnDefCollation(pstate, n, attrtype);
+ attrndims = list_length(n->typeName->arrayBounds);
TupleDescInitEntry(tupdesc,
(AttrNumber) i,
attrname,
attrtype,
attrtypmod,
- 0);
+ attrndims);
TupleDescInitEntryCollation(tupdesc,
(AttrNumber) i,
attrcollation);
@@ -1537,6 +1540,8 @@ addRangeTableEntryForFunction(ParseState *pstate,
attrtypmod);
rtfunc->funccolcollations = lappend_oid(rtfunc->funccolcollations,
attrcollation);
+ rtfunc->funccolndims = lappend_int(rtfunc->funccolndims,
+ attrndims);
i++;
}
diff --git a/src/include/access/tupdesc.h b/src/include/access/tupdesc.h
index de18f74..9ed44fd 100644
--- a/src/include/access/tupdesc.h
+++ b/src/include/access/tupdesc.h
@@ -125,6 +125,7 @@ extern void TupleDescInitEntryCollation(TupleDesc desc,
extern TupleDesc BuildDescForRelation(List *schema);
-extern TupleDesc BuildDescFromLists(List *names, List *types, List *typmods, List *collations);
+extern TupleDesc BuildDescFromLists(List *names, List *types, List *typmods,
+ List *collations, List *ndims);
#endif /* TUPDESC_H */
diff --git a/src/include/nodes/parsenodes.h b/src/include/nodes/parsenodes.h
index fc532fb..d27db6e 100644
--- a/src/include/nodes/parsenodes.h
+++ b/src/include/nodes/parsenodes.h
@@ -969,7 +969,8 @@ typedef struct RangeTblEntry
* If the function had a column definition list (required for an
* otherwise-unspecified RECORD result), funccolnames lists the names given
* in the definition list, funccoltypes lists their declared column types,
- * funccoltypmods lists their typmods, funccolcollations their collations.
+ * funccoltypmods lists their typmods, funccolcollations their collations,
+ * funccolndims lists their numbers of array dimensions.
* Otherwise, those fields are NIL.
*
* Notice we don't attempt to store info about the results of functions
@@ -989,6 +990,7 @@ typedef struct RangeTblFunction
List *funccoltypes; /* OID list of column type OIDs */
List *funccoltypmods; /* integer list of column typmods */
List *funccolcollations; /* OID list of column collation OIDs */
+ List *funccolndims; /* integer list of column array ndims */
/* This is set during planning for use by the executor: */
Bitmapset *funcparams; /* PARAM_EXEC Param IDs affecting this func */
} RangeTblFunction;
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
[ 0001_recursive_json_populate_record_v02.patch ]
[ 0002_assign_ndims_to_record_function_result_types_v02.patch ]
I do not see the point of the second one of these, and it adds no test
case showing why it would be needed. The example you quoted at the
start of the thread doesn't fail for me in HEAD, so I surmise that
it's falling foul of some assertion you added in the 0001 patch, but
if so I think that assertion is wrong. attndims is really syntactic
sugar only and doesn't affect anything meaningful semantically. Here
is an example showing why it shouldn't:
regression=# create table foo (d0 _int4, d1 int[], d2 int[3][4]);
CREATE TABLE
regression=# select attname,atttypid,attndims from pg_attribute where attrelid = 'foo'::regclass and attnum > 0;
attname | atttypid | attndims
---------+----------+----------
d0 | 1007 | 0
d1 | 1007 | 1
d2 | 1007 | 2
(3 rows)
Columns d0,d1,d2 are really all of the same type, and any code that
treats d0 and d1 differently is certainly broken.
So there should be no need to track a particular attndims for an output
column of a function result, and in most cases it's not clear to me where
you would get an attndims value from anyway.
regards, tom lane
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 01/08/2017 09:52 PM, Tom Lane wrote:
The example you quoted at the start of the thread doesn't fail for me
in HEAD, so I surmise that it's falling foul of some assertion you added
in the 0001 patch, but if so I think that assertion is wrong. attndims
is really syntactic sugar only and doesn't affect anything meaningful
semantically. Here is an example showing why it shouldn't:
regression=# create table foo (d0 _int4, d1 int[], d2 int[3][4]);
CREATE TABLE
regression=# select attname,atttypid,attndims from pg_attribute where attrelid = 'foo'::regclass and attnum > 0;
attname | atttypid | attndims
---------+----------+----------
d0 | 1007 | 0
d1 | 1007 | 1
d2 | 1007 | 2
(3 rows)
Columns d0,d1,d2 are really all of the same type, and any code that
treats d0 and d1 differently is certainly broken.
Thank you for this example with the raw _int4 type. I didn't expect that
attndims could legally be zero. There really was an incorrect assertion
"ndims > 0", which was necessary because I used attndims to verify the
number of dimensions of a populated json array.
I have fixed the first patch: when the number of dimensions is unknown,
we determine it simply by the number of successive opening brackets at
the start of a json array. But I am still using for verification the
non-zero ndims values that can be obtained from a composite type
attribute (attndims) or from a domain array type (typndims) through the
specially added function get_type_ndims().
On 01/08/2017 09:52 PM, Tom Lane wrote:
I do not see the point of the second one of these, and it adds no test
case showing why it would be needed.
I also have added special test cases for json_to_record() showing difference
in behavior that the second patch brings in (see changes in json.out and
jsonb.out).
--
Nikita Glukhov
Postgres Professional: http://www.postgrespro.com
The Russian Postgres Company
Attachments:
0001_recursive_json_populate_record_v03.patchtext/x-patch; name=0001_recursive_json_populate_record_v03.patchDownload
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index 10e3186..55cacfb 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -11249,12 +11249,12 @@ table2-mapping
whose columns match the record type defined by <replaceable>base</>
(see note below).
</entry>
- <entry><literal>select * from json_populate_record(null::myrowtype, '{"a":1,"b":2}')</literal></entry>
+ <entry><literal>select * from json_populate_record(null::myrowtype, '{"a": 1, "b": ["2", "a b"], "c": {"d": 4, "e": "a b c"}}')</literal></entry>
<entry>
<programlisting>
- a | b
----+---
- 1 | 2
+ a | b | c
+---+-----------+-------------
+ 1 | {2,"a b"} | (4,"a b c")
</programlisting>
</entry>
</row>
@@ -11343,12 +11343,12 @@ table2-mapping
explicitly define the structure of the record with an <literal>AS</>
clause.
</entry>
- <entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":"bar"}') as x(a int, b text, d text) </literal></entry>
+ <entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":[1,2,3],"e":"bar","r": {"a": 123, "b": "a b c"}}') as x(a int, b text, c int[], d text, r myrowtype) </literal></entry>
<entry>
<programlisting>
- a | b | d
----+---------+---
- 1 | [1,2,3] |
+ a | b | c | d | r
+---+---------+---------+---+---------------
+ 1 | [1,2,3] | {1,2,3} | | (123,"a b c")
</programlisting>
</entry>
</row>
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index 17ee4e4..04959cb 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -93,7 +93,7 @@ static void elements_array_element_end(void *state, bool isnull);
static void elements_scalar(void *state, char *token, JsonTokenType tokentype);
/* turn a json object into a hash table */
-static HTAB *get_json_object_as_hash(text *json, const char *funcname);
+static HTAB *get_json_object_as_hash(char *json, int len, const char *funcname);
/* common worker for populate_record and to_record */
static Datum populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
@@ -149,6 +149,33 @@ static void setPathArray(JsonbIterator **it, Datum *path_elems,
int level, Jsonb *newval, uint32 nelems, int op_type);
static void addJsonbToParseState(JsonbParseState **jbps, Jsonb *jb);
+/* helper functions for populate_record[set] */
+typedef struct ColumnIOData ColumnIOData;
+typedef struct RecordIOData RecordIOData;
+
+static HeapTupleHeader
+populate_record(TupleDesc tupdesc,
+ RecordIOData **record_info,
+ HeapTupleHeader template,
+ MemoryContext mcxt,
+ Oid jtype,
+ HTAB *json_hash,
+ JsonbContainer *cont);
+
+static Datum
+populate_record_field(ColumnIOData *col,
+ Oid typid,
+ int32 typmod,
+ int32 ndims,
+ const char *colname,
+ MemoryContext mcxt,
+ Datum defaultval,
+ Oid jtype,
+ char *json,
+ bool json_is_string,
+ JsonbValue *jval,
+ bool *isnull);
+
/* state for json_object_keys */
typedef struct OkeysState
{
@@ -216,6 +243,7 @@ typedef struct JhashState
HTAB *hash;
char *saved_scalar;
char *save_json_start;
+ bool saved_scalar_is_string;
} JHashState;
/* hashtable element */
@@ -223,26 +251,67 @@ typedef struct JsonHashEntry
{
char fname[NAMEDATALEN]; /* hash key (MUST BE FIRST) */
char *val;
- char *json;
bool isnull;
+ bool isstring;
} JsonHashEntry;
-/* these two are stolen from hstore / record_out, used in populate_record* */
-typedef struct ColumnIOData
+/* structure to cache type I/O metadata needed for populate_scalar() */
+typedef struct ScalarIOData
{
- Oid column_type;
- Oid typiofunc;
Oid typioparam;
- FmgrInfo proc;
-} ColumnIOData;
+ FmgrInfo typiofunc;
+} ScalarIOData;
+
+/* structure to cache metadata needed for populate_array() */
+typedef struct ArrayIOData
+{
+ Oid element_type; /* array element type id */
+ int32 element_typmod; /* array element type modifier */
+ ColumnIOData *element_info; /* metadata cache */
+ int ndims; /* number of dimensions of array */
+} ArrayIOData;
+
+/* structure to cache metadata needed for populate_composite() */
+typedef struct CompositeIOData
+{
+ /*
+ * We use pointer to a RecordIOData here because variable-length
+ * struct RecordIOData can't be used directly in ColumnIOData.io union
+ */
+ RecordIOData *recordio; /* metadata cache for populate_record() */
+ TupleDesc tupdesc; /* cached tuple descriptor */
+} CompositeIOData;
+
+/* these two are stolen from hstore / record_out, used in populate_record* */
-typedef struct RecordIOData
+/* structure to cache record metadata needed for populate_record_field() */
+struct ColumnIOData
+{
+ Oid typid; /* column type id */
+ int32 typmod; /* column type modifier */
+ int32 ndims; /* number of array dimensions if column is array */
+ char type_category; /* column type category:
+ * 's' - scalar,
+ * 'a' - array,
+ * 'c' - composite */
+ bool type_is_domain; /* column type is domain */
+ void *domain_info; /* opaque cache for domain checks */
+ union
+ {
+ ScalarIOData scalar;
+ ArrayIOData array;
+ CompositeIOData composite;
+ } io; /* metadata cache for various column type categories */
+};
+
+/* structure to cache record metadata needed for populate_record() */
+struct RecordIOData
{
Oid record_type;
int32 record_typmod;
int ncolumns;
ColumnIOData columns[FLEXIBLE_ARRAY_MEMBER];
-} RecordIOData;
+};
/* state for populate_recordset */
typedef struct PopulateRecordsetState
@@ -252,13 +321,37 @@ typedef struct PopulateRecordsetState
HTAB *json_hash;
char *saved_scalar;
char *save_json_start;
+ bool saved_scalar_is_string;
Tuplestorestate *tuple_store;
TupleDesc ret_tdesc;
HeapTupleHeader rec;
- RecordIOData *my_extra;
+ RecordIOData **my_extra;
MemoryContext fn_mcxt; /* used to stash IO funcs */
} PopulateRecordsetState;
+/* common data for populate_array_json() and populate_array_dim_jsonb() */
+typedef struct PopulateArrayContext
+{
+ ArrayBuildState *astate; /* array build state */
+ ArrayIOData *aio; /* metadata cache */
+ MemoryContext acxt; /* array build memory context */
+ MemoryContext mcxt; /* cache memory context */
+ const char *colname; /* for diagnostics only */
+ int *dims; /* dimensions */
+ int *sizes; /* current dimension counters */
+ int ndims; /* number of dimensions */
+} PopulateArrayContext;
+
+/* state for populate_array_json() */
+typedef struct PopulateArrayState
+{
+ JsonLexContext *lex; /* json lexer */
+ PopulateArrayContext *ctx; /* context */
+ char *element_start; /* start of the current array element */
+ char *element_scalar; /* current array element token if it is a scalar */
+ bool element_scalar_is_string; /* is current scalar array element string */
+} PopulateArrayState;
+
/* state for json_strip_nulls */
typedef struct StripnullState
{
@@ -267,10 +360,6 @@ typedef struct StripnullState
bool skip_next_null;
} StripnullState;
-/* Turn a jsonb object into a record */
-static void make_row_from_rec_and_jsonb(Jsonb *element,
- PopulateRecordsetState *state);
-
/*
* SQL function json_object_keys
*
@@ -2098,273 +2187,912 @@ json_to_record(PG_FUNCTION_ARGS)
return populate_record_worker(fcinfo, "json_to_record", false);
}
-static Datum
-populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
- bool have_record_arg)
+/* helper function for diagnostics */
+static void
+populate_array_report_expected_array(PopulateArrayContext *ctx, int ndim)
{
- int json_arg_num = have_record_arg ? 1 : 0;
- Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
- text *json;
- Jsonb *jb = NULL;
- HTAB *json_hash = NULL;
- HeapTupleHeader rec = NULL;
- Oid tupType = InvalidOid;
- int32 tupTypmod = -1;
- TupleDesc tupdesc;
- HeapTupleData tuple;
- HeapTuple rettuple;
- RecordIOData *my_extra;
- int ncolumns;
- int i;
- Datum *values;
- bool *nulls;
+ StringInfoData msg;
+ int i;
- Assert(jtype == JSONOID || jtype == JSONBOID);
+ initStringInfo(&msg);
- if (have_record_arg)
+ appendStringInfoString(&msg, "expected json array");
+
+ if (ndim > 0)
{
- Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+ Assert(ctx->ndims > 0 && ndim < ctx->ndims);
- if (!type_is_rowtype(argtype))
- ereport(ERROR,
- (errcode(ERRCODE_DATATYPE_MISMATCH),
- errmsg("first argument of %s must be a row type",
- funcname)));
+ appendStringInfoString(&msg, " in array element ");
- if (PG_ARGISNULL(0))
- {
- if (PG_ARGISNULL(1))
- PG_RETURN_NULL();
+ for (i = 0; i < ndim; i++)
+ appendStringInfo(&msg, "[%d]", ctx->sizes[i]);
+ }
- /*
- * have no tuple to look at, so the only source of type info is
- * the argtype. The lookup_rowtype_tupdesc call below will error
- * out if we don't have a known composite type oid here.
- */
- tupType = argtype;
- tupTypmod = -1;
- }
- else
- {
- rec = PG_GETARG_HEAPTUPLEHEADER(0);
+ if (ctx->colname)
+ appendStringInfo(&msg, "%s of key \"%s\"",
+ ndim > 0 ? "" : " in value", ctx->colname);
- if (PG_ARGISNULL(1))
- PG_RETURN_POINTER(rec);
+ elog(ERROR, "%s", msg.data);
+}
- /* Extract type info from the tuple itself */
- tupType = HeapTupleHeaderGetTypeId(rec);
- tupTypmod = HeapTupleHeaderGetTypMod(rec);
- }
+/* assign number of dimensions of the populated array when it became known */
+static void
+populate_array_assign_ndims(PopulateArrayContext *ctx, int ndims)
+{
+ int i;
+
+ Assert(ctx->ndims <= 0);
- tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+ if (ndims <= 0)
+ populate_array_report_expected_array(ctx, ndims);
+
+ ctx->ndims = ndims;
+ ctx->dims = palloc(sizeof(int) * ndims);
+ ctx->sizes = palloc0(sizeof(int) * ndims);
+
+ for (i = 0; i < ndims; i++)
+ ctx->dims[i] = -1; /* dimensions are unknown yet */
+}
+
+/* check populated subarray dimension */
+static void
+populate_array_check_dimension(PopulateArrayContext *ctx, int ndim,
+ Oid jtype, char *json, JsonbValue *jbv)
+{
+ int dim = ctx->sizes[ndim]; /* current dimension counter */
+
+ if (ctx->dims[ndim] == -1)
+ ctx->dims[ndim] = dim; /* assign dimension if unknown yet */
+ else if (ctx->dims[ndim] != dim)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("malformed json array: \"%s\"",
+ jtype == JSONOID ? json :
+ JsonbToCString(NULL, jbv->val.binary.data,
+ jbv->val.binary.len)),
+ errdetail("Multidimensional arrays must have "
+ "sub-arrays with matching dimensions.")));
+
+ /* reset current array dimension size counter */
+ ctx->sizes[ndim] = 0;
+
+ /* increment parent dimension counter if it is a nested sub-array */
+ if (ndim > 0)
+ ctx->sizes[ndim - 1]++;
+}
+
+static void populate_array_element(PopulateArrayContext *ctx,
+ int ndim,
+ Oid jtype,
+ char *json,
+ bool jsonIsString,
+ JsonbValue *jbv)
+{
+ Datum element;
+ bool element_isnull;
+
+ /* populate array element */
+ element = populate_record_field(ctx->aio->element_info,
+ ctx->aio->element_type,
+ ctx->aio->element_typmod,
+ 0, NULL, ctx->mcxt,
+ PointerGetDatum(NULL),
+ jtype, json, jsonIsString, jbv,
+ &element_isnull);
+
+ accumArrayResult(ctx->astate, element, element_isnull,
+ ctx->aio->element_type, ctx->acxt);
+
+ Assert(ndim > 0);
+ ctx->sizes[ndim - 1]++; /* increment current dimension counter */
+}
+
+/* json object start handler for populate_array_json() */
+static void
+populate_array_object_start(void *_state)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (state->ctx->ndims <= 0)
+ populate_array_assign_ndims(state->ctx, ndim);
+ else if (ndim < state->ctx->ndims)
+ populate_array_report_expected_array(state->ctx, ndim);
+}
+
+/* json array end handler for populate_array_json() */
+static void
+populate_array_array_end(void *_state)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ PopulateArrayContext *ctx = state->ctx;
+ int ndim = state->lex->lex_level;
+
+ if (ctx->ndims <= 0)
+ populate_array_assign_ndims(ctx, ndim + 1);
+
+ if (ndim < ctx->ndims)
+ populate_array_check_dimension(ctx, ndim,
+ JSONOID, state->lex->input, NULL);
+}
+
+/* json array element start handler for populate_array_json() */
+static void
+populate_array_element_start(void *_state, bool isnull)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (ndim <= 0 || ndim == state->ctx->ndims)
+ {
+ /* remember current array element start */
+ state->element_start = state->lex->token_start;
+ state->element_scalar = NULL;
}
- else
+}
+
+/* json array element end handler for populate_array_json() */
+static void
+populate_array_element_end(void *_state, bool isnull)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ PopulateArrayContext *ctx = state->ctx;
+ int ndim = state->lex->lex_level;
+
+ Assert(ctx->ndims > 0);
+
+ if (ndim == ctx->ndims)
{
- /* json{b}_to_record case */
- if (PG_ARGISNULL(0))
- PG_RETURN_NULL();
+ char *str;
- if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
- ereport(ERROR,
- (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("function returning record called in context "
- "that cannot accept type record"),
- errhint("Try calling the function in the FROM clause "
- "using a column definition list.")));
+ if (isnull)
+ str = NULL;
+ else if (state->element_scalar)
+ str = state->element_scalar;
+ else
+ {
+ size_t len = state->lex->prev_token_terminator -
+ state->element_start;
+
+ str = palloc(len + 1);
+ memcpy(str, state->element_start, len);
+ str[len] = 0;
+ }
+
+ populate_array_element(ctx, ndim, JSONOID, str,
+ state->element_scalar &&
+ state->element_scalar_is_string, NULL);
}
+}
- if (jtype == JSONOID)
+/* json scalar handler for populate_array_json() */
+static void
+populate_array_scalar(void *_state, char *token, JsonTokenType tokentype)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ PopulateArrayContext *ctx = state->ctx;
+ int ndim = state->lex->lex_level;
+
+ if (ctx->ndims <= 0)
+ populate_array_assign_ndims(ctx, ndim);
+ else if (ndim < ctx->ndims)
+ populate_array_report_expected_array(ctx, ndim);
+
+ if (ndim == ctx->ndims)
{
- /* just get the text */
- json = PG_GETARG_TEXT_P(json_arg_num);
+ /* remember scalar element token */
+ state->element_scalar = token;
+ state->element_scalar_is_string = tokentype == JSON_TOKEN_STRING;
+ }
+}
+
+/* parse json array and populate array */
+static void
+populate_array_json(PopulateArrayContext *ctx, char *json)
+{
+ PopulateArrayState state;
+ JsonSemAction sem;
+
+ state.lex = makeJsonLexContextCstringLen(json, strlen(json), true);
+ state.ctx = ctx;
+
+ memset(&sem, 0, sizeof(sem));
+ sem.semstate = (void *) &state;
+ sem.object_start = populate_array_object_start;
+ sem.array_end = populate_array_array_end;
+ sem.array_element_start = populate_array_element_start;
+ sem.array_element_end = populate_array_element_end;
+ sem.scalar = populate_array_scalar;
- json_hash = get_json_object_as_hash(json, funcname);
+ pg_parse_json(state.lex, &sem);
+ /* number of dimensions should be already known */
+ Assert(ctx->ndims > 0 && ctx->dims);
+
+ pfree(state.lex);
+}
+
+/*
+ * populate_array_dim_jsonb() -- Iterate recursively through jsonb sub-array
+ * elements and accumulate result using given ArrayBuildState.
+ */
+static void
+populate_array_dim_jsonb(PopulateArrayContext *ctx, /* context */
+ JsonbValue *jbv, /* jsonb sub-array */
+ int ndim) /* current dimension */
+{
+ JsonbContainer *jbc = jbv->val.binary.data;
+ JsonbIterator *it;
+ JsonbIteratorToken tok;
+ JsonbValue val;
+
+ check_stack_depth();
+
+ if (jbv->type != jbvBinary || !JsonContainerIsArray(jbc))
+ populate_array_report_expected_array(ctx, ndim - 1);
+
+ Assert(!JsonContainerIsScalar(jbc));
+
+ it = JsonbIteratorInit(jbc);
+
+ tok = JsonbIteratorNext(&it, &val, true);
+ Assert(tok == WJB_BEGIN_ARRAY);
+
+ tok = JsonbIteratorNext(&it, &val, true);
+
+ /*
+ * If the number of dimensions is yet unknown and
+ * we have found end of array or the first child element is not an array
+ * then assign number of dimensions now.
+ */
+ if (ctx->ndims <= 0 &&
+ (tok == WJB_END_ARRAY ||
+ (tok == WJB_ELEM &&
+ (val.type != jbvBinary ||
+ !JsonContainerIsArray(val.val.binary.data)))))
+ populate_array_assign_ndims(ctx, ndim);
+
+ /* process all array elements */
+ while (tok == WJB_ELEM)
+ {
/*
- * if the input json is empty, we can only skip the rest if we were
- * passed in a non-null record, since otherwise there may be issues
- * with domain nulls.
 * Recurse only if the number of dimensions is yet unknown or
+ * if it is not the innermost dimension.
*/
- if (hash_get_num_entries(json_hash) == 0 && rec)
+ if (ctx->ndims > 0 && ndim >= ctx->ndims)
+ populate_array_element(ctx, ndim, JSONBOID, NULL, false, &val);
+ else
{
- hash_destroy(json_hash);
- ReleaseTupleDesc(tupdesc);
- PG_RETURN_POINTER(rec);
- }
- }
- else
- {
- jb = PG_GETARG_JSONB(json_arg_num);
+ /* populate child sub-array */
+ populate_array_dim_jsonb(ctx, &val, ndim + 1);
- /* same logic as for json */
- if (JB_ROOT_COUNT(jb) == 0 && rec)
- {
- ReleaseTupleDesc(tupdesc);
- PG_RETURN_POINTER(rec);
+ /* number of dimensions should be already known */
+ Assert(ctx->ndims > 0 && ctx->dims);
+
+ populate_array_check_dimension(ctx, ndim, JSONBOID, NULL, jbv);
}
+
+ tok = JsonbIteratorNext(&it, &val, true);
}
- ncolumns = tupdesc->natts;
+ Assert(tok == WJB_END_ARRAY);
- if (rec)
- {
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = rec;
- }
+ /* free iterator, iterating until WJB_DONE */
+ tok = JsonbIteratorNext(&it, &val, true);
+ Assert(tok == WJB_DONE && !it);
+}
+
+/* populate recursively json/jsonb array */
+static Datum
+populate_array(ArrayIOData *aio, /* metadata cache */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ Oid jtype, /* json/jsonb */
+ char *json, /* valid only if json */
+ JsonbValue *jbv) /* valid only if jsonb */
+{
+ PopulateArrayContext ctx;
+ Datum result;
+ int *lbs;
+ int i;
+
+ ctx.aio = aio;
+ ctx.mcxt = mcxt;
+ ctx.acxt = CurrentMemoryContext;
+ ctx.astate = initArrayResult(aio->element_type, ctx.acxt, true);
+ ctx.colname = colname;
+ ctx.ndims = 0;
/*
- * We arrange to look up the needed I/O info just once per series of
- * calls, assuming the record type doesn't change underneath us.
+ * If the number of dimensions is already known, assign it and allocate
+ * dimensions array now, else allocate it later when it becomes known.
*/
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- if (my_extra == NULL ||
- my_extra->ncolumns != ncolumns)
+ if (aio->ndims > 0)
+ populate_array_assign_ndims(&ctx, aio->ndims);
+ else
{
- fcinfo->flinfo->fn_extra =
- MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- my_extra->record_type = InvalidOid;
- my_extra->record_typmod = 0;
- my_extra->ncolumns = ncolumns;
- MemSet(my_extra->columns, 0, sizeof(ColumnIOData) * ncolumns);
+ ctx.dims = NULL;
+ ctx.sizes = NULL;
}
- if (have_record_arg && (my_extra->record_type != tupType ||
- my_extra->record_typmod != tupTypmod))
+ if (jtype == JSONOID)
+ populate_array_json(&ctx, json);
+ else
{
- MemSet(my_extra, 0,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra->record_type = tupType;
- my_extra->record_typmod = tupTypmod;
- my_extra->ncolumns = ncolumns;
+ populate_array_dim_jsonb(&ctx, jbv, 1);
+ ctx.dims[0] = ctx.sizes[0];
}
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
+ Assert(ctx.ndims > 0);
+
+ lbs = palloc(sizeof(int) * ctx.ndims);
+
+ for (i = 0; i < ctx.ndims; i++)
+ lbs[i] = 1;
+
+ result = makeMdArrayResult(ctx.astate, ctx.ndims, ctx.dims, lbs,
+ ctx.acxt, true);
+
+ pfree(ctx.dims);
+ pfree(ctx.sizes);
+ pfree(lbs);
+
+ return result;
+}
- if (rec)
+/* populate recursively composite (row type) value from json/jsonb */
+static Datum
+populate_composite(CompositeIOData *io, /* metadata cache */
+ Oid typeid, /* row type id */
+ int32 typmod, /* row type modifier */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ HeapTupleHeader defaultval, /* default row value if any */
+ Oid jtype, /* json/jsonb */
+ char *json, /* valid only if json */
+ int jsonlen, /* valid only if json */
+ JsonbValue *jbv) /* valid only if jsonb only */
+{
+ HeapTupleHeader tuple;
+ HTAB *json_hash = NULL;
+ JsonbContainer *container = NULL;
+
+ /* acquire cached tuple descriptor */
+ if (!io->tupdesc ||
+ io->tupdesc->tdtypeid != typeid ||
+ io->tupdesc->tdtypmod != typmod)
{
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
+ TupleDesc tupdesc = lookup_rowtype_tupdesc(typeid, typmod);
+ MemoryContext oldcxt;
+
+ if (io->tupdesc)
+ FreeTupleDesc(io->tupdesc);
+
+ /* copy tuple desc without constraints into cache memory context */
+ oldcxt = MemoryContextSwitchTo(mcxt);
+ io->tupdesc = CreateTupleDescCopy(tupdesc);
+ MemoryContextSwitchTo(oldcxt);
+
+ ReleaseTupleDesc(tupdesc);
+ }
+
+ /* prepare input value */
+ if (jtype == JSONOID)
+ {
+ /* convert plain-text json into a hash table */
+ json_hash = get_json_object_as_hash(json, jsonlen,
+ "populate_composite");
}
else
{
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
+ if (jbv->type != jbvBinary ||
+ !JsonContainerIsObject(jbv->val.binary.data))
+ elog(ERROR,
+ colname ? "expected json object in value of key \"%s\""
+ : "expected json object", colname);
+ container = jbv->val.binary.data;
}
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- JsonbValue *v = NULL;
- JsonHashEntry *hashentry = NULL;
+ /* populate resulting record tuple */
+ tuple = populate_record(io->tupdesc, &io->recordio, defaultval, mcxt,
+ jtype, json_hash, container);
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
+ if (jtype == JSONOID)
+ hash_destroy(json_hash);
- if (jtype == JSONOID)
+ return HeapTupleHeaderGetDatum(tuple);
+}
+
+/* populate non-null scalar value from json/jsonb value */
+static Datum
+populate_scalar(ScalarIOData *io, /* metadata cache */
+ Oid typid, /* scalar type id */
+ int32 typmod, /* scalar type modifier */
+ Oid jtype, /* json/jsonb */
+ char *json, /* valid only if json */
+ bool json_is_string, /* valid only if json */
+ JsonbValue *jbv) /* valid only if jsonb */
+{
+ Datum res;
+ char *str = NULL;
+
+ if (jtype == JSONOID)
+ {
+ /* already done the hard work in the json case */
+ if ((typid == JSONOID || typid == JSONBOID) && json_is_string)
{
- hashentry = hash_search(json_hash,
- NameStr(tupdesc->attrs[i]->attname),
- HASH_FIND, NULL);
+ /*
+ * Add quotes around string value (should be already escaped)
+ * if converting to json or jsonb.
+ */
+ size_t len = strlen(json);
+
+ str = palloc(len + 3);
+ str[0] = '"';
+ memcpy(&str[1], json, len);
+ str[len + 1] = '"';
+ str[len + 2] = 0;
}
else
{
- char *key = NameStr(tupdesc->attrs[i]->attname);
-
- v = findJsonbValueFromContainerLen(&jb->root, JB_FOBJECT, key,
- strlen(key));
+ str = json;
}
-
- /*
- * we can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (((jtype == JSONOID && hashentry == NULL) ||
- (jtype == JSONBOID && v == NULL)) && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
+ }
+ else
+ {
+ if (typid == JSONBOID)
{
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- fcinfo->flinfo->fn_mcxt);
- column_info->column_type = column_type;
+ Jsonb *jsonb = JsonbValueToJsonb(jbv); /* directly use jsonb */
+ return JsonbGetDatum(jsonb);
}
- if ((jtype == JSONOID && (hashentry == NULL || hashentry->isnull)) ||
- (jtype == JSONBOID && (v == NULL || v->type == jbvNull)))
+ /* convert jsonb to string for typio call */
+ else if (typid == JSONOID && jbv->type != jbvBinary)
{
/*
- * need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
+ * Convert scalar jsonb (non-scalars are passed here as jbvBinary)
+ * to json string, preserving quotes around top-level strings.
*/
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
+ Jsonb *jsonb = JsonbValueToJsonb(jbv);
+ str = JsonbToCString(NULL, &jsonb->root, VARSIZE(jsonb));
}
+ else if (jbv->type == jbvString) /* quotes are stripped */
+ str = pnstrdup(jbv->val.string.val, jbv->val.string.len);
+ else if (jbv->type == jbvBool)
+ str = pstrdup(jbv->val.boolean ? "true" : "false");
+ else if (jbv->type == jbvNumeric)
+ str = DatumGetCString(DirectFunctionCall1(numeric_out,
+ PointerGetDatum(jbv->val.numeric)));
+ else if (jbv->type == jbvBinary)
+ str = JsonbToCString(NULL, jbv->val.binary.data,
+ jbv->val.binary.len);
else
+ elog(ERROR, "unrecognized jsonb type: %d", (int) jbv->type);
+ }
+
+ res = InputFunctionCall(&io->typiofunc, str, io->typioparam, typmod);
+
+ /* free temporary buffer */
+ if (str != json)
+ pfree(str);
+
+ return res;
+}
+
+/* prepare column metadata cache for the given type */
+static void
+prepare_column_cache(ColumnIOData *column, /* metadata cache */
+ Oid typid, /* column type id */
+ int32 typmod, /* column type modifier */
+ int32 ndims, /* number of array dimensions */
+ MemoryContext mcxt, /* cache memory context */
+ Oid jtype) /* json/jsonb */
+{
+ /* if the type is a domain, get its base type id and type modifier */
+ int32 base_typmod = typmod;
+ Oid base_typid = getBaseTypeAndTypmod(typid, &base_typmod);
+ bool type_is_domain = base_typid != typid;
+
+ if (type_is_rowtype(base_typid))
+ {
+ column->type_category = 'c'; /* composite */
+ column->io.composite.recordio = NULL;
+ column->io.composite.tupdesc = NULL;
+ }
+ else
+ {
+ Oid element_type = get_element_type(base_typid);
+
+ if (OidIsValid(element_type))
{
- char *s = NULL;
+ /* type is an array */
+ int32 base_ndims = ndims;
- if (jtype == JSONOID)
+ if (type_is_domain)
{
- /* already done the hard work in the json case */
- s = hashentry->val;
+ Assert(ndims == 0); /* arrays of domains are not supported */
+
+ /* get number of dimensions of domain array */
+ base_ndims = get_type_ndims(typid);
}
- else
+
+ column->type_category = 'a'; /* array */
+ column->io.array.element_info =
+ MemoryContextAllocZero(mcxt, sizeof(ColumnIOData));
+ column->io.array.element_type = element_type;
+ /* array element typmod stored in attribute typmod */
+ column->io.array.element_typmod = base_typmod;
+ column->io.array.ndims = base_ndims;
+ }
+ else
+ {
+ /* type is a scalar */
+ column->type_category = 's'; /* scalar */
+
+ /* don't need input function when converting from jsonb to jsonb */
+ if (jtype != JSONBOID || base_typid != JSONBOID)
{
- if (v->type == jbvString)
- s = pnstrdup(v->val.string.val, v->val.string.len);
- else if (v->type == jbvBool)
- s = pnstrdup((v->val.boolean) ? "t" : "f", 1);
- else if (v->type == jbvNumeric)
- s = DatumGetCString(DirectFunctionCall1(numeric_out,
- PointerGetDatum(v->val.numeric)));
- else if (v->type == jbvBinary)
- s = JsonbToCString(NULL, (JsonbContainer *) v->val.binary.data, v->val.binary.len);
- else
- elog(ERROR, "unrecognized jsonb type: %d", (int) v->type);
- }
+ Oid typioproc;
- values[i] = InputFunctionCall(&column_info->proc, s,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
+ getTypeInputInfo(base_typid, &typioproc,
+ &column->io.scalar.typioparam);
+ fmgr_info_cxt(typioproc, &column->io.scalar.typiofunc, mcxt);
+ }
}
}
- rettuple = heap_form_tuple(tupdesc, values, nulls);
+ column->typid = typid;
+ column->typmod = typmod;
+ column->ndims = ndims;
+ column->type_is_domain = type_is_domain;
+ column->domain_info = NULL;
+}
- ReleaseTupleDesc(tupdesc);
+/* populate recursively a record field or an array element from json/jsonb value */
+static Datum
+populate_record_field(ColumnIOData *col, /* metadata cache */
+ Oid typid, /* field type id */
+ int32 typmod, /* field type modifier */
+ int32 ndims, /* number of array dimensions */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ Datum defaultval, /* default record value if any */
+ Oid jtype, /* json/jsonb */
+ char *json, /* valid only if json */
+ bool json_is_string, /* valid only if json */
+ JsonbValue *jval, /* valid only if jsonb */
+ bool *isnull) /* value is NULL */
+{
+ Datum res;
- if (json_hash)
- hash_destroy(json_hash);
+ check_stack_depth();
+
+ /* prepare column metadata cache for the given type */
+ if (col->typid != typid || col->typmod != typmod || col->ndims != ndims)
+ prepare_column_cache(col, typid, typmod, ndims, mcxt, jtype);
+
+ if (jtype == JSONOID ? json == NULL : jval == NULL || jval->type == jbvNull)
+ {
+ *isnull = true;
+ res = PointerGetDatum(NULL);
+ }
+ else if (jtype == JSONOID && json_is_string && col->type_category != 's')
+ {
+ /* don't convert a string json value to a non-scalar */
+ elog(ERROR, colname ? "expected json %s in value of key \"%s\""
+ : "expected json %s",
+ col->type_category == 'a' ? "array" : "object", colname);
+ }
+ else
+ {
+ *isnull = false;
+
+ switch (col->type_category)
+ {
+ case 's':
+ res = populate_scalar(&col->io.scalar, typid, typmod,
+ jtype, json, json_is_string, jval);
+ break;
+
+ case 'a':
+ res = populate_array(&col->io.array, colname, mcxt,
+ jtype, json, jval);
+ break;
+
+ case 'c':
+ res = populate_composite(&col->io.composite, typid, typmod,
+ colname, mcxt,
+ DatumGetPointer(defaultval)
+ ? DatumGetHeapTupleHeader(defaultval)
+ : NULL,
+ jtype,
+ json,
+ jtype == JSONOID ? strlen(json) : 0,
+ jval);
+ break;
+
+ default:
+ elog(ERROR, "unrecognized type category '%c'",
+ col->type_category);
+ break;
+ }
+ }
+
+ if (col->type_is_domain)
+ domain_check(res, *isnull, col->typid, &col->domain_info, mcxt);
+
+ return res;
+}
+
+static RecordIOData *
+allocate_record_info(MemoryContext mcxt, int ncolumns)
+{
+ RecordIOData *data = (RecordIOData *)
+ MemoryContextAlloc(mcxt,
+ offsetof(RecordIOData, columns) +
+ ncolumns * sizeof(ColumnIOData));
+
+ data->record_type = InvalidOid;
+ data->record_typmod = 0;
+ data->ncolumns = ncolumns;
+ MemSet(data->columns, 0, sizeof(ColumnIOData) * ncolumns);
+
+ return data;
+}
+
+/* populate record tuple from json/jsonb value */
+static HeapTupleHeader
+populate_record(TupleDesc tupdesc, /* record tuple descriptor */
+ RecordIOData **precord, /* metadata cache location */
+ HeapTupleHeader defaultval, /* default record value if any */
+ MemoryContext mcxt, /* cache memory context */
+ Oid jtype, /* json/jsonb */
+ HTAB *json_hash, /* valid only if json */
+ JsonbContainer *cont) /* valid only if jsonb */
+{
+ RecordIOData *record = *precord;
+ Datum *values;
+ bool *nulls;
+ HeapTuple res;
+ int ncolumns = tupdesc->natts;
+ int i;
+
+ /*
+ * if the input json is empty, we can only skip the rest if we were
+ * passed in a non-null record, since otherwise there may be issues
+ * with domain nulls.
+ */
+ if ((jtype == JSONOID ? hash_get_num_entries(json_hash) == 0
+ : JsonContainerSize(cont) == 0) && defaultval)
+ return defaultval;
+
+ /* (re)allocate metadata cache */
+ if (record == NULL ||
+ record->ncolumns != ncolumns)
+ *precord = record = allocate_record_info(mcxt, ncolumns);
+
+ /* invalidate metadata cache if the record type has changed */
+ if (record->record_type != tupdesc->tdtypeid ||
+ record->record_typmod != tupdesc->tdtypmod)
+ {
+ MemSet(record, 0, offsetof(RecordIOData, columns) +
+ ncolumns * sizeof(ColumnIOData));
+ record->record_type = tupdesc->tdtypeid;
+ record->record_typmod = tupdesc->tdtypmod;
+ record->ncolumns = ncolumns;
+ }
+
+ values = (Datum *) palloc(ncolumns * sizeof(Datum));
+ nulls = (bool *) palloc(ncolumns * sizeof(bool));
+
+ if (defaultval)
+ {
+ HeapTupleData tuple;
+
+ /* Build a temporary HeapTuple control structure */
+ tuple.t_len = HeapTupleHeaderGetDatumLength(defaultval);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = defaultval;
+
+ /* Break down the tuple into fields */
+ heap_deform_tuple(&tuple, tupdesc, values, nulls);
+ }
+ else
+ {
+ for (i = 0; i < ncolumns; ++i)
+ {
+ values[i] = (Datum) 0;
+ nulls[i] = true;
+ }
+ }
+
+ for (i = 0; i < ncolumns; ++i)
+ {
+ Form_pg_attribute att = tupdesc->attrs[i];
+ char *colname = NameStr(att->attname);
+ JsonbValue *val = NULL;
+ JsonHashEntry *hashentry = NULL;
+
+ /* Ignore dropped columns in datatype */
+ if (att->attisdropped)
+ {
+ nulls[i] = true;
+ continue;
+ }
+
+ if (jtype == JSONOID)
+ hashentry = hash_search(json_hash, colname, HASH_FIND, NULL);
+ else
+ val = findJsonbValueFromContainerLen(cont, JB_FOBJECT, colname,
+ strlen(colname));
+
+ /*
+ * we can't just skip here if the key wasn't found since we might have
+ * a domain to deal with. If we were passed in a non-null record
+ * datum, we assume that the existing values are valid (if they're
+ * not, then it's not our fault), but if we were passed in a null,
+ * then every field which we don't populate needs to be run through
+ * the input function just in case it's a domain type.
+ */
+ if ((jtype == JSONOID ? hashentry == NULL : val == NULL) && defaultval)
+ continue;
+
+ values[i] = populate_record_field(&record->columns[i],
+ att->atttypid,
+ att->atttypmod,
+ att->attndims,
+ colname,
+ mcxt,
+ nulls[i] ? PointerGetDatum(NULL)
+ : values[i],
+ jtype,
+ hashentry && !hashentry->isnull ?
+ hashentry->val : NULL,
+ hashentry && hashentry->isstring,
+ val,
+ &nulls[i]);
+ }
+
+ res = heap_form_tuple(tupdesc, values, nulls);
+
+ pfree(values);
+ pfree(nulls);
+
+ return res->t_data;
+}
+
+/* structure to cache metadata needed for populate_record_worker() */
+typedef struct PopulateRecordCache
+{
+ Oid argtype; /* verified row type of the first argument */
+ CompositeIOData io; /* metadata cache for populate_composite() */
+} PopulateRecordCache;
+
+static Datum
+populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
+ bool have_record_arg)
+{
+ int json_arg_num = have_record_arg ? 1 : 0;
+ Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
+ text *json;
+ Jsonb *jb;
+ JsonbValue jbv;
+ HeapTupleHeader rec = NULL;
+ Oid tupType;
+ int32 tupTypmod;
+ TupleDesc tupdesc = NULL;
+ Datum rettuple;
+ MemoryContext fnmcxt = fcinfo->flinfo->fn_mcxt;
+ PopulateRecordCache *cache = fcinfo->flinfo->fn_extra;
+
+ Assert(jtype == JSONOID || jtype == JSONBOID);
+
+ /*
+ * We arrange to look up the needed I/O info just once per series of
+ * calls, assuming the record type doesn't change underneath us.
+ */
+ if (!cache)
+ fcinfo->flinfo->fn_extra = cache =
+ MemoryContextAllocZero(fnmcxt, sizeof(*cache));
+
+ if (have_record_arg)
+ {
+ Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+
+ if (cache->argtype != argtype)
+ {
+ if (!type_is_rowtype(argtype))
+ ereport(ERROR,
+ (errcode(ERRCODE_DATATYPE_MISMATCH),
+ errmsg("first argument of %s must be a row type",
+ funcname)));
+
+ cache->argtype = argtype;
+ }
+
+ if (PG_ARGISNULL(0))
+ {
+ if (PG_ARGISNULL(1))
+ PG_RETURN_NULL();
+
+ /*
+ * have no tuple to look at, so the only source of type info is
+ * the argtype. The lookup_rowtype_tupdesc call below will error
+ * out if we don't have a known composite type oid here.
+ */
+ tupType = argtype;
+ tupTypmod = -1;
+ }
+ else
+ {
+ rec = PG_GETARG_HEAPTUPLEHEADER(0);
+
+ if (PG_ARGISNULL(1))
+ PG_RETURN_POINTER(rec);
+
+ /* Extract type info from the tuple itself */
+ tupType = HeapTupleHeaderGetTypeId(rec);
+ tupTypmod = HeapTupleHeaderGetTypMod(rec);
+ }
+ }
+ else
+ {
+ /* json{b}_to_record case */
+ if (PG_ARGISNULL(0))
+ PG_RETURN_NULL();
+
+ if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
+ ereport(ERROR,
+ (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+ errmsg("function returning record called in context "
+ "that cannot accept type record"),
+ errhint("Try calling the function in the FROM clause "
+ "using a column definition list.")));
+
+ Assert(tupdesc);
- PG_RETURN_DATUM(HeapTupleGetDatum(rettuple));
+ /*
+ * Add tupdesc to the cache and set the appropriate values of
+ * tupType/tupTypmod for proper cache usage in populate_composite().
+ */
+ cache->io.tupdesc = tupdesc;
+
+ tupType = tupdesc->tdtypeid;
+ tupTypmod = tupdesc->tdtypmod;
+ }
+
+ if (jtype == JSONOID)
+ {
+ json = PG_GETARG_TEXT_P(json_arg_num);
+ }
+ else
+ {
+ jb = PG_GETARG_JSONB(json_arg_num);
+
+ /* fill binary jsonb value pointing to jb */
+ jbv.type = jbvBinary;
+ jbv.val.binary.data = &jb->root;
+ jbv.val.binary.len = VARSIZE(jb) - VARHDRSZ;
+ }
+
+ rettuple = populate_composite(&cache->io,
+ tupType,
+ tupTypmod,
+ NULL,
+ fnmcxt,
+ rec,
+ jtype,
+ jtype == JSONOID ? VARDATA(json) : NULL,
+ jtype == JSONOID ? VARSIZE(json) - VARHDRSZ : 0,
+ jtype == JSONOID ? NULL : &jbv);
+
+ if (tupdesc)
+ {
+ cache->io.tupdesc = NULL;
+ ReleaseTupleDesc(tupdesc);
+ }
+
+ PG_RETURN_DATUM(rettuple);
}
/*
@@ -2373,12 +3101,12 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
* decompose a json object into a hash table.
*/
static HTAB *
-get_json_object_as_hash(text *json, const char *funcname)
+get_json_object_as_hash(char *json, int len, const char *funcname)
{
HASHCTL ctl;
HTAB *tab;
JHashState *state;
- JsonLexContext *lex = makeJsonLexContext(json, true);
+ JsonLexContext *lex = makeJsonLexContextCstringLen(json, len, true);
JsonSemAction *sem;
memset(&ctl, 0, sizeof(ctl));
@@ -2460,6 +3188,7 @@ hash_object_field_end(void *state, char *fname, bool isnull)
*/
hashentry->isnull = isnull;
+
if (_state->save_json_start != NULL)
{
int len = _state->lex->prev_token_terminator - _state->save_json_start;
@@ -2468,11 +3197,13 @@ hash_object_field_end(void *state, char *fname, bool isnull)
memcpy(val, _state->save_json_start, len);
val[len] = '\0';
hashentry->val = val;
+ hashentry->isstring = false;
}
else
{
/* must have had a scalar instead */
hashentry->val = _state->saved_scalar;
+ hashentry->isstring = _state->saved_scalar_is_string;
}
}
@@ -2498,7 +3229,10 @@ hash_scalar(void *state, char *token, JsonTokenType tokentype)
errmsg("cannot call %s on a scalar", _state->function_name)));
if (_state->lex->lex_level == 1)
+ {
_state->saved_scalar = token;
+ _state->saved_scalar_is_string = tokentype == JSON_TOKEN_STRING;
+ }
}
@@ -2537,121 +3271,24 @@ json_to_recordset(PG_FUNCTION_ARGS)
}
static void
-make_row_from_rec_and_jsonb(Jsonb *element, PopulateRecordsetState *state)
+populate_recordset_record(PopulateRecordsetState *state,
+ Oid jtype, HTAB *json_hash, JsonbContainer *jsonb)
{
- Datum *values;
- bool *nulls;
- int i;
- RecordIOData *my_extra = state->my_extra;
- int ncolumns = my_extra->ncolumns;
- TupleDesc tupdesc = state->ret_tdesc;
- HeapTupleHeader rec = state->rec;
- HeapTuple rettuple;
-
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
-
- if (state->rec)
- {
- HeapTupleData tuple;
-
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(state->rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = state->rec;
-
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
- }
- else
- {
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
- }
-
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- JsonbValue *v = NULL;
- char *key;
-
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
-
- key = NameStr(tupdesc->attrs[i]->attname);
-
- v = findJsonbValueFromContainerLen(&element->root, JB_FOBJECT,
- key, strlen(key));
-
- /*
- * We can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (v == NULL && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- state->fn_mcxt);
- column_info->column_type = column_type;
- }
- if (v == NULL || v->type == jbvNull)
- {
- /*
- * Need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
- */
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
- }
- else
- {
- char *s = NULL;
-
- if (v->type == jbvString)
- s = pnstrdup(v->val.string.val, v->val.string.len);
- else if (v->type == jbvBool)
- s = pnstrdup((v->val.boolean) ? "t" : "f", 1);
- else if (v->type == jbvNumeric)
- s = DatumGetCString(DirectFunctionCall1(numeric_out,
- PointerGetDatum(v->val.numeric)));
- else if (v->type == jbvBinary)
- s = JsonbToCString(NULL, (JsonbContainer *) v->val.binary.data, v->val.binary.len);
- else
- elog(ERROR, "unrecognized jsonb type: %d", (int) v->type);
-
- values[i] = InputFunctionCall(&column_info->proc, s,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
- }
- }
-
- rettuple = heap_form_tuple(tupdesc, values, nulls);
-
- tuplestore_puttuple(state->tuple_store, rettuple);
+ HeapTupleData tuple;
+ HeapTupleHeader tuphead = populate_record(state->ret_tdesc,
+ state->my_extra,
+ state->rec,
+ state->fn_mcxt,
+ jtype,
+ json_hash,
+ jsonb);
+
+ tuple.t_len = HeapTupleHeaderGetDatumLength(tuphead);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = tuphead;
+
+ tuplestore_puttuple(state->tuple_store, &tuple);
}
/*
@@ -2665,12 +3302,8 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
ReturnSetInfo *rsi;
MemoryContext old_cxt;
- Oid tupType;
- int32 tupTypmod;
HeapTupleHeader rec;
TupleDesc tupdesc;
- RecordIOData *my_extra;
- int ncolumns;
PopulateRecordsetState *state;
if (have_record_arg)
@@ -2716,38 +3349,6 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
else
rec = PG_GETARG_HEAPTUPLEHEADER(0);
- tupType = tupdesc->tdtypeid;
- tupTypmod = tupdesc->tdtypmod;
- ncolumns = tupdesc->natts;
-
- /*
- * We arrange to look up the needed I/O info just once per series of
- * calls, assuming the record type doesn't change underneath us.
- */
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- if (my_extra == NULL ||
- my_extra->ncolumns != ncolumns)
- {
- fcinfo->flinfo->fn_extra =
- MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- my_extra->record_type = InvalidOid;
- my_extra->record_typmod = 0;
- }
-
- if (my_extra->record_type != tupType ||
- my_extra->record_typmod != tupTypmod)
- {
- MemSet(my_extra, 0,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra->record_type = tupType;
- my_extra->record_typmod = tupTypmod;
- my_extra->ncolumns = ncolumns;
- }
-
state = palloc0(sizeof(PopulateRecordsetState));
/* make these in a sufficiently long-lived memory context */
@@ -2760,7 +3361,7 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
MemoryContextSwitchTo(old_cxt);
state->function_name = funcname;
- state->my_extra = my_extra;
+ state->my_extra = (RecordIOData **) &fcinfo->flinfo->fn_extra;
state->rec = rec;
state->fn_mcxt = fcinfo->flinfo->fn_mcxt;
@@ -2811,14 +3412,15 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
if (r == WJB_ELEM)
{
- Jsonb *element = JsonbValueToJsonb(&v);
-
- if (!JB_ROOT_IS_OBJECT(element))
+ if (v.type != jbvBinary ||
+ !JsonContainerIsObject(v.val.binary.data))
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("argument of %s must be an array of objects",
funcname)));
- make_row_from_rec_and_jsonb(element, state);
+
+ populate_recordset_record(state, JSONBOID, NULL,
+ v.val.binary.data);
}
}
}
@@ -2862,115 +3464,16 @@ static void
populate_recordset_object_end(void *state)
{
PopulateRecordsetState *_state = (PopulateRecordsetState *) state;
- HTAB *json_hash = _state->json_hash;
- Datum *values;
- bool *nulls;
- int i;
- RecordIOData *my_extra = _state->my_extra;
- int ncolumns = my_extra->ncolumns;
- TupleDesc tupdesc = _state->ret_tdesc;
- JsonHashEntry *hashentry;
- HeapTupleHeader rec = _state->rec;
- HeapTuple rettuple;
/* Nested objects require no special processing */
if (_state->lex->lex_level > 1)
return;
/* Otherwise, construct and return a tuple based on this level-1 object */
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
-
- if (_state->rec)
- {
- HeapTupleData tuple;
-
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(_state->rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = _state->rec;
-
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
- }
- else
- {
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
- }
-
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- char *value;
-
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
-
- hashentry = hash_search(json_hash,
- NameStr(tupdesc->attrs[i]->attname),
- HASH_FIND, NULL);
-
- /*
- * we can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (hashentry == NULL && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- _state->fn_mcxt);
- column_info->column_type = column_type;
- }
- if (hashentry == NULL || hashentry->isnull)
- {
- /*
- * need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
- */
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
- }
- else
- {
- value = hashentry->val;
-
- values[i] = InputFunctionCall(&column_info->proc, value,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
- }
- }
-
- rettuple = heap_form_tuple(tupdesc, values, nulls);
-
- tuplestore_puttuple(_state->tuple_store, rettuple);
+ populate_recordset_record(_state, JSONOID, _state->json_hash, NULL);
/* Done with hash for this object */
- hash_destroy(json_hash);
+ hash_destroy(_state->json_hash);
_state->json_hash = NULL;
}
@@ -3066,11 +3569,13 @@ populate_recordset_object_field_end(void *state, char *fname, bool isnull)
memcpy(val, _state->save_json_start, len);
val[len] = '\0';
hashentry->val = val;
+ hashentry->isstring = false;
}
else
{
/* must have had a scalar instead */
hashentry->val = _state->saved_scalar;
+ hashentry->isstring = _state->saved_scalar_is_string;
}
}
diff --git a/src/backend/utils/cache/lsyscache.c b/src/backend/utils/cache/lsyscache.c
index 13ae6ad..641005b 100644
--- a/src/backend/utils/cache/lsyscache.c
+++ b/src/backend/utils/cache/lsyscache.c
@@ -2492,6 +2492,26 @@ get_array_type(Oid typid)
}
/*
+ * get_type_ndims
+ *
+ * Given the type OID, get the typndims.
+ */
+int32
+get_type_ndims(Oid typid)
+{
+ HeapTuple tp;
+ int32 result = -1;
+
+ tp = SearchSysCache1(TYPEOID, ObjectIdGetDatum(typid));
+ if (HeapTupleIsValid(tp))
+ {
+ result = ((Form_pg_type) GETSTRUCT(tp))->typndims;
+ ReleaseSysCache(tp);
+ }
+ return result;
+}
+
+/*
* get_promoted_array_type
*
* The "promoted" type is what you'd get from an ARRAY(SELECT ...)
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index 470d5b1..850a825 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -218,6 +218,11 @@ typedef struct
#define JB_ROOT_IS_OBJECT(jbp_) ( *(uint32*) VARDATA(jbp_) & JB_FOBJECT)
#define JB_ROOT_IS_ARRAY(jbp_) ( *(uint32*) VARDATA(jbp_) & JB_FARRAY)
+#define JsonContainerSize(jc) ((jc)->header & JB_CMASK)
+#define JsonContainerIsObject(jc) ((jc)->header & JB_FOBJECT)
+#define JsonContainerIsArray(jc) ((jc)->header & JB_FARRAY)
+#define JsonContainerIsScalar(jc) ((jc)->header & JB_FSCALAR)
+
enum jbvType
{
diff --git a/src/include/utils/lsyscache.h b/src/include/utils/lsyscache.h
index dcb8980..d25cac7 100644
--- a/src/include/utils/lsyscache.h
+++ b/src/include/utils/lsyscache.h
@@ -136,6 +136,7 @@ extern Oid get_element_type(Oid typid);
extern Oid get_array_type(Oid typid);
extern Oid get_promoted_array_type(Oid typid);
extern Oid get_base_element_type(Oid typid);
+extern int32 get_type_ndims(Oid typid);
extern void getTypeInputInfo(Oid type, Oid *typInput, Oid *typIOParam);
extern void getTypeOutputInfo(Oid type, Oid *typOutput, bool *typIsVarlena);
extern void getTypeBinaryInputInfo(Oid type, Oid *typReceive, Oid *typIOParam);
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index efcdc41..a7de8ef 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -1304,6 +1304,31 @@ select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8
-- populate_record
create type jpop as (a text, b int, c timestamp);
+CREATE DOMAIN js_int_not_null AS int NOT NULL;
+CREATE DOMAIN js_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN js_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+CREATE TYPE jsrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d js_int_array_1d,
+ ia2d js_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jpop,
+ reca jpop[]
+);
+CREATE TYPE jsrec_i_not_null AS (
+ i js_int_not_null
+);
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
@@ -1342,6 +1367,327 @@ select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100,200,false]"
+select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{}') q;
+ a | b | c
+---+---+--------------------------
+ x | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"x": 43.2}') q;
+ERROR: domain js_int_not_null does not allow null values
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": null}') q;
+ERROR: domain js_int_not_null does not allow null values
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": 12345}') q;
+ i
+-------
+ 12345
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": null}') q;
+ ia
+----
+
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": 123}') q;
+ERROR: expected json array in value of key "ia"
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [1, "2", null, 4]}') q;
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1, 2], [3, 4]]}') q;
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], 2]}') q;
+ERROR: expected json array in array element [1] of key "ia"
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], [2, 3]]}') q;
+ERROR: malformed json array: "[[1], [2, 3]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": null}') q;
+ ia1
+-----
+
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": 123}') q;
+ERROR: expected json array in value of key "ia1"
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [1, "2", null, 4]}') q;
+ ia1
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [[1, 2, 3]]}') q;
+ERROR: invalid input syntax for integer: "[1, 2, 3]"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": null}') q;
+ ia1d
+------
+
+(1 row)
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": 123}') q;
+ERROR: expected json array in value of key "ia1d"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null, 4]}') q;
+ERROR: value for domain js_int_array_1d violates check constraint "js_int_array_1d_check"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null]}') q;
+ ia1d
+------------
+ {1,2,NULL}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [1, "2", null, 4]}') q;
+ERROR: expected json array in array element [0] of key "ia2"
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+ ia2
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[], []]}') q;
+ ia2
+-----
+ {}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [3]]}') q;
+ERROR: malformed json array: "[[1, 2], [3]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], 3, 4]}') q;
+ERROR: expected json array in array element [1] of key "ia2"
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+ERROR: value for domain js_int_array_2d violates check constraint "js_int_array_2d_check"
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+ ia2d
+----------------------
+ {{1,2,3},{NULL,5,6}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [1, "2", null, 4]}') q;
+ERROR: expected json array in array element [0] of key "ia3"
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+ERROR: expected json array in array element [0][0] of key "ia3"
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+ ia3
+-----
+ {}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+ ia3
+-------------------
+ {{{1,2}},{{3,4}}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+ ia3
+-------------------------------
+ {{{1,2},{3,4}},{{5,6},{7,8}}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+ERROR: malformed json array: "[ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": null}') q;
+ ta
+----
+
+(1 row)
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": 123}') q;
+ERROR: expected json array in value of key "ta"
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [1, "2", null, 4]}') q;
+ ta
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+ ta
+--------------------------------
+ {"[1, 2, 3]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": null}') q;
+ c
+---
+
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaa"}') q;
+ c
+------------
+ aaa
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaa"}') q;
+ c
+------------
+ aaaaaaaaaa
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaaaaa"}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": null}') q;
+ ca
+----
+
+(1 row)
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": 123}') q;
+ERROR: expected json array in value of key "ca"
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [1, "2", null, 4]}') q;
+ ca
+-----------------------------------------------
+ {"1 ","2 ",NULL,"4 "}
+(1 row)
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+ ca
+---------------------------------
+ {"[1, 2, 3] ","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": null}') q;
+ js
+----
+
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": true}') q;
+ js
+------
+ true
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": 123.45}') q;
+ js
+--------
+ 123.45
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "123.45"}') q;
+ js
+----------
+ "123.45"
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "abc"}') q;
+ js
+-------
+ "abc"
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+ js
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ js
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": null}') q;
+ jsb
+-----
+
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": true}') q;
+ jsb
+------
+ true
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": 123.45}') q;
+ jsb
+--------
+ 123.45
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "123.45"}') q;
+ jsb
+----------
+ "123.45"
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "abc"}') q;
+ jsb
+-------
+ "abc"
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+ jsb
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ jsb
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": null}') q;
+ jsa
+-----
+
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": 123}') q;
+ERROR: expected json array in value of key "jsa"
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": [1, "2", null, 4]}') q;
+ jsa
+--------------------
+ {1,"\"2\"",NULL,4}
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+ jsa
+----------------------------------------------------------
+ {"\"aaa\"",NULL,"[1, 2, \"3\", {}]","{ \"k\" : \"v\" }"}
+(1 row)
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": 123}') q;
+ERROR: cannot call populate_composite on a scalar
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": [1, 2]}') q;
+ERROR: cannot call populate_composite on an array
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+ rec
+-----------------------------------
+ (abc,,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": 123}') q;
+ERROR: expected json array in value of key "reca"
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [1, 2]}') q;
+ERROR: cannot call populate_composite on a scalar
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+ reca
+--------------------------------------------------------
+ {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT rec FROM json_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jpop,NULL)::jsrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+ rec
+------------------------------------
+ (abc,3,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
@@ -1408,6 +1754,28 @@ select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,3
{"z":true} | 3 | Fri Jan 20 10:42:53 2012
(2 rows)
+-- test type info caching in json_populate_record()
+CREATE TEMP TABLE jspoptest (js json);
+INSERT INTO jspoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::json
+FROM generate_series(1, 3);
+SELECT (json_populate_record(NULL::jsrec, js)).* FROM jspoptest;
+ i | ia | ia1 | ia2 | ia3 | ia1d | ia2d | t | ta | c | ca | ts | js | jsb | jsa | rec | reca
+---+----+-----+-----+-----+------+------+---+----+---+----+----+----+-----+--------------------+-----------------------------------+--------------------------------------------------------
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(3 rows)
+
+DROP TYPE jsrec;
+DROP TYPE jsrec_i_not_null;
+DROP DOMAIN js_int_not_null;
+DROP DOMAIN js_int_array_1d;
+DROP DOMAIN js_int_array_2d;
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
@@ -1600,11 +1968,11 @@ select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":fa
(2 rows)
select *, c is null as c_is_null
-from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::json)
- as t(a int, b json, c text, x int);
- a | b | c | x | c_is_null
----+-----------------+---+---+-----------
- 1 | {"c":16, "d":2} | | 8 | t
+from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::json)
+ as t(a int, b json, c text, x int, ca char(5)[], ia int[][], r jpop);
+ a | b | c | x | ca | ia | r | c_is_null
+---+-----------------+---+---+-------------------+---------------+------------+-----------
+ 1 | {"c":16, "d":2} | | 8 | {"1 2 ","3 "} | {{1,2},{3,4}} | (aaa,123,) | t
(1 row)
select *, c is null as c_is_null
@@ -1615,6 +1983,49 @@ from json_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::json)
1 | {"c":16, "d":2} | | 8 | t
(1 row)
+select * from json_to_record('{"ia": null}') as x(ia _int4);
+ ia
+----
+
+(1 row)
+
+select * from json_to_record('{"ia": 123}') as x(ia _int4);
+ERROR: expected json array in value of key "ia"
+select * from json_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+select * from json_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from json_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+ERROR: expected json array in array element [1] of key "ia"
+select * from json_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+ERROR: malformed json array: "[[1], [2, 3]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+select * from json_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+ ia2
+---------
+ {1,2,3}
+(1 row)
+
+select * from json_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+ ia2
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from json_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+ ia2
+-----------------
+ {{{1},{2},{3}}}
+(1 row)
+
-- json_strip_nulls
select json_strip_nulls(null);
json_strip_nulls
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index e2cb08a..758adb2 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -1888,6 +1888,31 @@ SELECT * FROM jsonb_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,
-- populate_record
CREATE TYPE jbpop AS (a text, b int, c timestamp);
+CREATE DOMAIN jsb_int_not_null AS int NOT NULL;
+CREATE DOMAIN jsb_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN jsb_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+CREATE TYPE jsbrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d jsb_int_array_1d,
+ ia2d jsb_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jbpop,
+ reca jbpop[]
+);
+CREATE TYPE jsbrec_i_not_null AS (
+ i jsb_int_not_null
+);
SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
@@ -1926,6 +1951,327 @@ SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100, 200, false]"
+SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop, '{}') q;
+ a | b | c
+---+---+--------------------------
+ x | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"x": 43.2}') q;
+ERROR: domain jsb_int_not_null does not allow null values
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": null}') q;
+ERROR: domain jsb_int_not_null does not allow null values
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": 12345}') q;
+ i
+-------
+ 12345
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": null}') q;
+ ia
+----
+
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": 123}') q;
+ERROR: expected json array in value of key "ia"
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [1, "2", null, 4]}') q;
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1, 2], [3, 4]]}') q;
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], 2]}') q;
+ERROR: expected json array in array element [1] of key "ia"
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], [2, 3]]}') q;
+ERROR: malformed json array: "[[1], [2, 3]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": null}') q;
+ ia1
+-----
+
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": 123}') q;
+ERROR: expected json array in value of key "ia1"
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [1, "2", null, 4]}') q;
+ ia1
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [[1, 2, 3]]}') q;
+ERROR: invalid input syntax for integer: "[1, 2, 3]"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": null}') q;
+ ia1d
+------
+
+(1 row)
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": 123}') q;
+ERROR: expected json array in value of key "ia1d"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null, 4]}') q;
+ERROR: value for domain jsb_int_array_1d violates check constraint "jsb_int_array_1d_check"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null]}') q;
+ ia1d
+------------
+ {1,2,NULL}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [1, "2", null, 4]}') q;
+ERROR: expected json array in array element [0] of key "ia2"
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+ ia2
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[], []]}') q;
+ ia2
+-----
+ {}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [3]]}') q;
+ERROR: malformed json array: "[[1, 2], [3]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], 3, 4]}') q;
+ERROR: expected json array in array element [1] of key "ia2"
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+ERROR: value for domain jsb_int_array_2d violates check constraint "jsb_int_array_2d_check"
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+ ia2d
+----------------------
+ {{1,2,3},{NULL,5,6}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [1, "2", null, 4]}') q;
+ERROR: expected json array in array element [0] of key "ia3"
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+ERROR: expected json array in array element [0][0] of key "ia3"
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+ ia3
+-----
+ {}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+ ia3
+-------------------
+ {{{1,2}},{{3,4}}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+ ia3
+-------------------------------
+ {{{1,2},{3,4}},{{5,6},{7,8}}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+ERROR: malformed json array: "[[[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": null}') q;
+ ta
+----
+
+(1 row)
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": 123}') q;
+ERROR: expected json array in value of key "ta"
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [1, "2", null, 4]}') q;
+ ta
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+ ta
+--------------------------------
+ {"[1, 2, 3]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": null}') q;
+ c
+---
+
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaa"}') q;
+ c
+------------
+ aaa
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaa"}') q;
+ c
+------------
+ aaaaaaaaaa
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaaaaa"}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": null}') q;
+ ca
+----
+
+(1 row)
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": 123}') q;
+ERROR: expected json array in value of key "ca"
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [1, "2", null, 4]}') q;
+ ca
+-----------------------------------------------
+ {"1 ","2 ",NULL,"4 "}
+(1 row)
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+ ca
+---------------------------------
+ {"[1, 2, 3] ","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": null}') q;
+ js
+----
+
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": true}') q;
+ js
+------
+ true
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": 123.45}') q;
+ js
+--------
+ 123.45
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "123.45"}') q;
+ js
+----------
+ "123.45"
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "abc"}') q;
+ js
+-------
+ "abc"
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+ js
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ js
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": null}') q;
+ jsb
+-----
+
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": true}') q;
+ jsb
+------
+ true
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": 123.45}') q;
+ jsb
+--------
+ 123.45
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "123.45"}') q;
+ jsb
+----------
+ "123.45"
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "abc"}') q;
+ jsb
+-------
+ "abc"
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+ jsb
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ jsb
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": null}') q;
+ jsa
+-----
+
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": 123}') q;
+ERROR: expected json array in value of key "jsa"
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": [1, "2", null, 4]}') q;
+ jsa
+--------------------
+ {1,"\"2\"",NULL,4}
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+ jsa
+-------------------------------------------------------
+ {"\"aaa\"",NULL,"[1, 2, \"3\", {}]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
+ERROR: expected json object in value of key "rec"
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
+ERROR: expected json object in value of key "rec"
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+ rec
+-----------------------------------
+ (abc,,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
+ERROR: expected json array in value of key "reca"
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
+ERROR: expected json object
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+ reca
+--------------------------------------------------------
+ {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jbpop,NULL)::jsbrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+ rec
+------------------------------------
+ (abc,3,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
-- populate_recordset
SELECT * FROM jsonb_populate_recordset(NULL::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
@@ -2002,11 +2348,11 @@ select * from jsonb_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar",
(2 rows)
select *, c is null as c_is_null
-from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::jsonb)
- as t(a int, b jsonb, c text, x int);
- a | b | c | x | c_is_null
----+-------------------+---+---+-----------
- 1 | {"c": 16, "d": 2} | | 8 | t
+from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::jsonb)
+ as t(a int, b jsonb, c text, x int, ca char(5)[], ia int[][], r jbpop);
+ a | b | c | x | ca | ia | r | c_is_null
+---+-------------------+---+---+-------------------+---------------+------------+-----------
+ 1 | {"c": 16, "d": 2} | | 8 | {"1 2 ","3 "} | {{1,2},{3,4}} | (aaa,123,) | t
(1 row)
select *, c is null as c_is_null
@@ -2017,6 +2363,71 @@ from jsonb_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::jsonb)
1 | {"c": 16, "d": 2} | | 8 | t
(1 row)
+select * from jsonb_to_record('{"ia": null}') as x(ia _int4);
+ ia
+----
+
+(1 row)
+
+select * from jsonb_to_record('{"ia": 123}') as x(ia _int4);
+ERROR: expected json array in value of key "ia"
+select * from jsonb_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+select * from jsonb_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from jsonb_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+ERROR: expected json array in array element [1] of key "ia"
+select * from jsonb_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+ERROR: malformed json array: "[[1], [2, 3]]"
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+select * from jsonb_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+ ia2
+---------
+ {1,2,3}
+(1 row)
+
+select * from jsonb_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+ ia2
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from jsonb_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+ ia2
+-----------------
+ {{{1},{2},{3}}}
+(1 row)
+
+-- test type info caching in jsonb_populate_record()
+CREATE TEMP TABLE jsbpoptest (js jsonb);
+INSERT INTO jsbpoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::jsonb
+FROM generate_series(1, 3);
+SELECT (jsonb_populate_record(NULL::jsbrec, js)).* FROM jsbpoptest;
+ i | ia | ia1 | ia2 | ia3 | ia1d | ia2d | t | ta | c | ca | ts | js | jsb | jsa | rec | reca
+---+----+-----+-----+-----+------+------+---+----+---+----+----+----+-----+--------------------+-----------------------------------+--------------------------------------------------------
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(3 rows)
+
+DROP TYPE jsbrec;
+DROP TYPE jsbrec_i_not_null;
+DROP DOMAIN jsb_int_not_null;
+DROP DOMAIN jsb_int_array_1d;
+DROP DOMAIN jsb_int_array_2d;
-- indexing
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}';
count
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index 603288b..e894199 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -378,6 +378,34 @@ select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8
-- populate_record
create type jpop as (a text, b int, c timestamp);
+CREATE DOMAIN js_int_not_null AS int NOT NULL;
+CREATE DOMAIN js_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN js_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+
+CREATE TYPE jsrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d js_int_array_1d,
+ ia2d js_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jpop,
+ reca jpop[]
+);
+
+CREATE TYPE jsrec_i_not_null AS (
+ i js_int_not_null
+);
+
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":"blurfl","x":43.2}') q;
@@ -388,6 +416,96 @@ select * from json_populate_record(null::jpop,'{"a":[100,200,false],"x":43.2}')
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":[100,200,false],"x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
+select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{}') q;
+
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"x": 43.2}') q;
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": null}') q;
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": 12345}') q;
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": null}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": 123}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [1, "2", null, 4]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1, 2], [3, 4]]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], 2]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], [2, 3]]}') q;
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": null}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": 123}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [1, "2", null, 4]}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [[1, 2, 3]]}') q;
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": null}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": 123}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null, 4]}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null]}') q;
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [1, "2", null, 4]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[], []]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [3]]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], 3, 4]}') q;
+
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [1, "2", null, 4]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": null}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": 123}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [1, "2", null, 4]}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": null}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaa"}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaa"}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaaaaa"}') q;
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": null}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": 123}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [1, "2", null, 4]}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": null}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": true}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": 123.45}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "123.45"}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "abc"}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": null}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": true}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": 123.45}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "123.45"}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "abc"}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": null}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": 123}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": [1, "2", null, 4]}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": 123}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": [1, 2]}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": 123}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [1, 2]}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+
+SELECT rec FROM json_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jpop,NULL)::jsrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
@@ -404,6 +522,25 @@ select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+-- test type info caching in json_populate_record()
+CREATE TEMP TABLE jspoptest (js json);
+
+INSERT INTO jspoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::json
+FROM generate_series(1, 3);
+
+SELECT (json_populate_record(NULL::jsrec, js)).* FROM jspoptest;
+
+DROP TYPE jsrec;
+DROP TYPE jsrec_i_not_null;
+DROP DOMAIN js_int_not_null;
+DROP DOMAIN js_int_array_1d;
+DROP DOMAIN js_int_array_2d;
+
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
@@ -520,13 +657,24 @@ select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":fa
as x(a int, b json, c boolean);
select *, c is null as c_is_null
-from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::json)
- as t(a int, b json, c text, x int);
+from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::json)
+ as t(a int, b json, c text, x int, ca char(5)[], ia int[][], r jpop);
select *, c is null as c_is_null
from json_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::json)
as t(a int, b json, c text, x int);
+select * from json_to_record('{"ia": null}') as x(ia _int4);
+select * from json_to_record('{"ia": 123}') as x(ia _int4);
+select * from json_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+select * from json_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+select * from json_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+select * from json_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+
+select * from json_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+select * from json_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+select * from json_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+
-- json_strip_nulls
select json_strip_nulls(null);
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index 6b4c796..b429fc8 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -478,6 +478,34 @@ SELECT * FROM jsonb_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,
-- populate_record
CREATE TYPE jbpop AS (a text, b int, c timestamp);
+CREATE DOMAIN jsb_int_not_null AS int NOT NULL;
+CREATE DOMAIN jsb_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN jsb_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+
+CREATE TYPE jsbrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d jsb_int_array_1d,
+ ia2d jsb_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jbpop,
+ reca jbpop[]
+);
+
+CREATE TYPE jsbrec_i_not_null AS (
+ i jsb_int_not_null
+);
+
SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":"blurfl","x":43.2}') q;
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}') q;
@@ -488,6 +516,96 @@ SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":[100,200,false],"x":43.2}'
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":[100,200,false],"x":43.2}') q;
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}') q;
+SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop, '{}') q;
+
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"x": 43.2}') q;
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": null}') q;
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": 12345}') q;
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": null}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": 123}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [1, "2", null, 4]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1, 2], [3, 4]]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], 2]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], [2, 3]]}') q;
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": null}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": 123}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [1, "2", null, 4]}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [[1, 2, 3]]}') q;
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": null}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": 123}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null, 4]}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null]}') q;
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [1, "2", null, 4]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[], []]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [3]]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], 3, 4]}') q;
+
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [1, "2", null, 4]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": null}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": 123}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [1, "2", null, 4]}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": null}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaa"}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaa"}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaaaaa"}') q;
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": null}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": 123}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [1, "2", null, 4]}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": null}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": true}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": 123.45}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "123.45"}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "abc"}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": null}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": true}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": 123.45}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "123.45"}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "abc"}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": null}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": 123}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": [1, "2", null, 4]}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+
+SELECT rec FROM jsonb_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jbpop,NULL)::jsbrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+
-- populate_recordset
SELECT * FROM jsonb_populate_recordset(NULL::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
SELECT * FROM jsonb_populate_recordset(row('def',99,NULL)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
@@ -509,13 +627,43 @@ select * from jsonb_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar",
as x(a int, b text, c boolean);
select *, c is null as c_is_null
-from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::jsonb)
- as t(a int, b jsonb, c text, x int);
+from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::jsonb)
+ as t(a int, b jsonb, c text, x int, ca char(5)[], ia int[][], r jbpop);
select *, c is null as c_is_null
from jsonb_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::jsonb)
as t(a int, b jsonb, c text, x int);
+select * from jsonb_to_record('{"ia": null}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": 123}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+
+select * from jsonb_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+select * from jsonb_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+select * from jsonb_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+
+-- test type info caching in jsonb_populate_record()
+CREATE TEMP TABLE jsbpoptest (js jsonb);
+
+INSERT INTO jsbpoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::jsonb
+FROM generate_series(1, 3);
+
+SELECT (jsonb_populate_record(NULL::jsbrec, js)).* FROM jsbpoptest;
+
+DROP TYPE jsbrec;
+DROP TYPE jsbrec_i_not_null;
+DROP DOMAIN jsb_int_not_null;
+DROP DOMAIN jsb_int_array_1d;
+DROP DOMAIN jsb_int_array_2d;
+
-- indexing
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}';
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":"CC"}';
0002_assign_ndims_to_record_function_result_types_v03.patchtext/x-patch; name=0002_assign_ndims_to_record_function_result_types_v03.patchDownload
diff --git a/src/backend/access/common/tupdesc.c b/src/backend/access/common/tupdesc.c
index b56d0e3..60510c3 100644
--- a/src/backend/access/common/tupdesc.c
+++ b/src/backend/access/common/tupdesc.c
@@ -673,7 +673,7 @@ BuildDescForRelation(List *schema)
* BuildDescFromLists
*
* Build a TupleDesc given lists of column names (as String nodes),
- * column type OIDs, typmods, and collation OIDs.
+ * column type OIDs, typmods, collation OIDs and ndims.
*
* No constraints are generated.
*
@@ -681,7 +681,8 @@ BuildDescForRelation(List *schema)
* with functions returning RECORD.
*/
TupleDesc
-BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
+BuildDescFromLists(List *names, List *types, List *typmods, List *collations,
+ List *ndims)
{
int natts;
AttrNumber attnum;
@@ -689,12 +690,14 @@ BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
ListCell *l2;
ListCell *l3;
ListCell *l4;
+ ListCell *l5;
TupleDesc desc;
natts = list_length(names);
Assert(natts == list_length(types));
Assert(natts == list_length(typmods));
Assert(natts == list_length(collations));
+ Assert(natts == list_length(ndims));
/*
* allocate a new tuple descriptor
@@ -706,12 +709,15 @@ BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
l2 = list_head(types);
l3 = list_head(typmods);
l4 = list_head(collations);
+ l5 = list_head(ndims);
+
foreach(l1, names)
{
char *attname = strVal(lfirst(l1));
Oid atttypid;
int32 atttypmod;
Oid attcollation;
+ int32 attndims;
atttypid = lfirst_oid(l2);
l2 = lnext(l2);
@@ -719,10 +725,12 @@ BuildDescFromLists(List *names, List *types, List *typmods, List *collations)
l3 = lnext(l3);
attcollation = lfirst_oid(l4);
l4 = lnext(l4);
+ attndims = lfirst_oid(l5);
+ l5 = lnext(l5);
attnum++;
- TupleDescInitEntry(desc, attnum, attname, atttypid, atttypmod, 0);
+ TupleDescInitEntry(desc, attnum, attname, atttypid, atttypmod, attndims);
TupleDescInitEntryCollation(desc, attnum, attcollation);
}
diff --git a/src/backend/executor/nodeFunctionscan.c b/src/backend/executor/nodeFunctionscan.c
index 5a0f324..6d93cb3 100644
--- a/src/backend/executor/nodeFunctionscan.c
+++ b/src/backend/executor/nodeFunctionscan.c
@@ -410,7 +410,8 @@ ExecInitFunctionScan(FunctionScan *node, EState *estate, int eflags)
tupdesc = BuildDescFromLists(rtfunc->funccolnames,
rtfunc->funccoltypes,
rtfunc->funccoltypmods,
- rtfunc->funccolcollations);
+ rtfunc->funccolcollations,
+ rtfunc->funccolndims);
/*
* For RECORD results, make sure a typmod has been assigned. (The
diff --git a/src/backend/nodes/copyfuncs.c b/src/backend/nodes/copyfuncs.c
index 6955298..6ca0c91 100644
--- a/src/backend/nodes/copyfuncs.c
+++ b/src/backend/nodes/copyfuncs.c
@@ -2181,6 +2181,7 @@ _copyRangeTblFunction(const RangeTblFunction *from)
COPY_NODE_FIELD(funccoltypes);
COPY_NODE_FIELD(funccoltypmods);
COPY_NODE_FIELD(funccolcollations);
+ COPY_NODE_FIELD(funccolndims);
COPY_BITMAPSET_FIELD(funcparams);
return newnode;
diff --git a/src/backend/nodes/equalfuncs.c b/src/backend/nodes/equalfuncs.c
index 548a2aa..b0e0851 100644
--- a/src/backend/nodes/equalfuncs.c
+++ b/src/backend/nodes/equalfuncs.c
@@ -2490,6 +2490,7 @@ _equalRangeTblFunction(const RangeTblFunction *a, const RangeTblFunction *b)
COMPARE_NODE_FIELD(funccoltypes);
COMPARE_NODE_FIELD(funccoltypmods);
COMPARE_NODE_FIELD(funccolcollations);
+ COMPARE_NODE_FIELD(funccolndims);
COMPARE_BITMAPSET_FIELD(funcparams);
return true;
diff --git a/src/backend/nodes/outfuncs.c b/src/backend/nodes/outfuncs.c
index 9fe9873..3b9439c 100644
--- a/src/backend/nodes/outfuncs.c
+++ b/src/backend/nodes/outfuncs.c
@@ -2880,6 +2880,7 @@ _outRangeTblFunction(StringInfo str, const RangeTblFunction *node)
WRITE_NODE_FIELD(funccoltypes);
WRITE_NODE_FIELD(funccoltypmods);
WRITE_NODE_FIELD(funccolcollations);
+ WRITE_NODE_FIELD(funccolndims);
WRITE_BITMAPSET_FIELD(funcparams);
}
diff --git a/src/backend/nodes/readfuncs.c b/src/backend/nodes/readfuncs.c
index 63f6336..4525298 100644
--- a/src/backend/nodes/readfuncs.c
+++ b/src/backend/nodes/readfuncs.c
@@ -1359,6 +1359,7 @@ _readRangeTblFunction(void)
READ_NODE_FIELD(funccoltypes);
READ_NODE_FIELD(funccoltypmods);
READ_NODE_FIELD(funccolcollations);
+ READ_NODE_FIELD(funccolndims);
READ_BITMAPSET_FIELD(funcparams);
READ_DONE();
diff --git a/src/backend/parser/parse_relation.c b/src/backend/parser/parse_relation.c
index 58f7050..19f5fe2 100644
--- a/src/backend/parser/parse_relation.c
+++ b/src/backend/parser/parse_relation.c
@@ -1444,6 +1444,7 @@ addRangeTableEntryForFunction(ParseState *pstate,
rtfunc->funccoltypes = NIL;
rtfunc->funccoltypmods = NIL;
rtfunc->funccolcollations = NIL;
+ rtfunc->funccolndims = NIL;
rtfunc->funcparams = NULL; /* not set until planning */
/*
@@ -1509,6 +1510,7 @@ addRangeTableEntryForFunction(ParseState *pstate,
Oid attrtype;
int32 attrtypmod;
Oid attrcollation;
+ int32 attrndims;
attrname = n->colname;
if (n->typeName->setof)
@@ -1520,12 +1522,13 @@ addRangeTableEntryForFunction(ParseState *pstate,
typenameTypeIdAndMod(pstate, n->typeName,
&attrtype, &attrtypmod);
attrcollation = GetColumnDefCollation(pstate, n, attrtype);
+ attrndims = list_length(n->typeName->arrayBounds);
TupleDescInitEntry(tupdesc,
(AttrNumber) i,
attrname,
attrtype,
attrtypmod,
- 0);
+ attrndims);
TupleDescInitEntryCollation(tupdesc,
(AttrNumber) i,
attrcollation);
@@ -1537,6 +1540,8 @@ addRangeTableEntryForFunction(ParseState *pstate,
attrtypmod);
rtfunc->funccolcollations = lappend_oid(rtfunc->funccolcollations,
attrcollation);
+ rtfunc->funccolndims = lappend_int(rtfunc->funccolndims,
+ attrndims);
i++;
}
diff --git a/src/include/access/tupdesc.h b/src/include/access/tupdesc.h
index de18f74..9ed44fd 100644
--- a/src/include/access/tupdesc.h
+++ b/src/include/access/tupdesc.h
@@ -125,6 +125,7 @@ extern void TupleDescInitEntryCollation(TupleDesc desc,
extern TupleDesc BuildDescForRelation(List *schema);
-extern TupleDesc BuildDescFromLists(List *names, List *types, List *typmods, List *collations);
+extern TupleDesc BuildDescFromLists(List *names, List *types, List *typmods,
+ List *collations, List *ndims);
#endif /* TUPDESC_H */
diff --git a/src/include/nodes/parsenodes.h b/src/include/nodes/parsenodes.h
index fc532fb..d27db6e 100644
--- a/src/include/nodes/parsenodes.h
+++ b/src/include/nodes/parsenodes.h
@@ -969,7 +969,8 @@ typedef struct RangeTblEntry
* If the function had a column definition list (required for an
* otherwise-unspecified RECORD result), funccolnames lists the names given
* in the definition list, funccoltypes lists their declared column types,
- * funccoltypmods lists their typmods, funccolcollations their collations.
+ * funccoltypmods lists their typmods, funccolcollations their collations,
+ * funccolndims lists their numbers of array dimensions.
* Otherwise, those fields are NIL.
*
* Notice we don't attempt to store info about the results of functions
@@ -989,6 +990,7 @@ typedef struct RangeTblFunction
List *funccoltypes; /* OID list of column type OIDs */
List *funccoltypmods; /* integer list of column typmods */
List *funccolcollations; /* OID list of column collation OIDs */
+ List *funccolndims; /* integer list of column array ndims */
/* This is set during planning for use by the executor: */
Bitmapset *funcparams; /* PARAM_EXEC Param IDs affecting this func */
} RangeTblFunction;
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index a7de8ef..696aa9c 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -2009,11 +2009,7 @@ select * from json_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
ERROR: malformed json array: "[[1], [2, 3]]"
DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
select * from json_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
- ia2
----------
- {1,2,3}
-(1 row)
-
+ERROR: expected json array in array element [0] of key "ia2"
select * from json_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
ia2
---------------
@@ -2021,11 +2017,7 @@ select * from json_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
(1 row)
select * from json_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
- ia2
------------------
- {{{1},{2},{3}}}
-(1 row)
-
+ERROR: invalid input syntax for integer: "[1]"
-- json_strip_nulls
select json_strip_nulls(null);
json_strip_nulls
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index 758adb2..28c994f 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -2389,11 +2389,7 @@ select * from jsonb_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
ERROR: malformed json array: "[[1], [2, 3]]"
DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
select * from jsonb_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
- ia2
----------
- {1,2,3}
-(1 row)
-
+ERROR: expected json array in array element [0] of key "ia2"
select * from jsonb_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
ia2
---------------
@@ -2401,11 +2397,7 @@ select * from jsonb_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
(1 row)
select * from jsonb_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
- ia2
------------------
- {{{1},{2},{3}}}
-(1 row)
-
+ERROR: invalid input syntax for integer: "[1]"
-- test type info caching in jsonb_populate_record()
CREATE TEMP TABLE jsbpoptest (js jsonb);
INSERT INTO jsbpoptest
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 01/08/2017 09:52 PM, Tom Lane wrote:
... attndims is really syntactic sugar only and doesn't affect anything
meaningful semantically.
I have fixed the first patch: when the number of dimensions is unknown
we determine it simply by the number of successive opening brackets at
the start of a json array. But I'm still using for verification non-zero
ndims values that can be get from composite type attribute (attndims) or
from domain array type (typndims) through specially added function
get_type_ndims().
Apparently I didn't make my point forcefully enough. attndims is not
semantically significant in Postgres; the fact that we even store it
is just a historical artifact, I think. There might be an argument
to start enforcing it, but we'd have to do that across the board, and
I think most likely any such proposal would fail because of backwards
compatibility concerns. (There would also be a lot of discussion as
to whether, say, "int foo[3]" shouldn't enforce that the array length
is 3, not just that it be one-dimensional.) In the meantime, we are
*not* going to have attndims be semantically significant in just this one
function. That would not satisfy the principle of least astonishment.
Therefore, please remove this patch's dependence on attndims.
I looked through the patch briefly and have some other comments:
1. It's pretty bizarre that you need dummy forward struct declarations
for ColumnIOData and RecordIOData. The reason evidently is that somebody
ignored project style and put a bunch of typedefs after the local function
declarations in jsonfuncs.c. Project style of course is to put the
typedefs first, precisely because they may be needed in the local function
declarations. I will go move those declarations right now, as a single-
purpose patch, so that you have something a bit cleaner to modify.
2. The business with isstring, saved_scalar_is_string, etc makes me itch.
I'm having a hard time explaining exactly why, but it just seems like a
single-purpose kluge. Maybe it would seem less so if you saved the
original JsonTokenType instead. But also I'm wondering why the ultimate
consumers are concerned with string-ness as such. It seems like mainly
what they're trying to do is forbid converting a string into a non-scalar
Postgres type, but (a) why, and (b) if you are going to restrict it,
wouldn't it be more sensible to frame it as you can't convert any JSON
scalar to a non-scalar type? As to (a), it seems like you're just
introducing unnecessary compatibility breakage if you insist on that.
As to (b), really it seems like an appropriate restriction of that sort
would be like "if target type is composite, source must be a JSON object,
and if target type is array, source must be a JSON array". Personally
I think what you ought to do here is not change the semantics of cases
that are allowed today, but only change the semantics in the cases of
JSON object being assigned to PG composite and JSON array being assigned
to PG array; both of those fail today, so there's nobody depending on the
existing behavior. But if you reject string-to-composite cases then you
will be breaking existing apps, and I doubt people will thank you for it.
3. I think you'd be better off declaring ColumnIOData.type_category as an
actual enum type, not "char" with some specific values documented only
by a comment. Also, could you fold the domain case in as a fourth
type_category? Also, why does ColumnIOData have both an ndims field and
another one buried in io.array.ndims?
4. populate_array_report_expected_array() violates our error message
guidelines, first by using elog rather than ereport for a user-facing
error, and second by assembling the message from pieces, which would
make it untranslatable even if it were being reported through ereport.
I'm not sure if this needs to be fixed though; will the function even
still be needed once you remove the attndims dependency? Even if there
are still callers, you wouldn't necessarily need such a function if
you scale back your ambitions a bit as to the amount of detail required.
I'm not sure you really need to report what you think the dimensions
are when complaining that an array is nonrectangular.
5. The business with having some arguments that are only for json and
others that are only for jsonb, eg in populate_scalar(), also makes me
itch. I wonder whether this wouldn't all get a lot simpler and more
readable if we gave up on trying to share code between the two cases.
In populate_scalar(), for instance, the amount of code actually shared
between the two cases amounts to a whole two lines, which are dwarfed
by the amount of crud needed to deal with trying to serve both cases
in the one function. There are places where there's more sharable
code than that, but it seems like a better design might be to refactor
the sharable code out into subroutines called by separate functions for
json and jsonb.
6. I'm not too excited by your having invented JsonContainerSize,
JsonContainerIsObject, etc, and then using them in just this new
code. That is not really improving readability, it's just creating
stylistic inconsistency between these functions and the rest of the
jsonb code. If you want such macros I think it would be better to
submit a separate cosmetic patch that tries to hide such bit-tests
behind macros throughout the jsonb code. Another problem is that
these macros are coding hazards because they look like they yield bool
values but they don't really; assigning the results to bool variables,
for example, would fail on most platforms. Safer coding for a
general-purpose macro would be like
#define JsonContainerIsObject(jc) (((jc)->header & JB_FOBJECT) != 0)
7. More generally, the patch is hard to review because it whacks the
existing code around so thoroughly that it's tough even to match up
old and new code to get a handle on what you changed functionally.
Maybe it would be good if you could separate it into a refactoring
patch that just moves existing code around without functional changes,
and then a patch on top of that that adds code and makes only localized
changes in what's already there.
regards, tom lane
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 22.01.2017 21:58, Tom Lane wrote:
In the meantime, we are *not* going to have attndims be semantically
significant in just this one function. Therefore, please remove this patch's
dependence on attndims.
Ok, I've removed patch's dependence on attndims. But I still believe that
someday PostgreSQL's type system will be fixed.
I looked through the patch briefly and have some other comments:
Thank you very much for your review.
1. It's pretty bizarre that you need dummy forward struct declarations
for ColumnIOData and RecordIOData. The reason evidently is that somebody
ignored project style and put a bunch of typedefs after the local function
declarations in jsonfuncs.c. Project style of course is to put the
typedefs first, precisely because they may be needed in the local function
declarations. I will go move those declarations right now, as a single-
purpose patch, so that you have something a bit cleaner to modify.
These forward struct declarations were moved to the type declaration section.
2. The business with isstring, saved_scalar_is_string, etc makes me itch.
I'm having a hard time explaining exactly why, but it just seems like a
single-purpose kluge. Maybe it would seem less so if you saved the
original JsonTokenType instead.
Original JsonTokenType is saved now. Also "isnull" fields of several structures
were replaced by it.
But also I'm wondering why the ultimate consumers are concerned with
string-ness as such.
saved_scalar_is_string was necessary for the case when json string is converted
to json/jsonb type. json lexer returns strings with stripped quotes and we
must recover them before passing to json_in() or jsonb_in(). There were
examples for this case in my first message:
[master]=# select * from json_to_record('{"js": "a"}') as rec(js json);
ERROR: invalid input syntax for type json
DETAIL: Token "a" is invalid.
CONTEXT: JSON data, line 1: a
[master]=# select * from json_to_record('{"js": "true"}') as rec(js json);
js
------
true
[patched]=# select * from json_to_record('{"js": "a"}') as rec(js json);
js
-----
"a"
[patched]=# select * from json_to_record('{"js": "true"}') as rec(js json);
js
--------
"true"
It seems like mainly what they're trying to do is forbid converting a string
into a non-scalar Postgres type, but (a) why, and (b) if you are going to
restrict it, wouldn't it be more sensible to frame it as you can't convert any
JSON scalar to a non-scalar type? As to (a), it seems like you're just
introducing unnecessary compatibility breakage if you insist on that.
As to (b), really it seems like an appropriate restriction of that sort
would be like "if target type is composite, source must be a JSON object,
and if target type is array, source must be a JSON array". Personally
I think what you ought to do here is not change the semantics of cases
that are allowed today, but only change the semantics in the cases of
JSON object being assigned to PG composite and JSON array being assigned
to PG array; both of those fail today, so there's nobody depending on the
existing behavior. But if you reject string-to-composite cases then you
will be breaking existing apps, and I doubt people will thank you for it.
I've removed compatibility-breaking restrictions and now string-to-non-scalar
conversions through the input function are allowed. Also I've added
corresponding regression test cases.
3. I think you'd be better off declaring ColumnIOData.type_category as an
actual enum type, not "char" with some specific values documented only
by a comment. Also, could you fold the domain case in as a fourth
type_category?
I've introduced enum type TypeCat for type categories with domain category as
its fourth member. (TypeCategory and JsonTypeCategory names conflict with
existing names, you might offer a better name.)
Also, why does ColumnIOData have both an ndims field and another one buried
in io.array.ndims?
Now there are no ndims fields at all, but earlier their values could differ:
ColumnIOData.ndims was typically copied from attndims, but ArrayIOData.ndims
could be copied from typndims for domain types.
4. populate_array_report_expected_array() violates our error message
guidelines, first by using elog rather than ereport for a user-facing
error, and second by assembling the message from pieces, which would
make it untranslatable even if it were being reported through ereport.
I'm not sure if this needs to be fixed though; will the function even
still be needed once you remove the attndims dependency? Even if there
are still callers, you wouldn't necessarily need such a function if
you scale back your ambitions a bit as to the amount of detail required.
I'm not sure you really need to report what you think the dimensions
are when complaining that an array is nonrectangular.
It was my mistake that I was not familiar with the error-message guidelines. Now
ereport() is used and there is no message assembling, but I'm also not sure
that we need to report these details.
5. The business with having some arguments that are only for json and
others that are only for jsonb, eg in populate_scalar(), also makes me
itch.
I've refactored json/jsonb passing using new struct JsValue which contains
union for json/jsonb values.
I wonder whether this wouldn't all get a lot simpler and more
readable if we gave up on trying to share code between the two cases.
In populate_scalar(), for instance, the amount of code actually shared
between the two cases amounts to a whole two lines, which are dwarfed
by the amount of crud needed to deal with trying to serve both cases
in the one function. There are places where there's more sharable
code than that, but it seems like a better design might be to refactor
the sharable code out into subroutines called by separate functions for
json and jsonb.
Maybe two separate families of functions like this
a_json(common_args, json_args)
{
b(common_args);
c_json(json_args);
d(common_args);
}
a_jsonb(common_args, jsonb_args)
{
b(common_args);
c_jsonb(jsonb_args);
d(common_args);
}
could slightly improve readability, but such code duplication (I believe it is
duplicate code) would never be acceptable to me.
a duplicate code) would never be acceptable to me.
I can only offer to extract two subroutines from from populate_scalar():
populate_scalar_json() and populate_scalar_jsonb(). I think InputFunctionCall()
here should have exactly the one call site because semantically there is only
the one such call per scalar, regardless of its type.
6. I'm not too excited by your having invented JsonContainerSize,
JsonContainerIsObject, etc, and then using them in just this new
code. That is not really improving readability, it's just creating
stylistic inconsistency between these functions and the rest of the
jsonb code.
These new macros were introduced because existing JB_XXX() macros work with
Jsonb struct and there were no analogous macros for JsonbContainer struct.
They were not invented specifically for this patch: they are the result of the
deep refactoring of json/jsonb code which was made in the process of working on
jsonb compression (I'm going to present this work here soon). This refactoring
allows us to use a single generic interface to json, jsonb and jsonbc (new
compressed format) types using ExpandedObject representation, but direct access
to JsonbContainer fields becomes illegal. So I'm trying not to create new
direct references to JsonbContainer fields. Also I could offer to rename these
macros to JBC_XXX() or JB_CONTAINER_XXX() but it would lead to unnecessary
conflicts.
If you want such macros I think it would be better to submit a separate
cosmetic patch that tries to hide such bit-tests behind macros throughout
the jsonb code.
I've attached that patch, but not all the bit-tests were hidden: some of them
in jsonb_util.c still remain valid after upcoming refactoring because they
don't belong to generic code (there might be better to use JBC_XXX() macros).
Another problem is that these macros are coding hazards because they look like
they yield bool values but they don't really; assigning the results to bool
variables, for example, would fail on most platforms. Safer coding for a
general-purpose macro would be like: #define JsonContainerIsObject(jc) (((jc)->header & JB_FOBJECT) != 0)
Sorry for this obvious mistake. But macros JB_ROOT_IS_XXX() also contain the
same hazard.
7. More generally, the patch is hard to review because it whacks the
existing code around so thoroughly that it's tough even to match up
old and new code to get a handle on what you changed functionally.
Maybe it would be good if you could separate it into a refactoring
patch that just moves existing code around without functional changes,
and then a patch on top of that that adds code and makes only localized
changes in what's already there.
I've split this patch into two patches as you asked.
--
Nikita Glukhov
Postgres Professional: http://www.postgrespro.com
The Russian Postgres Company
Attachments:
0001-introduce-JsonContainerXxx-macros-v04.patchtext/x-patch; name=0001-introduce-JsonContainerXxx-macros-v04.patchDownload
diff --git a/src/backend/utils/adt/jsonb_util.c b/src/backend/utils/adt/jsonb_util.c
index 11a1395..64fb1c9 100644
--- a/src/backend/utils/adt/jsonb_util.c
+++ b/src/backend/utils/adt/jsonb_util.c
@@ -328,7 +328,7 @@ findJsonbValueFromContainer(JsonbContainer *container, uint32 flags,
JsonbValue *key)
{
JEntry *children = container->children;
- int count = (container->header & JB_CMASK);
+ int count = JsonContainerSize(container);
JsonbValue *result;
Assert((flags & ~(JB_FARRAY | JB_FOBJECT)) == 0);
@@ -339,7 +339,7 @@ findJsonbValueFromContainer(JsonbContainer *container, uint32 flags,
result = palloc(sizeof(JsonbValue));
- if (flags & JB_FARRAY & container->header)
+ if ((flags & JB_FARRAY) && JsonContainerIsArray(container))
{
char *base_addr = (char *) (children + count);
uint32 offset = 0;
@@ -358,7 +358,7 @@ findJsonbValueFromContainer(JsonbContainer *container, uint32 flags,
JBE_ADVANCE_OFFSET(offset, children[i]);
}
}
- else if (flags & JB_FOBJECT & container->header)
+ else if ((flags & JB_FOBJECT) && JsonContainerIsObject(container))
{
/* Since this is an object, account for *Pairs* of Jentrys */
char *base_addr = (char *) (children + count * 2);
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index a75df62..6a7aab2 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -1266,10 +1266,10 @@ get_jsonb_path_all(FunctionCallInfo fcinfo, bool as_text)
uint32 nelements;
/* Container must be array, but make sure */
- if ((container->header & JB_FARRAY) == 0)
+ if (!JsonContainerIsArray(container))
elog(ERROR, "not a jsonb array");
- nelements = container->header & JB_CMASK;
+ nelements = JsonContainerSize(container);
if (-lindex > nelements)
PG_RETURN_NULL();
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
index e402b9e..9bbefa8 100644
--- a/src/include/utils/jsonb.h
+++ b/src/include/utils/jsonb.h
@@ -218,6 +218,12 @@ typedef struct
#define JB_ROOT_IS_OBJECT(jbp_) ( *(uint32*) VARDATA(jbp_) & JB_FOBJECT)
#define JB_ROOT_IS_ARRAY(jbp_) ( *(uint32*) VARDATA(jbp_) & JB_FARRAY)
+#define JsonContainerSize(jc) ((jc)->header & JB_CMASK)
+#define JsonContainerIsEmpty(jc) (JsonContainerSize(jc) == 0)
+#define JsonContainerIsObject(jc) (((jc)->header & JB_FOBJECT) != 0)
+#define JsonContainerIsArray(jc) (((jc)->header & JB_FARRAY) != 0)
+#define JsonContainerIsScalar(jc) (((jc)->header & JB_FSCALAR) != 0)
+
enum jbvType
{
0002-refactor-json_populate_record-v04.patchtext/x-patch; name=0002-refactor-json_populate_record-v04.patchDownload
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index 6a7aab2..95f7e0a 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -109,6 +109,7 @@ typedef struct JhashState
HTAB *hash;
char *saved_scalar;
char *save_json_start;
+ JsonTokenType saved_token_type;
} JHashState;
/* hashtable element */
@@ -116,26 +117,49 @@ typedef struct JsonHashEntry
{
char fname[NAMEDATALEN]; /* hash key (MUST BE FIRST) */
char *val;
- char *json;
- bool isnull;
+ JsonTokenType type;
} JsonHashEntry;
-/* these two are stolen from hstore / record_out, used in populate_record* */
-typedef struct ColumnIOData
+/* structure to cache type I/O metadata needed for populate_scalar() */
+typedef struct ScalarIOData
{
- Oid column_type;
- Oid typiofunc;
Oid typioparam;
- FmgrInfo proc;
-} ColumnIOData;
+ FmgrInfo typiofunc;
+} ScalarIOData;
+
+typedef struct ColumnIOData ColumnIOData;
+typedef struct RecordIOData RecordIOData;
+
+/* structure to cache metadata needed for populate_composite() */
+typedef struct CompositeIOData
+{
+ /*
+ * We use pointer to a RecordIOData here because variable-length
+ * struct RecordIOData can't be used directly in ColumnIOData.io union
+ */
+ RecordIOData *record_io; /* metadata cache for populate_record() */
+ TupleDesc tupdesc; /* cached tuple descriptor */
+} CompositeIOData;
-typedef struct RecordIOData
+/* these two are stolen from hstore / record_out, used in populate_record* */
+
+/* structure to cache record metadata needed for populate_record_field() */
+struct ColumnIOData
+{
+ Oid typid; /* column type id */
+ int32 typmod; /* column type modifier */
+ ScalarIOData scalar_io; /* metadata cache for direct conversion
+ * through input function */
+};
+
+/* structure to cache record metadata needed for populate_record() */
+struct RecordIOData
{
Oid record_type;
int32 record_typmod;
int ncolumns;
ColumnIOData columns[FLEXIBLE_ARRAY_MEMBER];
-} RecordIOData;
+};
/* state for populate_recordset */
typedef struct PopulateRecordsetState
@@ -145,10 +169,11 @@ typedef struct PopulateRecordsetState
HTAB *json_hash;
char *saved_scalar;
char *save_json_start;
+ JsonTokenType saved_token_type;
Tuplestorestate *tuple_store;
TupleDesc ret_tdesc;
HeapTupleHeader rec;
- RecordIOData *my_extra;
+ RecordIOData **my_extra;
MemoryContext fn_mcxt; /* used to stash IO funcs */
} PopulateRecordsetState;
@@ -160,6 +185,24 @@ typedef struct StripnullState
bool skip_next_null;
} StripnullState;
+/* structure for generalized json/jsonb value passing */
+typedef struct JsValue
+{
+ bool is_json; /* json/jsonb */
+ union
+ {
+ struct
+ {
+ char *str; /* json string */
+ int len; /* json string length or -1 if null-terminated */
+ JsonTokenType type; /* json type */
+ } json; /* json value */
+
+ JsonbValue *jsonb; /* jsonb value */
+ } val;
+} JsValue;
+
+
/* semantic action functions for json_object_keys */
static void okeys_object_field_start(void *state, char *fname, bool isnull);
static void okeys_array_start(void *state);
@@ -211,7 +254,7 @@ static void elements_array_element_end(void *state, bool isnull);
static void elements_scalar(void *state, char *token, JsonTokenType tokentype);
/* turn a json object into a hash table */
-static HTAB *get_json_object_as_hash(text *json, const char *funcname);
+static HTAB *get_json_object_as_hash(char *json, int len, const char *funcname);
/* common worker for populate_record and to_record */
static Datum populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
@@ -241,10 +284,6 @@ static void sn_object_field_start(void *state, char *fname, bool isnull);
static void sn_array_element_start(void *state, bool isnull);
static void sn_scalar(void *state, char *token, JsonTokenType tokentype);
-/* Turn a jsonb object into a record */
-static void make_row_from_rec_and_jsonb(Jsonb *element,
- PopulateRecordsetState *state);
-
/* worker function for populate_recordset and to_recordset */
static Datum populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
bool have_record_arg);
@@ -271,6 +310,16 @@ static void setPathArray(JsonbIterator **it, Datum *path_elems,
int level, Jsonb *newval, uint32 nelems, int op_type);
static void addJsonbToParseState(JsonbParseState **jbps, Jsonb *jb);
+/* helper functions for populate_record[set] */
+static HeapTupleHeader
+populate_record(TupleDesc tupdesc, RecordIOData **record_info,
+ HeapTupleHeader template, MemoryContext mcxt,
+ bool is_json, HTAB *json_hash, JsonbContainer *cont);
+
+static Datum
+populate_record_field(ColumnIOData *col, Oid typid, int32 typmod,
+ const char *colname, MemoryContext mcxt,
+ Datum defaultval, JsValue *jsv, bool *isnull);
/*
* SQL function json_object_keys
@@ -2099,158 +2148,250 @@ json_to_record(PG_FUNCTION_ARGS)
return populate_record_worker(fcinfo, "json_to_record", false);
}
+/* populate recursively composite (row type) value from json/jsonb */
static Datum
-populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
- bool have_record_arg)
+populate_composite(CompositeIOData *io, /* metadata cache */
+ Oid typid, /* row type id */
+ int32 typmod, /* row type modifier */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ HeapTupleHeader defaultval, /* default row value, if any */
+ JsValue *jsv) /* json/jsonb object */
{
- int json_arg_num = have_record_arg ? 1 : 0;
- Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
- text *json;
- Jsonb *jb = NULL;
- HTAB *json_hash = NULL;
- HeapTupleHeader rec = NULL;
- Oid tupType = InvalidOid;
- int32 tupTypmod = -1;
- TupleDesc tupdesc;
- HeapTupleData tuple;
- HeapTuple rettuple;
- RecordIOData *my_extra;
- int ncolumns;
- int i;
- Datum *values;
- bool *nulls;
-
- Assert(jtype == JSONOID || jtype == JSONBOID);
-
- if (have_record_arg)
+ HeapTupleHeader tuple;
+ HTAB *json_hash = NULL;
+ JsonbContainer *container = NULL;
+
+ /* acquire cached tuple descriptor */
+ if (!io->tupdesc ||
+ io->tupdesc->tdtypeid != typid ||
+ io->tupdesc->tdtypmod != typmod)
{
- Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+ TupleDesc tupdesc = lookup_rowtype_tupdesc(typid, typmod);
+ MemoryContext oldcxt;
- if (!type_is_rowtype(argtype))
- ereport(ERROR,
- (errcode(ERRCODE_DATATYPE_MISMATCH),
- errmsg("first argument of %s must be a row type",
- funcname)));
+ if (io->tupdesc)
+ FreeTupleDesc(io->tupdesc);
- if (PG_ARGISNULL(0))
- {
- if (PG_ARGISNULL(1))
- PG_RETURN_NULL();
+ /* copy tuple desc without constraints into cache memory context */
+ oldcxt = MemoryContextSwitchTo(mcxt);
+ io->tupdesc = CreateTupleDescCopy(tupdesc);
+ MemoryContextSwitchTo(oldcxt);
- /*
- * have no tuple to look at, so the only source of type info is
- * the argtype. The lookup_rowtype_tupdesc call below will error
- * out if we don't have a known composite type oid here.
- */
- tupType = argtype;
- tupTypmod = -1;
- }
- else
- {
- rec = PG_GETARG_HEAPTUPLEHEADER(0);
-
- if (PG_ARGISNULL(1))
- PG_RETURN_POINTER(rec);
-
- /* Extract type info from the tuple itself */
- tupType = HeapTupleHeaderGetTypeId(rec);
- tupTypmod = HeapTupleHeaderGetTypMod(rec);
- }
+ ReleaseTupleDesc(tupdesc);
+ }
- tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+ /* prepare input value */
+ if (jsv->is_json)
+ {
+ /* convert plain-text json into a hash table */
+ json_hash = get_json_object_as_hash(jsv->val.json.str,
+ jsv->val.json.len >= 0
+ ? jsv->val.json.len
+ : strlen(jsv->val.json.str),
+ "populate_composite");
}
else
{
- /* json{b}_to_record case */
- if (PG_ARGISNULL(0))
- PG_RETURN_NULL();
+ JsonbValue *jbv = jsv->val.jsonb;
- if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
- ereport(ERROR,
- (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("function returning record called in context "
- "that cannot accept type record"),
- errhint("Try calling the function in the FROM clause "
- "using a column definition list.")));
+ if (jbv->type == jbvBinary &&
+ JsonContainerIsObject(jbv->val.binary.data))
+ container = jbv->val.binary.data;
+ else
+ container = NULL;
}
- if (jtype == JSONOID)
- {
- /* just get the text */
- json = PG_GETARG_TEXT_P(json_arg_num);
+ /* populate resulting record tuple */
+ tuple = populate_record(io->tupdesc, &io->record_io, defaultval, mcxt,
+ jsv->is_json, json_hash, container);
+
+ if (json_hash)
+ hash_destroy(json_hash);
+
+ return HeapTupleHeaderGetDatum(tuple);
+}
- json_hash = get_json_object_as_hash(json, funcname);
+/* populate non-null scalar value from json/jsonb value */
+static Datum
+populate_scalar(ScalarIOData *io, /* metadata cache */
+ Oid typid, /* scalar type id */
+ int32 typmod, /* scalar type modifier */
+ JsValue *jsv) /* json/jsonb value to convert */
+{
+ Datum res;
+ char *str;
+ char *json = NULL;
+ if (!jsv)
+ {
/*
- * if the input json is empty, we can only skip the rest if we were
- * passed in a non-null record, since otherwise there may be issues
- * with domain nulls.
+ * Need InputFunctionCall to happen even for NULLs, so that domain
+ * checks are done.
*/
- if (hash_get_num_entries(json_hash) == 0 && rec)
+ str = NULL;
+ }
+ else if (jsv->is_json)
+ {
+ /* already done the hard work in the json case */
+ int len = jsv->val.json.len;
+
+ json = jsv->val.json.str;
+ Assert(json);
+
+ if (len >= 0)
{
- hash_destroy(json_hash);
- ReleaseTupleDesc(tupdesc);
- PG_RETURN_POINTER(rec);
+ /* Need to copy non-null-terminated string */
+ str = palloc(len + 1 * sizeof(char));
+ memcpy(str, json, len);
+ str[len] = '\0';
}
+ else
+ str = json; /* null-terminated string */
}
else
{
- jb = PG_GETARG_JSONB(json_arg_num);
-
- /* same logic as for json */
- if (JB_ROOT_COUNT(jb) == 0 && rec)
+ JsonbValue *jbv = jsv->val.jsonb;
+
+ if (jbv->type == jbvString) /* quotes are stripped */
+ str = pnstrdup(jbv->val.string.val, jbv->val.string.len);
+ else if (jbv->type == jbvBool)
+ str = pnstrdup(jbv->val.boolean ? "t" : "f", 1);
+ else if (jbv->type == jbvNumeric)
+ str = DatumGetCString(DirectFunctionCall1(numeric_out,
+ PointerGetDatum(jbv->val.numeric)));
+ else if (jbv->type == jbvBinary)
+ str = JsonbToCString(NULL, jbv->val.binary.data,
+ jbv->val.binary.len);
+ else
{
- ReleaseTupleDesc(tupdesc);
- PG_RETURN_POINTER(rec);
+ elog(ERROR, "unrecognized jsonb type: %d", (int) jbv->type);
+ str = NULL;
}
}
- ncolumns = tupdesc->natts;
+ res = InputFunctionCall(&io->typiofunc, str, io->typioparam, typmod);
- if (rec)
- {
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = rec;
- }
+ /* free temporary buffer */
+ if (str != json)
+ pfree(str);
+
+ return res;
+}
+
+/* prepare column metadata cache for the given type */
+static void
+prepare_column_cache(ColumnIOData *column, /* metadata cache */
+ Oid typid, /* column type id */
+ int32 typmod, /* column type modifier */
+ MemoryContext mcxt, /* cache memory context */
+ bool json) /* json/jsonb */
+{
+ Oid typioproc;
+
+ column->typid = typid;
+ column->typmod = typmod;
+
+ getTypeInputInfo(typid, &typioproc, &column->scalar_io.typioparam);
+ fmgr_info_cxt(typioproc, &column->scalar_io.typiofunc, mcxt);
+}
+
+/* populate recursively a record field or an array element from json/jsonb value */
+static Datum
+populate_record_field(ColumnIOData *col, /* metadata cache */
+ Oid typid, /* field type id */
+ int32 typmod, /* field type modifier */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ Datum defaultval, /* default record value if any */
+ JsValue *jsv, /* json/jsonb value to convert */
+ bool *isnull) /* value is NULL */
+{
+ check_stack_depth();
+
+ /* prepare column metadata cache for the given type */
+ if (col->typid != typid || col->typmod != typmod)
+ prepare_column_cache(col, typid, typmod, mcxt, jsv->is_json);
+
+ *isnull = jsv->is_json ? jsv->val.json.str == NULL
+ : jsv->val.jsonb == NULL ||
+ jsv->val.jsonb->type == jbvNull;
+
+ return populate_scalar(&col->scalar_io, typid, typmod,
+ *isnull ? NULL : jsv);
+}
+
+static RecordIOData *
+allocate_record_info(MemoryContext mcxt, int ncolumns)
+{
+ RecordIOData *data = (RecordIOData *)
+ MemoryContextAlloc(mcxt,
+ offsetof(RecordIOData, columns) +
+ ncolumns * sizeof(ColumnIOData));
+
+ data->record_type = InvalidOid;
+ data->record_typmod = 0;
+ data->ncolumns = ncolumns;
+ MemSet(data->columns, 0, sizeof(ColumnIOData) * ncolumns);
+
+ return data;
+}
+
+/* populate record tuple from json/jsonb value */
+static HeapTupleHeader
+populate_record(TupleDesc tupdesc, /* record tuple descriptor */
+ RecordIOData **precord, /* metadata cache location */
+ HeapTupleHeader defaultval, /* default record value if any */
+ MemoryContext mcxt, /* cache memory context */
+ bool is_json, /* json/jsonb */
+ HTAB *json_hash, /* valid only if json */
+ JsonbContainer *cont) /* valid only if jsonb */
+{
+ RecordIOData *record = *precord;
+ Datum *values;
+ bool *nulls;
+ HeapTuple res;
+ int ncolumns = tupdesc->natts;
+ int i;
/*
- * We arrange to look up the needed I/O info just once per series of
- * calls, assuming the record type doesn't change underneath us.
+ * if the input json is empty, we can only skip the rest if we were
+ * passed in a non-null record, since otherwise there may be issues
+ * with domain nulls.
*/
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- if (my_extra == NULL ||
- my_extra->ncolumns != ncolumns)
- {
- fcinfo->flinfo->fn_extra =
- MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- my_extra->record_type = InvalidOid;
- my_extra->record_typmod = 0;
- my_extra->ncolumns = ncolumns;
- MemSet(my_extra->columns, 0, sizeof(ColumnIOData) * ncolumns);
- }
-
- if (have_record_arg && (my_extra->record_type != tupType ||
- my_extra->record_typmod != tupTypmod))
+ if ((is_json ? hash_get_num_entries(json_hash) == 0
+ : !cont || JsonContainerIsEmpty(cont)) && defaultval)
+ return defaultval;
+
+ /* (re)allocate metadata cache */
+ if (record == NULL ||
+ record->ncolumns != ncolumns)
+ *precord = record = allocate_record_info(mcxt, ncolumns);
+
+ /* invalidate metadata cache if the record type has changed */
+ if (record->record_type != tupdesc->tdtypeid ||
+ record->record_typmod != tupdesc->tdtypmod)
{
- MemSet(my_extra, 0,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra->record_type = tupType;
- my_extra->record_typmod = tupTypmod;
- my_extra->ncolumns = ncolumns;
+ MemSet(record, 0, offsetof(RecordIOData, columns) +
+ ncolumns * sizeof(ColumnIOData));
+ record->record_type = tupdesc->tdtypeid;
+ record->record_typmod = tupdesc->tdtypmod;
+ record->ncolumns = ncolumns;
}
values = (Datum *) palloc(ncolumns * sizeof(Datum));
nulls = (bool *) palloc(ncolumns * sizeof(bool));
- if (rec)
+ if (defaultval)
{
+ HeapTupleData tuple;
+
+ /* Build a temporary HeapTuple control structure */
+ tuple.t_len = HeapTupleHeaderGetDatumLength(defaultval);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = defaultval;
+
/* Break down the tuple into fields */
heap_deform_tuple(&tuple, tupdesc, values, nulls);
}
@@ -2265,31 +2406,34 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
for (i = 0; i < ncolumns; ++i)
{
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- JsonbValue *v = NULL;
- JsonHashEntry *hashentry = NULL;
+ Form_pg_attribute att = tupdesc->attrs[i];
+ char *colname = NameStr(att->attname);
+ JsonHashEntry *hashentry = NULL;
+ JsValue jsv;
/* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
+ if (att->attisdropped)
{
nulls[i] = true;
continue;
}
- if (jtype == JSONOID)
- {
- hashentry = hash_search(json_hash,
- NameStr(tupdesc->attrs[i]->attname),
- HASH_FIND, NULL);
- }
- else
+ jsv.is_json = is_json;
+
+ if (is_json)
{
- char *key = NameStr(tupdesc->attrs[i]->attname);
+ hashentry = hash_search(json_hash, colname, HASH_FIND, NULL);
+
+ jsv.val.json.type = hashentry ? hashentry->type : JSON_TOKEN_NULL;
+ jsv.val.json.str = jsv.val.json.type == JSON_TOKEN_NULL ? NULL :
+ hashentry->val;
+ jsv.val.json.len = jsv.val.json.str ? -1 : 0; /* null-terminated */
- v = findJsonbValueFromContainerLen(&jb->root, JB_FOBJECT, key,
- strlen(key));
}
+ else
+ jsv.val.jsonb = !cont ? NULL :
+ findJsonbValueFromContainerLen(cont, JB_FOBJECT, colname,
+ strlen(colname));
/*
* we can't just skip here if the key wasn't found since we might have
@@ -2299,73 +2443,159 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
* then every field which we don't populate needs to be run through
* the input function just in case it's a domain type.
*/
- if (((jtype == JSONOID && hashentry == NULL) ||
- (jtype == JSONBOID && v == NULL)) && rec)
+ if ((is_json ? !jsv.val.json.str : !jsv.val.jsonb) && defaultval)
continue;
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
+ values[i] = populate_record_field(&record->columns[i],
+ att->atttypid,
+ att->atttypmod,
+ colname,
+ mcxt,
+ nulls[i] ? PointerGetDatum(NULL)
+ : values[i],
+ &jsv,
+ &nulls[i]);
+ }
+
+ res = heap_form_tuple(tupdesc, values, nulls);
+
+ pfree(values);
+ pfree(nulls);
+
+ return res->t_data;
+}
+
+/* structure to cache metadata needed for populate_record_worker() */
+typedef struct PopulateRecordCache
+{
+ Oid argtype; /* verified row type of the first argument */
+ CompositeIOData io; /* metadata cache for populate_composite() */
+} PopulateRecordCache;
+
+static Datum
+populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
+ bool have_record_arg)
+{
+ int json_arg_num = have_record_arg ? 1 : 0;
+ Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
+ JsValue jsv;
+ HeapTupleHeader rec = NULL;
+ Oid tupType;
+ int32 tupTypmod;
+ TupleDesc tupdesc = NULL;
+ Datum rettuple;
+ MemoryContext fnmcxt = fcinfo->flinfo->fn_mcxt;
+ PopulateRecordCache *cache = fcinfo->flinfo->fn_extra;
+
+ Assert(jtype == JSONOID || jtype == JSONBOID);
+
+ /*
+ * We arrange to look up the needed I/O info just once per series of
+ * calls, assuming the record type doesn't change underneath us.
+ */
+ if (!cache)
+ fcinfo->flinfo->fn_extra = cache =
+ MemoryContextAllocZero(fnmcxt, sizeof(*cache));
+
+ if (have_record_arg)
+ {
+ Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+
+ if (cache->argtype != argtype)
{
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- fcinfo->flinfo->fn_mcxt);
- column_info->column_type = column_type;
+ if (!type_is_rowtype(argtype))
+ ereport(ERROR,
+ (errcode(ERRCODE_DATATYPE_MISMATCH),
+ errmsg("first argument of %s must be a row type",
+ funcname)));
+
+ cache->argtype = argtype;
}
- if ((jtype == JSONOID && (hashentry == NULL || hashentry->isnull)) ||
- (jtype == JSONBOID && (v == NULL || v->type == jbvNull)))
+
+ if (PG_ARGISNULL(0))
{
+ if (PG_ARGISNULL(1))
+ PG_RETURN_NULL();
+
/*
- * need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
+ * have no tuple to look at, so the only source of type info is
+ * the argtype. The lookup_rowtype_tupdesc call below will error
+ * out if we don't have a known composite type oid here.
*/
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
+ tupType = argtype;
+ tupTypmod = -1;
}
else
{
- char *s = NULL;
+ rec = PG_GETARG_HEAPTUPLEHEADER(0);
- if (jtype == JSONOID)
- {
- /* already done the hard work in the json case */
- s = hashentry->val;
- }
- else
- {
- if (v->type == jbvString)
- s = pnstrdup(v->val.string.val, v->val.string.len);
- else if (v->type == jbvBool)
- s = pnstrdup((v->val.boolean) ? "t" : "f", 1);
- else if (v->type == jbvNumeric)
- s = DatumGetCString(DirectFunctionCall1(numeric_out,
- PointerGetDatum(v->val.numeric)));
- else if (v->type == jbvBinary)
- s = JsonbToCString(NULL, (JsonbContainer *) v->val.binary.data, v->val.binary.len);
- else
- elog(ERROR, "unrecognized jsonb type: %d", (int) v->type);
- }
+ if (PG_ARGISNULL(1))
+ PG_RETURN_POINTER(rec);
- values[i] = InputFunctionCall(&column_info->proc, s,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
+ /* Extract type info from the tuple itself */
+ tupType = HeapTupleHeaderGetTypeId(rec);
+ tupTypmod = HeapTupleHeaderGetTypMod(rec);
}
}
+ else
+ {
+ /* json{b}_to_record case */
+ if (PG_ARGISNULL(0))
+ PG_RETURN_NULL();
+
+ if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
+ ereport(ERROR,
+ (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+ errmsg("function returning record called in context "
+ "that cannot accept type record"),
+ errhint("Try calling the function in the FROM clause "
+ "using a column definition list.")));
- rettuple = heap_form_tuple(tupdesc, values, nulls);
+ Assert(tupdesc);
- ReleaseTupleDesc(tupdesc);
+ /*
+ * Add tupdesc to the cache and set the appropriate values of
+ * tupType/tupTypmod for proper cache usage in populate_composite().
+ */
+ cache->io.tupdesc = tupdesc;
- if (json_hash)
- hash_destroy(json_hash);
+ tupType = tupdesc->tdtypeid;
+ tupTypmod = tupdesc->tdtypmod;
+ }
- PG_RETURN_DATUM(HeapTupleGetDatum(rettuple));
+ jsv.is_json = jtype == JSONOID;
+
+ if (jsv.is_json)
+ {
+ text *json = PG_GETARG_TEXT_P(json_arg_num);
+
+ jsv.val.json.str = VARDATA(json);
+ jsv.val.json.len = VARSIZE(json) - VARHDRSZ;
+ jsv.val.json.type = JSON_TOKEN_INVALID; /* not used in populate_composite() */
+ }
+ else
+ {
+ Jsonb *jb = PG_GETARG_JSONB(json_arg_num);
+ JsonbValue jbv;
+
+ jsv.val.jsonb = &jbv;
+
+ /* fill binary jsonb value pointing to jb */
+ jbv.type = jbvBinary;
+ jbv.val.binary.data = &jb->root;
+ jbv.val.binary.len = VARSIZE(jb) - VARHDRSZ;
+ }
+
+ rettuple = populate_composite(&cache->io, tupType, tupTypmod,
+ NULL, fnmcxt, rec, &jsv);
+
+ if (tupdesc)
+ {
+ cache->io.tupdesc = NULL;
+ ReleaseTupleDesc(tupdesc);
+ }
+
+ PG_RETURN_DATUM(rettuple);
}
/*
@@ -2374,12 +2604,12 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
* decompose a json object into a hash table.
*/
static HTAB *
-get_json_object_as_hash(text *json, const char *funcname)
+get_json_object_as_hash(char *json, int len, const char *funcname)
{
HASHCTL ctl;
HTAB *tab;
JHashState *state;
- JsonLexContext *lex = makeJsonLexContext(json, true);
+ JsonLexContext *lex = makeJsonLexContextCstringLen(json, len, true);
JsonSemAction *sem;
memset(&ctl, 0, sizeof(ctl));
@@ -2417,6 +2647,9 @@ hash_object_field_start(void *state, char *fname, bool isnull)
if (_state->lex->lex_level > 1)
return;
+ /* remember token type */
+ _state->saved_token_type = _state->lex->token_type;
+
if (_state->lex->token_type == JSON_TOKEN_ARRAY_START ||
_state->lex->token_type == JSON_TOKEN_OBJECT_START)
{
@@ -2460,7 +2693,9 @@ hash_object_field_end(void *state, char *fname, bool isnull)
* that, a later field with the same name overrides the earlier field.
*/
- hashentry->isnull = isnull;
+ hashentry->type = _state->saved_token_type;
+ Assert(isnull == (hashentry->type == JSON_TOKEN_NULL));
+
if (_state->save_json_start != NULL)
{
int len = _state->lex->prev_token_terminator - _state->save_json_start;
@@ -2499,7 +2734,11 @@ hash_scalar(void *state, char *token, JsonTokenType tokentype)
errmsg("cannot call %s on a scalar", _state->function_name)));
if (_state->lex->lex_level == 1)
+ {
_state->saved_scalar = token;
+ /* saved_token_type must already be set in hash_object_field_start() */
+ Assert(_state->saved_token_type == tokentype);
+ }
}
@@ -2538,121 +2777,24 @@ json_to_recordset(PG_FUNCTION_ARGS)
}
static void
-make_row_from_rec_and_jsonb(Jsonb *element, PopulateRecordsetState *state)
+populate_recordset_record(PopulateRecordsetState *state,
+ bool is_json, HTAB *json_hash, JsonbContainer *jsonb)
{
- Datum *values;
- bool *nulls;
- int i;
- RecordIOData *my_extra = state->my_extra;
- int ncolumns = my_extra->ncolumns;
- TupleDesc tupdesc = state->ret_tdesc;
- HeapTupleHeader rec = state->rec;
- HeapTuple rettuple;
-
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
-
- if (state->rec)
- {
- HeapTupleData tuple;
-
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(state->rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = state->rec;
-
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
- }
- else
- {
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
- }
-
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- JsonbValue *v = NULL;
- char *key;
-
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
-
- key = NameStr(tupdesc->attrs[i]->attname);
-
- v = findJsonbValueFromContainerLen(&element->root, JB_FOBJECT,
- key, strlen(key));
-
- /*
- * We can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (v == NULL && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- state->fn_mcxt);
- column_info->column_type = column_type;
- }
- if (v == NULL || v->type == jbvNull)
- {
- /*
- * Need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
- */
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
- }
- else
- {
- char *s = NULL;
-
- if (v->type == jbvString)
- s = pnstrdup(v->val.string.val, v->val.string.len);
- else if (v->type == jbvBool)
- s = pnstrdup((v->val.boolean) ? "t" : "f", 1);
- else if (v->type == jbvNumeric)
- s = DatumGetCString(DirectFunctionCall1(numeric_out,
- PointerGetDatum(v->val.numeric)));
- else if (v->type == jbvBinary)
- s = JsonbToCString(NULL, (JsonbContainer *) v->val.binary.data, v->val.binary.len);
- else
- elog(ERROR, "unrecognized jsonb type: %d", (int) v->type);
-
- values[i] = InputFunctionCall(&column_info->proc, s,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
- }
- }
-
- rettuple = heap_form_tuple(tupdesc, values, nulls);
-
- tuplestore_puttuple(state->tuple_store, rettuple);
+ HeapTupleData tuple;
+ HeapTupleHeader tuphead = populate_record(state->ret_tdesc,
+ state->my_extra,
+ state->rec,
+ state->fn_mcxt,
+ is_json,
+ json_hash,
+ jsonb);
+
+ tuple.t_len = HeapTupleHeaderGetDatumLength(tuphead);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = tuphead;
+
+ tuplestore_puttuple(state->tuple_store, &tuple);
}
/*
@@ -2666,12 +2808,8 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
ReturnSetInfo *rsi;
MemoryContext old_cxt;
- Oid tupType;
- int32 tupTypmod;
HeapTupleHeader rec;
TupleDesc tupdesc;
- RecordIOData *my_extra;
- int ncolumns;
PopulateRecordsetState *state;
if (have_record_arg)
@@ -2717,38 +2855,6 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
else
rec = PG_GETARG_HEAPTUPLEHEADER(0);
- tupType = tupdesc->tdtypeid;
- tupTypmod = tupdesc->tdtypmod;
- ncolumns = tupdesc->natts;
-
- /*
- * We arrange to look up the needed I/O info just once per series of
- * calls, assuming the record type doesn't change underneath us.
- */
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- if (my_extra == NULL ||
- my_extra->ncolumns != ncolumns)
- {
- fcinfo->flinfo->fn_extra =
- MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- my_extra->record_type = InvalidOid;
- my_extra->record_typmod = 0;
- }
-
- if (my_extra->record_type != tupType ||
- my_extra->record_typmod != tupTypmod)
- {
- MemSet(my_extra, 0,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra->record_type = tupType;
- my_extra->record_typmod = tupTypmod;
- my_extra->ncolumns = ncolumns;
- }
-
state = palloc0(sizeof(PopulateRecordsetState));
/* make these in a sufficiently long-lived memory context */
@@ -2761,7 +2867,7 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
MemoryContextSwitchTo(old_cxt);
state->function_name = funcname;
- state->my_extra = my_extra;
+ state->my_extra = (RecordIOData **) &fcinfo->flinfo->fn_extra;
state->rec = rec;
state->fn_mcxt = fcinfo->flinfo->fn_mcxt;
@@ -2812,14 +2918,15 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
if (r == WJB_ELEM)
{
- Jsonb *element = JsonbValueToJsonb(&v);
-
- if (!JB_ROOT_IS_OBJECT(element))
+ if (v.type != jbvBinary ||
+ !JsonContainerIsObject(v.val.binary.data))
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("argument of %s must be an array of objects",
funcname)));
- make_row_from_rec_and_jsonb(element, state);
+
+ populate_recordset_record(state, false, NULL,
+ v.val.binary.data);
}
}
}
@@ -2863,115 +2970,16 @@ static void
populate_recordset_object_end(void *state)
{
PopulateRecordsetState *_state = (PopulateRecordsetState *) state;
- HTAB *json_hash = _state->json_hash;
- Datum *values;
- bool *nulls;
- int i;
- RecordIOData *my_extra = _state->my_extra;
- int ncolumns = my_extra->ncolumns;
- TupleDesc tupdesc = _state->ret_tdesc;
- JsonHashEntry *hashentry;
- HeapTupleHeader rec = _state->rec;
- HeapTuple rettuple;
/* Nested objects require no special processing */
if (_state->lex->lex_level > 1)
return;
/* Otherwise, construct and return a tuple based on this level-1 object */
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
-
- if (_state->rec)
- {
- HeapTupleData tuple;
-
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(_state->rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = _state->rec;
-
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
- }
- else
- {
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
- }
-
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- char *value;
-
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
-
- hashentry = hash_search(json_hash,
- NameStr(tupdesc->attrs[i]->attname),
- HASH_FIND, NULL);
-
- /*
- * we can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (hashentry == NULL && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- _state->fn_mcxt);
- column_info->column_type = column_type;
- }
- if (hashentry == NULL || hashentry->isnull)
- {
- /*
- * need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
- */
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
- }
- else
- {
- value = hashentry->val;
-
- values[i] = InputFunctionCall(&column_info->proc, value,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
- }
- }
-
- rettuple = heap_form_tuple(tupdesc, values, nulls);
-
- tuplestore_puttuple(_state->tuple_store, rettuple);
+ populate_recordset_record(_state, true, _state->json_hash, NULL);
/* Done with hash for this object */
- hash_destroy(json_hash);
+ hash_destroy(_state->json_hash);
_state->json_hash = NULL;
}
@@ -3017,6 +3025,8 @@ populate_recordset_object_field_start(void *state, char *fname, bool isnull)
if (_state->lex->lex_level > 2)
return;
+ _state->saved_token_type = _state->lex->token_type;
+
if (_state->lex->token_type == JSON_TOKEN_ARRAY_START ||
_state->lex->token_type == JSON_TOKEN_OBJECT_START)
{
@@ -3058,7 +3068,9 @@ populate_recordset_object_field_end(void *state, char *fname, bool isnull)
* that, a later field with the same name overrides the earlier field.
*/
- hashentry->isnull = isnull;
+ hashentry->type = _state->saved_token_type;
+ Assert(isnull == (hashentry->type == JSON_TOKEN_NULL));
+
if (_state->save_json_start != NULL)
{
int len = _state->lex->prev_token_terminator - _state->save_json_start;
0003-recursive-json_populate_record-v04.patchtext/x-patch; name=0003-recursive-json_populate_record-v04.patchDownload
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index b214218..6d666b6 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -11257,12 +11257,12 @@ table2-mapping
whose columns match the record type defined by <replaceable>base</>
(see note below).
</entry>
- <entry><literal>select * from json_populate_record(null::myrowtype, '{"a":1,"b":2}')</literal></entry>
+ <entry><literal>select * from json_populate_record(null::myrowtype, '{"a": 1, "b": ["2", "a b"], "c": {"d": 4, "e": "a b c"}}')</literal></entry>
<entry>
<programlisting>
- a | b
----+---
- 1 | 2
+ a | b | c
+---+-----------+-------------
+ 1 | {2,"a b"} | (4,"a b c")
</programlisting>
</entry>
</row>
@@ -11351,12 +11351,12 @@ table2-mapping
explicitly define the structure of the record with an <literal>AS</>
clause.
</entry>
- <entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":"bar"}') as x(a int, b text, d text) </literal></entry>
+ <entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":[1,2,3],"e":"bar","r": {"a": 123, "b": "a b c"}}') as x(a int, b text, c int[], d text, r myrowtype) </literal></entry>
<entry>
<programlisting>
- a | b | d
----+---------+---
- 1 | [1,2,3] |
+ a | b | c | d | r
+---+---------+---------+---+---------------
+ 1 | [1,2,3] | {1,2,3} | | (123,"a b c")
</programlisting>
</entry>
</row>
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index 95f7e0a..39c6c70 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -31,6 +31,7 @@
#include "utils/jsonb.h"
#include "utils/lsyscache.h"
#include "utils/memutils.h"
+#include "utils/syscache.h"
#include "utils/typcache.h"
/* Operations available for setPath */
@@ -130,6 +131,14 @@ typedef struct ScalarIOData
typedef struct ColumnIOData ColumnIOData;
typedef struct RecordIOData RecordIOData;
+/* structure to cache metadata needed for populate_array() */
+typedef struct ArrayIOData
+{
+ ColumnIOData *element_info; /* metadata cache */
+ Oid element_type; /* array element type id */
+ int32 element_typmod; /* array element type modifier */
+} ArrayIOData;
+
/* structure to cache metadata needed for populate_composite() */
typedef struct CompositeIOData
{
@@ -141,6 +150,24 @@ typedef struct CompositeIOData
TupleDesc tupdesc; /* cached tuple descriptor */
} CompositeIOData;
+/* structure to cache metadata needed for populate_domain() */
+typedef struct DomainIOData
+{
+ ColumnIOData *base_io; /* metadata cache */
+ Oid base_typid; /* base type id */
+ int32 base_typmod; /* base type modifier */
+ void *domain_info; /* opaque cache for domain checks */
+} DomainIOData;
+
+/* enumeration type categories */
+typedef enum TypeCat
+{
+ TYPECAT_SCALAR = 's',
+ TYPECAT_ARRAY = 'a',
+ TYPECAT_COMPOSITE = 'c',
+ TYPECAT_DOMAIN = 'd',
+} TypeCat;
+
/* these two are stolen from hstore / record_out, used in populate_record* */
/* structure to cache record metadata needed for populate_record_field() */
@@ -148,8 +175,15 @@ struct ColumnIOData
{
Oid typid; /* column type id */
int32 typmod; /* column type modifier */
+ TypeCat typcat; /* column type category */
ScalarIOData scalar_io; /* metadata cache for direct conversion
* through input function */
+ union
+ {
+ ArrayIOData array;
+ CompositeIOData composite;
+ DomainIOData domain;
+ } io; /* metadata cache for various column type categories */
};
/* structure to cache record metadata needed for populate_record() */
@@ -177,6 +211,29 @@ typedef struct PopulateRecordsetState
MemoryContext fn_mcxt; /* used to stash IO funcs */
} PopulateRecordsetState;
+/* common data for populate_array_json() and populate_array_dim_jsonb() */
+typedef struct PopulateArrayContext
+{
+ ArrayBuildState *astate; /* array build state */
+ ArrayIOData *aio; /* metadata cache */
+ MemoryContext acxt; /* array build memory context */
+ MemoryContext mcxt; /* cache memory context */
+ const char *colname; /* for diagnostics only */
+ int *dims; /* dimensions */
+ int *sizes; /* current dimension counters */
+ int ndims; /* number of dimensions */
+} PopulateArrayContext;
+
+/* state for populate_array_json() */
+typedef struct PopulateArrayState
+{
+ JsonLexContext *lex; /* json lexer */
+ PopulateArrayContext *ctx; /* context */
+ char *element_start; /* start of the current array element */
+ char *element_scalar; /* current array element token if it is a scalar */
+ JsonTokenType element_type; /* current array element type */
+} PopulateArrayState;
+
/* state for json_strip_nulls */
typedef struct StripnullState
{
@@ -2148,6 +2205,358 @@ json_to_record(PG_FUNCTION_ARGS)
return populate_record_worker(fcinfo, "json_to_record", false);
}
+/* helper function for diagnostics */
+static void
+populate_array_report_expected_array(PopulateArrayContext *ctx, int ndim)
+{
+ if (ndim <= 0)
+ {
+ if (ctx->colname)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("expected json array"),
+ errhint("see the value of key \"%s\"", ctx->colname)));
+ else
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("expected json array")));
+ }
+ else
+ {
+ StringInfoData indices;
+ int i;
+
+ initStringInfo(&indices);
+
+ Assert(ctx->ndims > 0 && ndim < ctx->ndims);
+
+ for (i = 0; i < ndim; i++)
+ appendStringInfo(&indices, "[%d]", ctx->sizes[i]);
+
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("expected json array"),
+ errhint(ctx->colname ? "see the array element %s of key \"%s\""
+ : "see the array element %s",
+ indices.data, ctx->colname)));
+ }
+}
+
+/* assign number of dimensions of the populated array once it becomes known */
+static void
+populate_array_assign_ndims(PopulateArrayContext *ctx, int ndims)
+{
+ int i;
+
+ Assert(ctx->ndims <= 0);
+
+ if (ndims <= 0)
+ populate_array_report_expected_array(ctx, ndims);
+
+ ctx->ndims = ndims;
+ ctx->dims = palloc(sizeof(int) * ndims);
+ ctx->sizes = palloc0(sizeof(int) * ndims);
+
+ for (i = 0; i < ndims; i++)
+ ctx->dims[i] = -1; /* dimensions are unknown yet */
+}
+
+/* check populated subarray dimension */
+static void
+populate_array_check_dimension(PopulateArrayContext *ctx, int ndim)
+{
+ int dim = ctx->sizes[ndim]; /* current dimension counter */
+
+ if (ctx->dims[ndim] == -1)
+ ctx->dims[ndim] = dim; /* assign dimension if unknown yet */
+ else if (ctx->dims[ndim] != dim)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("malformed json array"),
+ errdetail("Multidimensional arrays must have "
+ "sub-arrays with matching dimensions.")));
+
+ /* reset current array dimension size counter */
+ ctx->sizes[ndim] = 0;
+
+ /* increment parent dimension counter if it is a nested sub-array */
+ if (ndim > 0)
+ ctx->sizes[ndim - 1]++;
+}
+
+static void
+populate_array_element(PopulateArrayContext *ctx, int ndim, JsValue *jsv)
+{
+ Datum element;
+ bool element_isnull;
+
+ /* populate array element */
+ element = populate_record_field(ctx->aio->element_info,
+ ctx->aio->element_type,
+ ctx->aio->element_typmod,
+ NULL, ctx->mcxt, PointerGetDatum(NULL),
+ jsv, &element_isnull);
+
+ accumArrayResult(ctx->astate, element, element_isnull,
+ ctx->aio->element_type, ctx->acxt);
+
+ Assert(ndim > 0);
+ ctx->sizes[ndim - 1]++; /* increment current dimension counter */
+}
+
+/* json object start handler for populate_array_json() */
+static void
+populate_array_object_start(void *_state)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (state->ctx->ndims <= 0)
+ populate_array_assign_ndims(state->ctx, ndim);
+ else if (ndim < state->ctx->ndims)
+ populate_array_report_expected_array(state->ctx, ndim);
+}
+
+/* json array end handler for populate_array_json() */
+static void
+populate_array_array_end(void *_state)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ PopulateArrayContext *ctx = state->ctx;
+ int ndim = state->lex->lex_level;
+
+ if (ctx->ndims <= 0)
+ populate_array_assign_ndims(ctx, ndim + 1);
+
+ if (ndim < ctx->ndims)
+ populate_array_check_dimension(ctx, ndim);
+}
+
+/* json array element start handler for populate_array_json() */
+static void
+populate_array_element_start(void *_state, bool isnull)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (state->ctx->ndims <= 0 || ndim == state->ctx->ndims)
+ {
+ /* remember current array element start */
+ state->element_start = state->lex->token_start;
+ state->element_type = state->lex->token_type;
+ state->element_scalar = NULL;
+ }
+}
+
+/* json array element end handler for populate_array_json() */
+static void
+populate_array_element_end(void *_state, bool isnull)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ PopulateArrayContext *ctx = state->ctx;
+ int ndim = state->lex->lex_level;
+
+ Assert(ctx->ndims > 0);
+
+ if (ndim == ctx->ndims)
+ {
+ JsValue jsv;
+
+ jsv.is_json = true;
+ jsv.val.json.type = state->element_type;
+
+ if (isnull)
+ {
+ Assert(jsv.val.json.type == JSON_TOKEN_NULL);
+ jsv.val.json.str = NULL;
+ jsv.val.json.len = 0;
+ }
+ else if (state->element_scalar)
+ {
+ jsv.val.json.str = state->element_scalar;
+ jsv.val.json.len = -1; /* null-terminated */
+ }
+ else
+ {
+ jsv.val.json.str = state->element_start;
+ jsv.val.json.len = (state->lex->prev_token_terminator -
+ state->element_start) * sizeof(char);
+ }
+
+ populate_array_element(ctx, ndim, &jsv);
+ }
+}
+
+/* json scalar handler for populate_array_json() */
+static void
+populate_array_scalar(void *_state, char *token, JsonTokenType tokentype)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ PopulateArrayContext *ctx = state->ctx;
+ int ndim = state->lex->lex_level;
+
+ if (ctx->ndims <= 0)
+ populate_array_assign_ndims(ctx, ndim);
+ else if (ndim < ctx->ndims)
+ populate_array_report_expected_array(ctx, ndim);
+
+ if (ndim == ctx->ndims)
+ {
+ /* remember scalar element token */
+ state->element_scalar = token;
+ /* element_type must already be set in populate_array_element_start() */
+ Assert(state->element_type == tokentype);
+ }
+}
+
+/* parse json array and populate array */
+static void
+populate_array_json(PopulateArrayContext *ctx, char *json, int len)
+{
+ PopulateArrayState state;
+ JsonSemAction sem;
+
+ state.lex = makeJsonLexContextCstringLen(json, len, true);
+ state.ctx = ctx;
+
+ memset(&sem, 0, sizeof(sem));
+ sem.semstate = (void *) &state;
+ sem.object_start = populate_array_object_start;
+ sem.array_end = populate_array_array_end;
+ sem.array_element_start = populate_array_element_start;
+ sem.array_element_end = populate_array_element_end;
+ sem.scalar = populate_array_scalar;
+
+ pg_parse_json(state.lex, &sem);
+
+ /* number of dimensions should be already known */
+ Assert(ctx->ndims > 0 && ctx->dims);
+
+ pfree(state.lex);
+}
+
+/*
+ * populate_array_dim_jsonb() -- Iterate recursively through jsonb sub-array
+ * elements and accumulate result using given ArrayBuildState.
+ */
+static void
+populate_array_dim_jsonb(PopulateArrayContext *ctx, /* context */
+ JsonbValue *jbv, /* jsonb sub-array */
+ int ndim) /* current dimension */
+{
+ JsonbContainer *jbc = jbv->val.binary.data;
+ JsonbIterator *it;
+ JsonbIteratorToken tok;
+ JsonbValue val;
+ JsValue jsv;
+
+ check_stack_depth();
+
+ if (jbv->type != jbvBinary || !JsonContainerIsArray(jbc))
+ populate_array_report_expected_array(ctx, ndim - 1);
+
+ Assert(!JsonContainerIsScalar(jbc));
+
+ it = JsonbIteratorInit(jbc);
+
+ tok = JsonbIteratorNext(&it, &val, true);
+ Assert(tok == WJB_BEGIN_ARRAY);
+
+ tok = JsonbIteratorNext(&it, &val, true);
+
+ /*
+ * If the number of dimensions is yet unknown and
+ * we have found the end of the array or the first child element is not an array
+ * then assign number of dimensions now.
+ */
+ if (ctx->ndims <= 0 &&
+ (tok == WJB_END_ARRAY ||
+ (tok == WJB_ELEM &&
+ (val.type != jbvBinary ||
+ !JsonContainerIsArray(val.val.binary.data)))))
+ populate_array_assign_ndims(ctx, ndim);
+
+ jsv.is_json = false;
+ jsv.val.jsonb = &val;
+
+ /* process all array elements */
+ while (tok == WJB_ELEM)
+ {
+ /*
+ * Recurse only if the number of dimensions is yet unknown or
+ * if it is not the innermost dimension.
+ */
+ if (ctx->ndims > 0 && ndim >= ctx->ndims)
+ populate_array_element(ctx, ndim, &jsv);
+ else
+ {
+ /* populate child sub-array */
+ populate_array_dim_jsonb(ctx, &val, ndim + 1);
+
+ /* number of dimensions should be already known */
+ Assert(ctx->ndims > 0 && ctx->dims);
+
+ populate_array_check_dimension(ctx, ndim);
+ }
+
+ tok = JsonbIteratorNext(&it, &val, true);
+ }
+
+ Assert(tok == WJB_END_ARRAY);
+
+ /* free iterator, iterating until WJB_DONE */
+ tok = JsonbIteratorNext(&it, &val, true);
+ Assert(tok == WJB_DONE && !it);
+}
+
+/* populate recursively json/jsonb array */
+static Datum
+populate_array(ArrayIOData *aio, /* metadata cache */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ JsValue *jsv) /* json/jsonb array */
+{
+ PopulateArrayContext ctx;
+ Datum result;
+ int *lbs;
+ int i;
+
+ ctx.aio = aio;
+ ctx.mcxt = mcxt;
+ ctx.acxt = CurrentMemoryContext;
+ ctx.astate = initArrayResult(aio->element_type, ctx.acxt, true);
+ ctx.colname = colname;
+ ctx.ndims = 0; /* unknown yet */
+ ctx.dims = NULL;
+ ctx.sizes = NULL;
+
+ if (jsv->is_json)
+ populate_array_json(&ctx, jsv->val.json.str,
+ jsv->val.json.len >= 0 ? jsv->val.json.len
+ : strlen(jsv->val.json.str));
+ else
+ {
+ populate_array_dim_jsonb(&ctx, jsv->val.jsonb, 1);
+ ctx.dims[0] = ctx.sizes[0];
+ }
+
+ Assert(ctx.ndims > 0);
+
+ lbs = palloc(sizeof(int) * ctx.ndims);
+
+ for (i = 0; i < ctx.ndims; i++)
+ lbs[i] = 1;
+
+ result = makeMdArrayResult(ctx.astate, ctx.ndims, ctx.dims, lbs,
+ ctx.acxt, true);
+
+ pfree(ctx.dims);
+ pfree(ctx.sizes);
+ pfree(lbs);
+
+ return result;
+}
+
/* populate recursively composite (row type) value from json/jsonb */
static Datum
populate_composite(CompositeIOData *io, /* metadata cache */
@@ -2220,26 +2629,35 @@ populate_scalar(ScalarIOData *io, /* metadata cache */
JsValue *jsv) /* json/jsonb value to convert */
{
Datum res;
- char *str;
+ char *str = NULL;
char *json = NULL;
- if (!jsv)
- {
- /*
- * Need InputFunctionCall to happen even for NULLs, so that domain
- * checks are done.
- */
- str = NULL;
- }
- else if (jsv->is_json)
+ if (jsv->is_json)
{
- /* already done the hard work in the json case */
int len = jsv->val.json.len;
json = jsv->val.json.str;
Assert(json);
- if (len >= 0)
+ /* already done the hard work in the json case */
+ if ((typid == JSONOID || typid == JSONBOID) &&
+ jsv->val.json.type == JSON_TOKEN_STRING)
+ {
+ /*
+ * Add quotes around string value (should be already escaped)
+ * if converting to json/jsonb.
+ */
+
+ if (len < 0)
+ len = strlen(json);
+
+ str = palloc(len + sizeof(char) * 3);
+ str[0] = '"';
+ memcpy(&str[1], json, len);
+ str[len + 1] = '"';
+ str[len + 2] = '\0';
+ }
+ else if (len >= 0)
{
/* Need to copy non-null-terminated string */
str = palloc(len + 1 * sizeof(char));
@@ -2253,10 +2671,25 @@ populate_scalar(ScalarIOData *io, /* metadata cache */
{
JsonbValue *jbv = jsv->val.jsonb;
- if (jbv->type == jbvString) /* quotes are stripped */
+ if (typid == JSONBOID)
+ {
+ Jsonb *jsonb = JsonbValueToJsonb(jbv); /* directly use jsonb */
+ return JsonbGetDatum(jsonb);
+ }
+ /* convert jsonb to string for typio call */
+ else if (typid == JSONOID && jbv->type != jbvBinary)
+ {
+ /*
+ * Convert scalar jsonb (non-scalars are passed here as jbvBinary)
+ * to json string, preserving quotes around top-level strings.
+ */
+ Jsonb *jsonb = JsonbValueToJsonb(jbv);
+ str = JsonbToCString(NULL, &jsonb->root, VARSIZE(jsonb));
+ }
+ else if (jbv->type == jbvString) /* quotes are stripped */
str = pnstrdup(jbv->val.string.val, jbv->val.string.len);
else if (jbv->type == jbvBool)
- str = pnstrdup(jbv->val.boolean ? "t" : "f", 1);
+ str = pstrdup(jbv->val.boolean ? "true" : "false");
else if (jbv->type == jbvNumeric)
str = DatumGetCString(DirectFunctionCall1(numeric_out,
PointerGetDatum(jbv->val.numeric)));
@@ -2264,10 +2697,7 @@ populate_scalar(ScalarIOData *io, /* metadata cache */
str = JsonbToCString(NULL, jbv->val.binary.data,
jbv->val.binary.len);
else
- {
elog(ERROR, "unrecognized jsonb type: %d", (int) jbv->type);
- str = NULL;
- }
}
res = InputFunctionCall(&io->typiofunc, str, io->typioparam, typmod);
@@ -2279,6 +2709,32 @@ populate_scalar(ScalarIOData *io, /* metadata cache */
return res;
}
+static Datum
+populate_domain(DomainIOData *io, /* metadata cache */
+ Oid typid, /* domain type id */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ JsValue *jsv, /* json/jsonb value to convert */
+ bool isnull) /* value is NULL */
+{
+ Datum res;
+
+ if (isnull)
+ res = (Datum) 0;
+ else
+ {
+ res = populate_record_field(io->base_io,
+ io->base_typid, io->base_typmod,
+ colname, mcxt, PointerGetDatum(NULL),
+ jsv, &isnull);
+ Assert(!isnull);
+ }
+
+ domain_check(res, isnull, typid, &io->domain_info, mcxt);
+
+ return res;
+}
+
/* prepare column metadata cache for the given type */
static void
prepare_column_cache(ColumnIOData *column, /* metadata cache */
@@ -2287,13 +2743,55 @@ prepare_column_cache(ColumnIOData *column, /* metadata cache */
MemoryContext mcxt, /* cache memory context */
bool json) /* json/jsonb */
{
- Oid typioproc;
+ HeapTuple tup;
+ Form_pg_type type;
column->typid = typid;
column->typmod = typmod;
- getTypeInputInfo(typid, &typioproc, &column->scalar_io.typioparam);
- fmgr_info_cxt(typioproc, &column->scalar_io.typiofunc, mcxt);
+ tup = SearchSysCache1(TYPEOID, ObjectIdGetDatum(typid));
+ if (!HeapTupleIsValid(tup))
+ elog(ERROR, "cache lookup failed for type %u", typid);
+
+ type = (Form_pg_type) GETSTRUCT(tup);
+
+ if (type->typtype == TYPTYPE_DOMAIN)
+ {
+ column->typcat = TYPECAT_DOMAIN;
+ column->io.domain.base_typid = type->typbasetype;
+ column->io.domain.base_typmod = type->typtypmod;
+ column->io.domain.base_io = MemoryContextAllocZero(mcxt,
+ sizeof(ColumnIOData));
+ column->io.domain.domain_info = NULL;
+ }
+ else if (type->typtype == TYPTYPE_COMPOSITE || typid == RECORDOID)
+ {
+ column->typcat = TYPECAT_COMPOSITE;
+ column->io.composite.record_io = NULL;
+ column->io.composite.tupdesc = NULL;
+ }
+ else if (type->typlen == -1 && OidIsValid(type->typelem))
+ {
+ column->typcat = TYPECAT_ARRAY;
+ column->io.array.element_info = MemoryContextAllocZero(mcxt,
+ sizeof(ColumnIOData));
+ column->io.array.element_type = type->typelem;
+ /* array element typmod is stored in the attribute's typmod */
+ column->io.array.element_typmod = typmod;
+ }
+ else
+ column->typcat = TYPECAT_SCALAR;
+
+ /* don't need input function when converting from jsonb to jsonb */
+ if (json || typid != JSONBOID)
+ {
+ Oid typioproc;
+
+ getTypeInputInfo(typid, &typioproc, &column->scalar_io.typioparam);
+ fmgr_info_cxt(typioproc, &column->scalar_io.typiofunc, mcxt);
+ }
+
+ ReleaseSysCache(tup);
}
/* populate recursively a record field or an array element from json/jsonb value */
@@ -2307,6 +2805,8 @@ populate_record_field(ColumnIOData *col, /* metadata cache */
JsValue *jsv, /* json/jsonb value to convert */
bool *isnull) /* value is NULL */
{
+ TypeCat typcat;
+
check_stack_depth();
/* prepare column metadata cache for the given type */
@@ -2317,8 +2817,44 @@ populate_record_field(ColumnIOData *col, /* metadata cache */
: jsv->val.jsonb == NULL ||
jsv->val.jsonb->type == jbvNull;
- return populate_scalar(&col->scalar_io, typid, typmod,
- *isnull ? NULL : jsv);
+ typcat = col->typcat;
+
+ /* try to convert json string to a non-scalar type through input function */
+ if ((jsv->is_json ? jsv->val.json.type == JSON_TOKEN_STRING
+ : jsv->val.jsonb &&
+ jsv->val.jsonb->type == jbvString) &&
+ (typcat == TYPECAT_ARRAY ||
+ typcat == TYPECAT_COMPOSITE))
+ typcat = TYPECAT_SCALAR;
+
+ /* we must perform domain checks for NULLs */
+ if (*isnull && typcat != TYPECAT_DOMAIN)
+ return (Datum) 0;
+
+ switch (typcat)
+ {
+ case TYPECAT_SCALAR:
+ return populate_scalar(&col->scalar_io, typid, typmod, jsv);
+
+ case TYPECAT_ARRAY:
+ return populate_array(&col->io.array, colname, mcxt, jsv);
+
+ case TYPECAT_COMPOSITE:
+ return populate_composite(&col->io.composite, typid, typmod,
+ colname, mcxt,
+ DatumGetPointer(defaultval)
+ ? DatumGetHeapTupleHeader(defaultval)
+ : NULL,
+ jsv);
+
+ case TYPECAT_DOMAIN:
+ return populate_domain(&col->io.domain, typid, colname, mcxt,
+ jsv, *isnull);
+
+ default:
+ elog(ERROR, "unrecognized type category '%c'", typcat);
+ return (Datum) 0;
+ }
}
static RecordIOData *
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index efcdc41..e6b1ed5 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -1304,6 +1304,31 @@ select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8
-- populate_record
create type jpop as (a text, b int, c timestamp);
+CREATE DOMAIN js_int_not_null AS int NOT NULL;
+CREATE DOMAIN js_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN js_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+CREATE TYPE jsrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d js_int_array_1d,
+ ia2d js_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jpop,
+ reca jpop[]
+);
+CREATE TYPE jsrec_i_not_null AS (
+ i js_int_not_null
+);
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
@@ -1342,6 +1367,370 @@ select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100,200,false]"
+select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{}') q;
+ a | b | c
+---+---+--------------------------
+ x | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"x": 43.2}') q;
+ERROR: domain js_int_not_null does not allow null values
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": null}') q;
+ERROR: domain js_int_not_null does not allow null values
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": 12345}') q;
+ i
+-------
+ 12345
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": null}') q;
+ ia
+----
+
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia"
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [1, "2", null, 4]}') q;
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1, 2], [3, 4]]}') q;
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], 2]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ia"
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], [2, 3]]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": "{1,2,3}"}') q;
+ ia
+---------
+ {1,2,3}
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": null}') q;
+ ia1
+-----
+
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia1"
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [1, "2", null, 4]}') q;
+ ia1
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [[1, 2, 3]]}') q;
+ ia1
+-----------
+ {{1,2,3}}
+(1 row)
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": null}') q;
+ ia1d
+------
+
+(1 row)
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia1d"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null, 4]}') q;
+ERROR: value for domain js_int_array_1d violates check constraint "js_int_array_1d_check"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null]}') q;
+ ia1d
+------------
+ {1,2,NULL}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [1, "2", null, 4]}') q;
+ ia2
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+ ia2
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[], []]}') q;
+ ia2
+-----
+ {}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [3]]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], 3, 4]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ia2"
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+ERROR: value for domain js_int_array_2d violates check constraint "js_int_array_2d_check"
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+ ia2d
+----------------------
+ {{1,2,3},{NULL,5,6}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [1, "2", null, 4]}') q;
+ ia3
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+ ia3
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+ ia3
+-----
+ {}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+ ia3
+-------------------
+ {{{1,2}},{{3,4}}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+ ia3
+-------------------------------
+ {{{1,2},{3,4}},{{5,6},{7,8}}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": null}') q;
+ ta
+----
+
+(1 row)
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ta"
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [1, "2", null, 4]}') q;
+ ta
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ta"
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": null}') q;
+ c
+---
+
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaa"}') q;
+ c
+------------
+ aaa
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaa"}') q;
+ c
+------------
+ aaaaaaaaaa
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaaaaa"}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": null}') q;
+ ca
+----
+
+(1 row)
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ca"
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [1, "2", null, 4]}') q;
+ ca
+-----------------------------------------------
+ {"1 ","2 ",NULL,"4 "}
+(1 row)
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ca"
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": null}') q;
+ js
+----
+
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": true}') q;
+ js
+------
+ true
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": 123.45}') q;
+ js
+--------
+ 123.45
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "123.45"}') q;
+ js
+----------
+ "123.45"
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "abc"}') q;
+ js
+-------
+ "abc"
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+ js
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ js
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": null}') q;
+ jsb
+-----
+
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": true}') q;
+ jsb
+------
+ true
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": 123.45}') q;
+ jsb
+--------
+ 123.45
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "123.45"}') q;
+ jsb
+----------
+ "123.45"
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "abc"}') q;
+ jsb
+-------
+ "abc"
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+ jsb
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ jsb
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": null}') q;
+ jsa
+-----
+
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "jsa"
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": [1, "2", null, 4]}') q;
+ jsa
+--------------------
+ {1,"\"2\"",NULL,4}
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+ jsa
+----------------------------------------------------------
+ {"\"aaa\"",NULL,"[1, 2, \"3\", {}]","{ \"k\" : \"v\" }"}
+(1 row)
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": 123}') q;
+ERROR: cannot call populate_composite on a scalar
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": [1, 2]}') q;
+ERROR: cannot call populate_composite on an array
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+ rec
+-----------------------------------
+ (abc,,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": "(abc,42,01.02.2003)"}') q;
+ rec
+-------------------------------------
+ (abc,42,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "reca"
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [1, 2]}') q;
+ERROR: cannot call populate_composite on a scalar
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+ reca
+--------------------------------------------------------
+ {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": ["(abc,42,01.02.2003)"]}') q;
+ reca
+-------------------------------------------
+ {"(abc,42,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": "{\"(abc,42,01.02.2003)\"}"}') q;
+ reca
+-------------------------------------------
+ {"(abc,42,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT rec FROM json_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jpop,NULL)::jsrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+ rec
+------------------------------------
+ (abc,3,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
@@ -1408,6 +1797,28 @@ select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,3
{"z":true} | 3 | Fri Jan 20 10:42:53 2012
(2 rows)
+-- test type info caching in json_populate_record()
+CREATE TEMP TABLE jspoptest (js json);
+INSERT INTO jspoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::json
+FROM generate_series(1, 3);
+SELECT (json_populate_record(NULL::jsrec, js)).* FROM jspoptest;
+ i | ia | ia1 | ia2 | ia3 | ia1d | ia2d | t | ta | c | ca | ts | js | jsb | jsa | rec | reca
+---+----+-----+-----+-----+------+------+---+----+---+----+----+----+-----+--------------------+-----------------------------------+--------------------------------------------------------
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(3 rows)
+
+DROP TYPE jsrec;
+DROP TYPE jsrec_i_not_null;
+DROP DOMAIN js_int_not_null;
+DROP DOMAIN js_int_array_1d;
+DROP DOMAIN js_int_array_2d;
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
@@ -1600,11 +2011,11 @@ select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":fa
(2 rows)
select *, c is null as c_is_null
-from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::json)
- as t(a int, b json, c text, x int);
- a | b | c | x | c_is_null
----+-----------------+---+---+-----------
- 1 | {"c":16, "d":2} | | 8 | t
+from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::json)
+ as t(a int, b json, c text, x int, ca char(5)[], ia int[][], r jpop);
+ a | b | c | x | ca | ia | r | c_is_null
+---+-----------------+---+---+-------------------+---------------+------------+-----------
+ 1 | {"c":16, "d":2} | | 8 | {"1 2 ","3 "} | {{1,2},{3,4}} | (aaa,123,) | t
(1 row)
select *, c is null as c_is_null
@@ -1615,6 +2026,51 @@ from json_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::json)
1 | {"c":16, "d":2} | | 8 | t
(1 row)
+select * from json_to_record('{"ia": null}') as x(ia _int4);
+ ia
+----
+
+(1 row)
+
+select * from json_to_record('{"ia": 123}') as x(ia _int4);
+ERROR: expected json array
+HINT: see the value of key "ia"
+select * from json_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+select * from json_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from json_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+ERROR: expected json array
+HINT: see the array element [1] of key "ia"
+select * from json_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+select * from json_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+ ia2
+---------
+ {1,2,3}
+(1 row)
+
+select * from json_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+ ia2
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from json_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+ ia2
+-----------------
+ {{{1},{2},{3}}}
+(1 row)
+
-- json_strip_nulls
select json_strip_nulls(null);
json_strip_nulls
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index ba9b1d7..4acd461 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -1888,6 +1888,31 @@ SELECT * FROM jsonb_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,
-- populate_record
CREATE TYPE jbpop AS (a text, b int, c timestamp);
+CREATE DOMAIN jsb_int_not_null AS int NOT NULL;
+CREATE DOMAIN jsb_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN jsb_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+CREATE TYPE jsbrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d jsb_int_array_1d,
+ ia2d jsb_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jbpop,
+ reca jbpop[]
+);
+CREATE TYPE jsbrec_i_not_null AS (
+ i jsb_int_not_null
+);
SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
@@ -1926,6 +1951,382 @@ SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100, 200, false]"
+SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop, '{}') q;
+ a | b | c
+---+---+--------------------------
+ x | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"x": 43.2}') q;
+ERROR: domain jsb_int_not_null does not allow null values
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": null}') q;
+ERROR: domain jsb_int_not_null does not allow null values
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": 12345}') q;
+ i
+-------
+ 12345
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": null}') q;
+ ia
+----
+
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia"
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [1, "2", null, 4]}') q;
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1, 2], [3, 4]]}') q;
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], 2]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ia"
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], [2, 3]]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": "{1,2,3}"}') q;
+ ia
+---------
+ {1,2,3}
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": null}') q;
+ ia1
+-----
+
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia1"
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [1, "2", null, 4]}') q;
+ ia1
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [[1, 2, 3]]}') q;
+ ia1
+-----------
+ {{1,2,3}}
+(1 row)
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": null}') q;
+ ia1d
+------
+
+(1 row)
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia1d"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null, 4]}') q;
+ERROR: value for domain jsb_int_array_1d violates check constraint "jsb_int_array_1d_check"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null]}') q;
+ ia1d
+------------
+ {1,2,NULL}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [1, "2", null, 4]}') q;
+ ia2
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+ ia2
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[], []]}') q;
+ ia2
+-----
+ {}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [3]]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], 3, 4]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ia2"
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+ERROR: value for domain jsb_int_array_2d violates check constraint "jsb_int_array_2d_check"
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+ ia2d
+----------------------
+ {{1,2,3},{NULL,5,6}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [1, "2", null, 4]}') q;
+ ia3
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+ ia3
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+ ia3
+-----
+ {}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+ ia3
+-------------------
+ {{{1,2}},{{3,4}}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+ ia3
+-------------------------------
+ {{{1,2},{3,4}},{{5,6},{7,8}}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": null}') q;
+ ta
+----
+
+(1 row)
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ta"
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [1, "2", null, 4]}') q;
+ ta
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ta"
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": null}') q;
+ c
+---
+
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaa"}') q;
+ c
+------------
+ aaa
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaa"}') q;
+ c
+------------
+ aaaaaaaaaa
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaaaaa"}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": null}') q;
+ ca
+----
+
+(1 row)
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ca"
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [1, "2", null, 4]}') q;
+ ca
+-----------------------------------------------
+ {"1 ","2 ",NULL,"4 "}
+(1 row)
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ca"
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": null}') q;
+ js
+----
+
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": true}') q;
+ js
+------
+ true
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": 123.45}') q;
+ js
+--------
+ 123.45
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "123.45"}') q;
+ js
+----------
+ "123.45"
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "abc"}') q;
+ js
+-------
+ "abc"
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+ js
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ js
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": null}') q;
+ jsb
+-----
+
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": true}') q;
+ jsb
+------
+ true
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": 123.45}') q;
+ jsb
+--------
+ 123.45
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "123.45"}') q;
+ jsb
+----------
+ "123.45"
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "abc"}') q;
+ jsb
+-------
+ "abc"
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+ jsb
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ jsb
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": null}') q;
+ jsa
+-----
+
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "jsa"
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": [1, "2", null, 4]}') q;
+ jsa
+--------------------
+ {1,"\"2\"",NULL,4}
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+ jsa
+-------------------------------------------------------
+ {"\"aaa\"",NULL,"[1, 2, \"3\", {}]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
+ rec
+------
+ (,,)
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
+ rec
+------
+ (,,)
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+ rec
+-----------------------------------
+ (abc,,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": "(abc,42,01.02.2003)"}') q;
+ rec
+-------------------------------------
+ (abc,42,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "reca"
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
+ reca
+-----------------
+ {"(,,)","(,,)"}
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+ reca
+--------------------------------------------------------
+ {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": ["(abc,42,01.02.2003)"]}') q;
+ reca
+-------------------------------------------
+ {"(abc,42,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": "{\"(abc,42,01.02.2003)\"}"}') q;
+ reca
+-------------------------------------------
+ {"(abc,42,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jbpop,NULL)::jsbrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+ rec
+------------------------------------
+ (abc,3,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
-- populate_recordset
SELECT * FROM jsonb_populate_recordset(NULL::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
@@ -2002,11 +2403,11 @@ select * from jsonb_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar",
(2 rows)
select *, c is null as c_is_null
-from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::jsonb)
- as t(a int, b jsonb, c text, x int);
- a | b | c | x | c_is_null
----+-------------------+---+---+-----------
- 1 | {"c": 16, "d": 2} | | 8 | t
+from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::jsonb)
+ as t(a int, b jsonb, c text, x int, ca char(5)[], ia int[][], r jbpop);
+ a | b | c | x | ca | ia | r | c_is_null
+---+-------------------+---+---+-------------------+---------------+------------+-----------
+ 1 | {"c": 16, "d": 2} | | 8 | {"1 2 ","3 "} | {{1,2},{3,4}} | (aaa,123,) | t
(1 row)
select *, c is null as c_is_null
@@ -2017,6 +2418,73 @@ from jsonb_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::jsonb)
1 | {"c": 16, "d": 2} | | 8 | t
(1 row)
+select * from jsonb_to_record('{"ia": null}') as x(ia _int4);
+ ia
+----
+
+(1 row)
+
+select * from jsonb_to_record('{"ia": 123}') as x(ia _int4);
+ERROR: expected json array
+HINT: see the value of key "ia"
+select * from jsonb_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+select * from jsonb_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from jsonb_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+ERROR: expected json array
+HINT: see the array element [1] of key "ia"
+select * from jsonb_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+select * from jsonb_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+ ia2
+---------
+ {1,2,3}
+(1 row)
+
+select * from jsonb_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+ ia2
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from jsonb_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+ ia2
+-----------------
+ {{{1},{2},{3}}}
+(1 row)
+
+-- test type info caching in jsonb_populate_record()
+CREATE TEMP TABLE jsbpoptest (js jsonb);
+INSERT INTO jsbpoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::jsonb
+FROM generate_series(1, 3);
+SELECT (jsonb_populate_record(NULL::jsbrec, js)).* FROM jsbpoptest;
+ i | ia | ia1 | ia2 | ia3 | ia1d | ia2d | t | ta | c | ca | ts | js | jsb | jsa | rec | reca
+---+----+-----+-----+-----+------+------+---+----+---+----+----+----+-----+--------------------+-----------------------------------+--------------------------------------------------------
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(3 rows)
+
+DROP TYPE jsbrec;
+DROP TYPE jsbrec_i_not_null;
+DROP DOMAIN jsb_int_not_null;
+DROP DOMAIN jsb_int_array_1d;
+DROP DOMAIN jsb_int_array_2d;
-- indexing
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}';
count
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index 603288b..de9c4ff 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -378,6 +378,34 @@ select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8
-- populate_record
create type jpop as (a text, b int, c timestamp);
+CREATE DOMAIN js_int_not_null AS int NOT NULL;
+CREATE DOMAIN js_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN js_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+
+CREATE TYPE jsrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d js_int_array_1d,
+ ia2d js_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jpop,
+ reca jpop[]
+);
+
+CREATE TYPE jsrec_i_not_null AS (
+ i js_int_not_null
+);
+
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":"blurfl","x":43.2}') q;
@@ -388,6 +416,100 @@ select * from json_populate_record(null::jpop,'{"a":[100,200,false],"x":43.2}')
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":[100,200,false],"x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
+select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{}') q;
+
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"x": 43.2}') q;
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": null}') q;
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": 12345}') q;
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": null}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": 123}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [1, "2", null, 4]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1, 2], [3, 4]]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], 2]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], [2, 3]]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": "{1,2,3}"}') q;
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": null}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": 123}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [1, "2", null, 4]}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [[1, 2, 3]]}') q;
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": null}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": 123}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null, 4]}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null]}') q;
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [1, "2", null, 4]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[], []]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [3]]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], 3, 4]}') q;
+
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [1, "2", null, 4]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": null}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": 123}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [1, "2", null, 4]}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": null}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaa"}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaa"}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaaaaa"}') q;
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": null}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": 123}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [1, "2", null, 4]}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": null}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": true}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": 123.45}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "123.45"}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "abc"}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": null}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": true}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": 123.45}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "123.45"}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "abc"}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": null}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": 123}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": [1, "2", null, 4]}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": 123}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": [1, 2]}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": "(abc,42,01.02.2003)"}') q;
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": 123}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [1, 2]}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": ["(abc,42,01.02.2003)"]}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": "{\"(abc,42,01.02.2003)\"}"}') q;
+
+SELECT rec FROM json_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jpop,NULL)::jsrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
@@ -404,6 +526,25 @@ select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+-- test type info caching in json_populate_record()
+CREATE TEMP TABLE jspoptest (js json);
+
+INSERT INTO jspoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::json
+FROM generate_series(1, 3);
+
+SELECT (json_populate_record(NULL::jsrec, js)).* FROM jspoptest;
+
+DROP TYPE jsrec;
+DROP TYPE jsrec_i_not_null;
+DROP DOMAIN js_int_not_null;
+DROP DOMAIN js_int_array_1d;
+DROP DOMAIN js_int_array_2d;
+
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
@@ -520,13 +661,24 @@ select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":fa
as x(a int, b json, c boolean);
select *, c is null as c_is_null
-from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::json)
- as t(a int, b json, c text, x int);
+from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::json)
+ as t(a int, b json, c text, x int, ca char(5)[], ia int[][], r jpop);
select *, c is null as c_is_null
from json_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::json)
as t(a int, b json, c text, x int);
+select * from json_to_record('{"ia": null}') as x(ia _int4);
+select * from json_to_record('{"ia": 123}') as x(ia _int4);
+select * from json_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+select * from json_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+select * from json_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+select * from json_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+
+select * from json_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+select * from json_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+select * from json_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+
-- json_strip_nulls
select json_strip_nulls(null);
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index eb65a38..34b9fc5 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -478,6 +478,34 @@ SELECT * FROM jsonb_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,
-- populate_record
CREATE TYPE jbpop AS (a text, b int, c timestamp);
+CREATE DOMAIN jsb_int_not_null AS int NOT NULL;
+CREATE DOMAIN jsb_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN jsb_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+
+CREATE TYPE jsbrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d jsb_int_array_1d,
+ ia2d jsb_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jbpop,
+ reca jbpop[]
+);
+
+CREATE TYPE jsbrec_i_not_null AS (
+ i jsb_int_not_null
+);
+
SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":"blurfl","x":43.2}') q;
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}') q;
@@ -488,6 +516,100 @@ SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":[100,200,false],"x":43.2}'
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":[100,200,false],"x":43.2}') q;
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}') q;
+SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop, '{}') q;
+
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"x": 43.2}') q;
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": null}') q;
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": 12345}') q;
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": null}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": 123}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [1, "2", null, 4]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1, 2], [3, 4]]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], 2]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], [2, 3]]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": "{1,2,3}"}') q;
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": null}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": 123}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [1, "2", null, 4]}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [[1, 2, 3]]}') q;
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": null}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": 123}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null, 4]}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null]}') q;
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [1, "2", null, 4]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[], []]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [3]]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], 3, 4]}') q;
+
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [1, "2", null, 4]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": null}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": 123}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [1, "2", null, 4]}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": null}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaa"}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaa"}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaaaaa"}') q;
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": null}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": 123}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [1, "2", null, 4]}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": null}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": true}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": 123.45}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "123.45"}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "abc"}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": null}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": true}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": 123.45}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "123.45"}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "abc"}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": null}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": 123}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": [1, "2", null, 4]}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": "(abc,42,01.02.2003)"}') q;
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": ["(abc,42,01.02.2003)"]}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": "{\"(abc,42,01.02.2003)\"}"}') q;
+
+SELECT rec FROM jsonb_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jbpop,NULL)::jsbrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+
-- populate_recordset
SELECT * FROM jsonb_populate_recordset(NULL::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
SELECT * FROM jsonb_populate_recordset(row('def',99,NULL)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
@@ -509,13 +631,43 @@ select * from jsonb_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar",
as x(a int, b text, c boolean);
select *, c is null as c_is_null
-from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::jsonb)
- as t(a int, b jsonb, c text, x int);
+from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::jsonb)
+ as t(a int, b jsonb, c text, x int, ca char(5)[], ia int[][], r jbpop);
select *, c is null as c_is_null
from jsonb_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::jsonb)
as t(a int, b jsonb, c text, x int);
+select * from jsonb_to_record('{"ia": null}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": 123}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+
+select * from jsonb_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+select * from jsonb_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+select * from jsonb_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+
+-- test type info caching in jsonb_populate_record()
+CREATE TEMP TABLE jsbpoptest (js jsonb);
+
+INSERT INTO jsbpoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::jsonb
+FROM generate_series(1, 3);
+
+SELECT (jsonb_populate_record(NULL::jsbrec, js)).* FROM jsbpoptest;
+
+DROP TYPE jsbrec;
+DROP TYPE jsbrec_i_not_null;
+DROP DOMAIN jsb_int_not_null;
+DROP DOMAIN jsb_int_array_1d;
+DROP DOMAIN jsb_int_array_2d;
+
-- indexing
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}';
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":"CC"}';
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 22.01.2017 21:58, Tom Lane wrote:
If you want such macros I think it would be better to submit a separate
cosmetic patch that tries to hide such bit-tests behind macros throughout
the jsonb code.
I've attached that patch, but not all the bit-tests were hidden: some of them
in jsonb_util.c still remain valid after upcoming refactoring because they
don't belong to generic code (there might be better to use JBC_XXX() macros).
Pushed this; grepping found a couple other places that could be replaced
by the macros, so I did.
I didn't include the JsonContainerIsEmpty macro, though. It wasn't used
anywhere, and I'm not exactly convinced that "IsEmpty" is more readable
than "Size == 0", anyhow. We can add it later if the use-case gets
stronger.
Sorry for this obvious mistake. But macros JB_ROOT_IS_XXX() also contain the
same hazard.
Good point, fixed.
I'll look at the rest of this in a bit.
regards, tom lane
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 22.01.2017 21:58, Tom Lane wrote:
5. The business with having some arguments that are only for json and
others that are only for jsonb, eg in populate_scalar(), also makes me
itch.
I've refactored json/jsonb passing using new struct JsValue which contains
union for json/jsonb values.
I'm not too enamored of that fix. It doesn't do much for readability, and
at least with my compiler (gcc 4.4.7), I sometimes get "variable might be
used uninitialized" warnings, probably due to not all fields of the union
getting set in every code path.
I wonder whether this wouldn't all get a lot simpler and more
readable if we gave up on trying to share code between the two cases.
Maybe two separate families of functions like this
...
could slightly improve readability, but such code duplication (I believe it is
a duplicate code) would never be acceptable to me.
I think you need to take a second look at the code you're producing
and realize that it's not so clean either. This extract from
populate_record_field, for example, is pretty horrid:
/* prepare column metadata cache for the given type */
if (col->typid != typid || col->typmod != typmod)
prepare_column_cache(col, typid, typmod, mcxt, jsv->is_json);
*isnull = jsv->is_json ? jsv->val.json.str == NULL
: jsv->val.jsonb == NULL ||
jsv->val.jsonb->type == jbvNull;
typcat = col->typcat;
/* try to convert json string to a non-scalar type through input function */
if ((jsv->is_json ? jsv->val.json.type == JSON_TOKEN_STRING
: jsv->val.jsonb &&
jsv->val.jsonb->type == jbvString) &&
(typcat == TYPECAT_ARRAY ||
typcat == TYPECAT_COMPOSITE))
typcat = TYPECAT_SCALAR;
/* we must perform domain checks for NULLs */
if (*isnull && typcat != TYPECAT_DOMAIN)
return (Datum) 0;
When every other line contains an is_json conditional, you are not making
good readable code. It's also worth noting that this is going to become
even less readable after pgindent gets done with it:
/* prepare column metadata cache for the given type */
if (col->typid != typid || col->typmod != typmod)
prepare_column_cache(col, typid, typmod, mcxt, jsv->is_json);
*isnull = jsv->is_json ? jsv->val.json.str == NULL
: jsv->val.jsonb == NULL ||
jsv->val.jsonb->type == jbvNull;
typcat = col->typcat;
/* try to convert json string to a non-scalar type through input function */
if ((jsv->is_json ? jsv->val.json.type == JSON_TOKEN_STRING
: jsv->val.jsonb &&
jsv->val.jsonb->type == jbvString) &&
(typcat == TYPECAT_ARRAY ||
typcat == TYPECAT_COMPOSITE))
typcat = TYPECAT_SCALAR;
/* we must perform domain checks for NULLs */
if (*isnull && typcat != TYPECAT_DOMAIN)
return (Datum) 0;
You could maybe improve that result with some different formatting
choices, but I think it's basically a readability fail to be relying
heavily on multiline x?y:z conditionals in the first place.
And I still maintain that it's entirely silly to be structuring
populate_scalar() this way.
So I really think it'd be a good idea to explore separating the json and
jsonb code paths. This direction isn't looking like a win.
7. More generally, the patch is hard to review because it whacks the
existing code around so thoroughly that it's tough even to match up
old and new code to get a handle on what you changed functionally.
Maybe it would be good if you could separate it into a refactoring
patch that just moves existing code around without functional changes,
and then a patch on top of that that adds code and makes only localized
changes in what's already there.
I've split this patch into two patches as you asked.
That didn't really help :-( ... the 0002 patch is still nigh unreadable.
Maybe it's trying to do too much in one step.
regards, tom lane
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 25.01.2017 23:58, Tom Lane wrote:
I think you need to take a second look at the code you're producing
and realize that it's not so clean either. This extract from
populate_record_field, for example, is pretty horrid:
But what if we introduce some helper macros like this:
#define JsValueIsNull(jsv) \
((jsv)->is_json ? !(jsv)->val.json.str \
: !(jsv)->val.jsonb || (jsv)->val.jsonb->type == jbvNull)
#define JsValueIsString(jsv) \
((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
: (jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString)
/* prepare column metadata cache for the given type */
if (col->typid != typid || col->typmod != typmod)
prepare_column_cache(col, typid, typmod, mcxt, jsv->is_json);
*isnull = JsValueIsNull(jsv);
typcat = col->typcat;
/* try to convert json string to a non-scalar type through input function */
if (JsValueIsString(jsv) &&
(typcat == TYPECAT_ARRAY || typcat == TYPECAT_COMPOSITE))
typcat = TYPECAT_SCALAR;
/* we must perform domain checks for NULLs */
if (*isnull && typcat != TYPECAT_DOMAIN)
return (Datum) 0;
--
Nikita Glukhov
Postgres Professional: http://www.postgrespro.com
The Russian Postgres Company
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 25.01.2017 23:58, Tom Lane wrote:
I think you need to take a second look at the code you're producing
and realize that it's not so clean either. This extract from
populate_record_field, for example, is pretty horrid:
But what if we introduce some helper macros like this:
#define JsValueIsNull(jsv) \
((jsv)->is_json ? !(jsv)->val.json.str \
: !(jsv)->val.jsonb || (jsv)->val.jsonb->type == jbvNull)
#define JsValueIsString(jsv) \
((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
: (jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString)
Yeah, I was wondering about that too. I'm not sure that you can make
a reasonable set of helper macros that will fix this, but if you want
to try, go for it.
BTW, just as a stylistic thing, I find "a?b:c||d" unreadable: I have
to go back to the manual every darn time to convince myself whether
that means (a?b:c)||d or a?b:(c||d). It's better not to rely on
the reader (... or the author) having memorized C's precedence rules
in quite that much detail. Extra parens help.
regards, tom lane
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On Thu, Jan 26, 2017 at 6:49 AM, Tom Lane <tgl@sss.pgh.pa.us> wrote:
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 25.01.2017 23:58, Tom Lane wrote:
I think you need to take a second look at the code you're producing
and realize that it's not so clean either. This extract from
populate_record_field, for example, is pretty horrid:

But what if we introduce some helper macros like this:
#define JsValueIsNull(jsv) \
((jsv)->is_json ? !(jsv)->val.json.str \
: !(jsv)->val.jsonb || (jsv)->val.jsonb->type == jbvNull)#define JsValueIsString(jsv) \
((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
: (jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString)

Yeah, I was wondering about that too. I'm not sure that you can make
a reasonable set of helper macros that will fix this, but if you want
to try, go for it.

BTW, just as a stylistic thing, I find "a?b:c||d" unreadable: I have
to go back to the manual every darn time to convince myself whether
that means (a?b:c)||d or a?b:(c||d). It's better not to rely on
the reader (... or the author) having memorized C's precedence rules
in quite that much detail. Extra parens help.
Moved to CF 2017-03 as discussion is going on and more review is
needed on the last set of patches.
--
Michael
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 2/1/17 12:53 AM, Michael Paquier wrote:
On Thu, Jan 26, 2017 at 6:49 AM, Tom Lane <tgl@sss.pgh.pa.us> wrote:
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 25.01.2017 23:58, Tom Lane wrote:
I think you need to take a second look at the code you're producing
and realize that it's not so clean either. This extract from
populate_record_field, for example, is pretty horrid:But what if we introduce some helper macros like this:
#define JsValueIsNull(jsv) \
((jsv)->is_json ? !(jsv)->val.json.str \
: !(jsv)->val.jsonb || (jsv)->val.jsonb->type == jbvNull)#define JsValueIsString(jsv) \
((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
: (jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString)Yeah, I was wondering about that too. I'm not sure that you can make
a reasonable set of helper macros that will fix this, but if you want
to try, go for it.BTW, just as a stylistic thing, I find "a?b:c||d" unreadable: I have
to go back to the manual every darn time to convince myself whether
that means (a?b:c)||d or a?b:(c||d). It's better not to rely on
the reader (... or the author) having memorized C's precedence rules
in quite that much detail. Extra parens help.

Moved to CF 2017-03 as discussion is going on and more review is
needed on the last set of patches.
The current patches do not apply cleanly at cccbdde:
$ git apply ../other/0001-introduce-JsonContainerXxx-macros-v04.patch
error: patch failed: src/backend/utils/adt/jsonb_util.c:328
error: src/backend/utils/adt/jsonb_util.c: patch does not apply
error: patch failed: src/backend/utils/adt/jsonfuncs.c:1266
error: src/backend/utils/adt/jsonfuncs.c: patch does not apply
error: patch failed: src/include/utils/jsonb.h:218
error: src/include/utils/jsonb.h: patch does not apply
In addition, it appears a number of suggestions have been made by Tom
that warrant new patches. Marked "Waiting on Author".
--
-David
david@pgmasters.net
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 3/16/17 11:54 AM, David Steele wrote:
On 2/1/17 12:53 AM, Michael Paquier wrote:
On Thu, Jan 26, 2017 at 6:49 AM, Tom Lane <tgl@sss.pgh.pa.us> wrote:
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 25.01.2017 23:58, Tom Lane wrote:
I think you need to take a second look at the code you're producing
and realize that it's not so clean either. This extract from
populate_record_field, for example, is pretty horrid:But what if we introduce some helper macros like this:
#define JsValueIsNull(jsv) \
((jsv)->is_json ? !(jsv)->val.json.str \
: !(jsv)->val.jsonb || (jsv)->val.jsonb->type == jbvNull)#define JsValueIsString(jsv) \
((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
: (jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString)Yeah, I was wondering about that too. I'm not sure that you can make
a reasonable set of helper macros that will fix this, but if you want
to try, go for it.BTW, just as a stylistic thing, I find "a?b:c||d" unreadable: I have
to go back to the manual every darn time to convince myself whether
that means (a?b:c)||d or a?b:(c||d). It's better not to rely on
the reader (... or the author) having memorized C's precedence rules
in quite that much detail. Extra parens help.Moved to CF 2017-03 as discussion is going on and more review is
needed on the last set of patches.

The current patches do not apply cleanly at cccbdde:
$ git apply ../other/0001-introduce-JsonContainerXxx-macros-v04.patch
error: patch failed: src/backend/utils/adt/jsonb_util.c:328
error: src/backend/utils/adt/jsonb_util.c: patch does not apply
error: patch failed: src/backend/utils/adt/jsonfuncs.c:1266
error: src/backend/utils/adt/jsonfuncs.c: patch does not apply
error: patch failed: src/include/utils/jsonb.h:218
error: src/include/utils/jsonb.h: patch does not apply

In addition, it appears a number of suggestions have been made by Tom
that warrant new patches. Marked "Waiting on Author".
This thread has been idle for months since Tom's review.
The submission has been marked "Returned with Feedback". Please feel
free to resubmit to a future commitfest.
Thanks,
--
-David
david@pgmasters.net
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 03/21/2017 01:37 PM, David Steele wrote:
On 3/16/17 11:54 AM, David Steele wrote:
On 2/1/17 12:53 AM, Michael Paquier wrote:
On Thu, Jan 26, 2017 at 6:49 AM, Tom Lane <tgl@sss.pgh.pa.us> wrote:
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 25.01.2017 23:58, Tom Lane wrote:
I think you need to take a second look at the code you're producing
and realize that it's not so clean either. This extract from
populate_record_field, for example, is pretty horrid:But what if we introduce some helper macros like this:
#define JsValueIsNull(jsv) \
((jsv)->is_json ? !(jsv)->val.json.str \
: !(jsv)->val.jsonb || (jsv)->val.jsonb->type == jbvNull)#define JsValueIsString(jsv) \
((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
: (jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString)Yeah, I was wondering about that too. I'm not sure that you can make
a reasonable set of helper macros that will fix this, but if you want
to try, go for it.BTW, just as a stylistic thing, I find "a?b:c||d" unreadable: I have
to go back to the manual every darn time to convince myself whether
that means (a?b:c)||d or a?b:(c||d). It's better not to rely on
the reader (... or the author) having memorized C's precedence rules
in quite that much detail. Extra parens help.Moved to CF 2017-03 as discussion is going on and more review is
needed on the last set of patches.The current patches do not apply cleanly at cccbdde:
$ git apply ../other/0001-introduce-JsonContainerXxx-macros-v04.patch
error: patch failed: src/backend/utils/adt/jsonb_util.c:328
error: src/backend/utils/adt/jsonb_util.c: patch does not apply
error: patch failed: src/backend/utils/adt/jsonfuncs.c:1266
error: src/backend/utils/adt/jsonfuncs.c: patch does not apply
error: patch failed: src/include/utils/jsonb.h:218
error: src/include/utils/jsonb.h: patch does not apply

In addition, it appears a number of suggestions have been made by Tom
that warrant new patches. Marked "Waiting on Author".

This thread has been idle for months since Tom's review.
The submission has been marked "Returned with Feedback". Please feel
free to resubmit to a future commitfest.
Please revive. I am planning to look at this later this week.
cheers
andrew
--
Andrew Dunstan https://www.2ndQuadrant.com
PostgreSQL Development, 24x7 Support, Remote DBA, Training & Services
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 3/21/17 2:31 PM, Andrew Dunstan wrote:
On 03/21/2017 01:37 PM, David Steele wrote:
This thread has been idle for months since Tom's review.
The submission has been marked "Returned with Feedback". Please feel
free to resubmit to a future commitfest.

Please revive. I am planning to look at this later this week.
Revived in "Waiting on Author" state.
--
-David
david@pgmasters.net
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 22.03.2017 00:26, David Steele wrote:
On 3/21/17 2:31 PM, Andrew Dunstan wrote:
On 03/21/2017 01:37 PM, David Steele wrote:
This thread has been idle for months since Tom's review.
The submission has been marked "Returned with Feedback". Please feel
free to resubmit to a future commitfest.

Please revive. I am planning to look at this later this week.
Revived in "Waiting on Author" state.
Here is updated v05 version of this patch:
* rebased to the latest master
* one patch is completely removed because it is unnecessary now
* added some macros for JsValue fields access
* added new JsObject structure for passing json/jsonb objects
* refactoring patch is not yet simplified (not broken into a
step-by-step sequence)
I should also note that the json branch of this code is not as efficient as
it could be: nested json objects/arrays may be parsed repeatedly,
rather than being processed in a single parsing pass.
--
Nikita Glukhov
Postgres Professional: http://www.postgrespro.com
The Russian Postgres Company
Attachments:
0001-refactor-json_populate_record-v05.patchtext/x-patch; name=0001-refactor-json_populate_record-v05.patchDownload
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index bf2c91f..da29dab 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -109,6 +109,7 @@ typedef struct JhashState
HTAB *hash;
char *saved_scalar;
char *save_json_start;
+ JsonTokenType saved_token_type;
} JHashState;
/* hashtable element */
@@ -116,26 +117,49 @@ typedef struct JsonHashEntry
{
char fname[NAMEDATALEN]; /* hash key (MUST BE FIRST) */
char *val;
- char *json;
- bool isnull;
+ JsonTokenType type;
} JsonHashEntry;
-/* these two are stolen from hstore / record_out, used in populate_record* */
-typedef struct ColumnIOData
+/* structure to cache type I/O metadata needed for populate_scalar() */
+typedef struct ScalarIOData
{
- Oid column_type;
- Oid typiofunc;
Oid typioparam;
- FmgrInfo proc;
-} ColumnIOData;
+ FmgrInfo typiofunc;
+} ScalarIOData;
+
+typedef struct ColumnIOData ColumnIOData;
+typedef struct RecordIOData RecordIOData;
-typedef struct RecordIOData
+/* structure to cache metadata needed for populate_composite() */
+typedef struct CompositeIOData
+{
+ /*
+ * We use pointer to a RecordIOData here because variable-length
+ * struct RecordIOData can't be used directly in ColumnIOData.io union
+ */
+ RecordIOData *record_io; /* metadata cache for populate_record() */
+ TupleDesc tupdesc; /* cached tuple descriptor */
+} CompositeIOData;
+
+/* these two are stolen from hstore / record_out, used in populate_record* */
+
+/* structure to cache record metadata needed for populate_record_field() */
+struct ColumnIOData
+{
+ Oid typid; /* column type id */
+ int32 typmod; /* column type modifier */
+ ScalarIOData scalar_io; /* metadata cache for direct conversion
+ * through input function */
+};
+
+/* structure to cache record metadata needed for populate_record() */
+struct RecordIOData
{
Oid record_type;
int32 record_typmod;
int ncolumns;
ColumnIOData columns[FLEXIBLE_ARRAY_MEMBER];
-} RecordIOData;
+};
/* state for populate_recordset */
typedef struct PopulateRecordsetState
@@ -145,10 +169,11 @@ typedef struct PopulateRecordsetState
HTAB *json_hash;
char *saved_scalar;
char *save_json_start;
+ JsonTokenType saved_token_type;
Tuplestorestate *tuple_store;
TupleDesc ret_tdesc;
HeapTupleHeader rec;
- RecordIOData *my_extra;
+ RecordIOData **my_extra;
MemoryContext fn_mcxt; /* used to stash IO funcs */
} PopulateRecordsetState;
@@ -160,6 +185,55 @@ typedef struct StripnullState
bool skip_next_null;
} StripnullState;
+/* structure for generalized json/jsonb value passing */
+typedef struct JsValue
+{
+ bool is_json; /* json/jsonb */
+ union
+ {
+ struct
+ {
+ char *str; /* json string */
+ int len; /* json string length or -1 if null-terminated */
+ JsonTokenType type; /* json type */
+ } json; /* json value */
+
+ JsonbValue *jsonb; /* jsonb value */
+ } val;
+} JsValue;
+
+typedef struct JsObject
+{
+ bool is_json; /* json/jsonb */
+ union
+ {
+ HTAB *json_hash;
+ JsonbContainer *jsonb_cont;
+ } val;
+} JsObject;
+
+#define JsValueIsNull(jsv) \
+ ((jsv)->is_json ? \
+ (!(jsv)->val.json.str || (jsv)->val.json.type == JSON_TOKEN_NULL) : \
+ (!(jsv)->val.jsonb || (jsv)->val.jsonb->type == jbvNull))
+
+#define JsValueIsString(jsv) \
+ ((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
+ : ((jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString))
+
+#define JsObjectSize(jso) \
+ ((jso)->is_json \
+ ? hash_get_num_entries((jso)->val.json_hash) \
+ : !(jso)->val.jsonb_cont || JsonContainerSize((jso)->val.jsonb_cont))
+
+#define JsObjectIsEmpty(jso) (JsObjectSize(jso) == 0)
+
+#define JsObjectFree(jso) do { \
+ if ((jso)->is_json) \
+ hash_destroy((jso)->val.json_hash); \
+ } while (0)
+
+
/* semantic action functions for json_object_keys */
static void okeys_object_field_start(void *state, char *fname, bool isnull);
static void okeys_array_start(void *state);
@@ -211,7 +285,7 @@ static void elements_array_element_end(void *state, bool isnull);
static void elements_scalar(void *state, char *token, JsonTokenType tokentype);
/* turn a json object into a hash table */
-static HTAB *get_json_object_as_hash(text *json, const char *funcname);
+static HTAB *get_json_object_as_hash(char *json, int len, const char *funcname);
/* common worker for populate_record and to_record */
static Datum populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
@@ -241,10 +315,6 @@ static void sn_object_field_start(void *state, char *fname, bool isnull);
static void sn_array_element_start(void *state, bool isnull);
static void sn_scalar(void *state, char *token, JsonTokenType tokentype);
-/* Turn a jsonb object into a record */
-static void make_row_from_rec_and_jsonb(Jsonb *element,
- PopulateRecordsetState *state);
-
/* worker function for populate_recordset and to_recordset */
static Datum populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
bool have_record_arg);
@@ -271,6 +341,16 @@ static void setPathArray(JsonbIterator **it, Datum *path_elems,
int level, Jsonb *newval, uint32 nelems, int op_type);
static void addJsonbToParseState(JsonbParseState **jbps, Jsonb *jb);
+/* helper functions for populate_record[set] */
+static HeapTupleHeader
+populate_record(TupleDesc tupdesc, RecordIOData **record_info,
+ HeapTupleHeader template, MemoryContext mcxt,
+ JsObject *obj);
+
+static Datum
+populate_record_field(ColumnIOData *col, Oid typid, int32 typmod,
+ const char *colname, MemoryContext mcxt,
+ Datum defaultval, JsValue *jsv, bool *isnull);
/*
* SQL function json_object_keys
@@ -2099,158 +2179,279 @@ json_to_record(PG_FUNCTION_ARGS)
return populate_record_worker(fcinfo, "json_to_record", false);
}
-static Datum
-populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
- bool have_record_arg)
+static void
+JsValueToJsObject(JsValue *jsv, JsObject *jso)
{
- int json_arg_num = have_record_arg ? 1 : 0;
- Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
- text *json;
- Jsonb *jb = NULL;
- HTAB *json_hash = NULL;
- HeapTupleHeader rec = NULL;
- Oid tupType = InvalidOid;
- int32 tupTypmod = -1;
- TupleDesc tupdesc;
- HeapTupleData tuple;
- HeapTuple rettuple;
- RecordIOData *my_extra;
- int ncolumns;
- int i;
- Datum *values;
- bool *nulls;
-
- Assert(jtype == JSONOID || jtype == JSONBOID);
+ jso->is_json = jsv->is_json;
- if (have_record_arg)
+ if (jsv->is_json)
{
- Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+ /* convert plain-text json into a hash table */
+ jso->val.json_hash =
+ get_json_object_as_hash(jsv->val.json.str,
+ jsv->val.json.len >= 0
+ ? jsv->val.json.len
+ : strlen(jsv->val.json.str),
+ "populate_composite");
+ }
+ else
+ {
+ JsonbValue *jbv = jsv->val.jsonb;
- if (!type_is_rowtype(argtype))
- ereport(ERROR,
- (errcode(ERRCODE_DATATYPE_MISMATCH),
- errmsg("first argument of %s must be a row type",
- funcname)));
+ if (jbv->type == jbvBinary &&
+ JsonContainerIsObject(jbv->val.binary.data))
+ jso->val.jsonb_cont = jbv->val.binary.data;
+ else
+ jso->val.jsonb_cont = NULL;
+ }
+}
- if (PG_ARGISNULL(0))
- {
- if (PG_ARGISNULL(1))
- PG_RETURN_NULL();
+/* populate recursively composite (row type) value from json/jsonb */
+static Datum
+populate_composite(CompositeIOData *io, /* metadata cache */
+ Oid typid, /* row type id */
+ int32 typmod, /* row type modifier */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ HeapTupleHeader defaultval, /* default row value, if any */
+ JsValue *jsv) /* json/jsonb object */
+{
+ HeapTupleHeader tuple;
+ JsObject jso;
- /*
- * have no tuple to look at, so the only source of type info is
- * the argtype. The lookup_rowtype_tupdesc call below will error
- * out if we don't have a known composite type oid here.
- */
- tupType = argtype;
- tupTypmod = -1;
- }
- else
- {
- rec = PG_GETARG_HEAPTUPLEHEADER(0);
+ /* acquire cached tuple descriptor */
+ if (!io->tupdesc ||
+ io->tupdesc->tdtypeid != typid ||
+ io->tupdesc->tdtypmod != typmod)
+ {
+ TupleDesc tupdesc = lookup_rowtype_tupdesc(typid, typmod);
+ MemoryContext oldcxt;
- if (PG_ARGISNULL(1))
- PG_RETURN_POINTER(rec);
+ if (io->tupdesc)
+ FreeTupleDesc(io->tupdesc);
- /* Extract type info from the tuple itself */
- tupType = HeapTupleHeaderGetTypeId(rec);
- tupTypmod = HeapTupleHeaderGetTypMod(rec);
- }
+ /* copy tuple desc without constraints into cache memory context */
+ oldcxt = MemoryContextSwitchTo(mcxt);
+ io->tupdesc = CreateTupleDescCopy(tupdesc);
+ MemoryContextSwitchTo(oldcxt);
- tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
+ ReleaseTupleDesc(tupdesc);
}
- else
- {
- /* json{b}_to_record case */
- if (PG_ARGISNULL(0))
- PG_RETURN_NULL();
- if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
- ereport(ERROR,
- (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("function returning record called in context "
- "that cannot accept type record"),
- errhint("Try calling the function in the FROM clause "
- "using a column definition list.")));
- }
+ /* prepare input value */
+ JsValueToJsObject(jsv, &jso);
- if (jtype == JSONOID)
- {
- /* just get the text */
- json = PG_GETARG_TEXT_PP(json_arg_num);
+ /* populate resulting record tuple */
+ tuple = populate_record(io->tupdesc, &io->record_io,
+ defaultval, mcxt, &jso);
+
+ JsObjectFree(&jso);
+
+ return HeapTupleHeaderGetDatum(tuple);
+}
- json_hash = get_json_object_as_hash(json, funcname);
+/* populate non-null scalar value from json/jsonb value */
+static Datum
+populate_scalar(ScalarIOData *io, /* metadata cache */
+ Oid typid, /* scalar type id */
+ int32 typmod, /* scalar type modifier */
+ JsValue *jsv) /* json/jsonb value to convert */
+{
+ Datum res;
+ char *str;
+ char *json = NULL;
+ if (!jsv)
+ {
/*
- * if the input json is empty, we can only skip the rest if we were
- * passed in a non-null record, since otherwise there may be issues
- * with domain nulls.
+ * Need InputFunctionCall to happen even for NULLs, so that domain
+ * checks are done.
*/
- if (hash_get_num_entries(json_hash) == 0 && rec)
+ str = NULL;
+ }
+ else if (jsv->is_json)
+ {
+ /* already done the hard work in the json case */
+ int len = jsv->val.json.len;
+
+ json = jsv->val.json.str;
+ Assert(json);
+
+ if (len >= 0)
{
- hash_destroy(json_hash);
- ReleaseTupleDesc(tupdesc);
- PG_RETURN_POINTER(rec);
+ /* Need to copy non-null-terminated string */
+ str = palloc((len + 1) * sizeof(char));
+ memcpy(str, json, len);
+ str[len] = '\0';
}
+ else
+ str = json; /* null-terminated string */
}
else
{
- jb = PG_GETARG_JSONB(json_arg_num);
-
- /* same logic as for json */
- if (JB_ROOT_COUNT(jb) == 0 && rec)
+ JsonbValue *jbv = jsv->val.jsonb;
+
+ if (jbv->type == jbvString) /* quotes are stripped */
+ str = pnstrdup(jbv->val.string.val, jbv->val.string.len);
+ else if (jbv->type == jbvBool)
+ str = pnstrdup(jbv->val.boolean ? "t" : "f", 1);
+ else if (jbv->type == jbvNumeric)
+ str = DatumGetCString(DirectFunctionCall1(numeric_out,
+ PointerGetDatum(jbv->val.numeric)));
+ else if (jbv->type == jbvBinary)
+ str = JsonbToCString(NULL, jbv->val.binary.data,
+ jbv->val.binary.len);
+ else
{
- ReleaseTupleDesc(tupdesc);
- PG_RETURN_POINTER(rec);
+ elog(ERROR, "unrecognized jsonb type: %d", (int) jbv->type);
+ str = NULL;
}
}
- ncolumns = tupdesc->natts;
+ res = InputFunctionCall(&io->typiofunc, str, io->typioparam, typmod);
+
+ /* free temporary buffer */
+ if (str != json)
+ pfree(str);
+
+ return res;
+}
+
+/* prepare column metadata cache for the given type */
+static void
+prepare_column_cache(ColumnIOData *column, /* metadata cache */
+ Oid typid, /* column type id */
+ int32 typmod, /* column type modifier */
+ MemoryContext mcxt, /* cache memory context */
+ bool json) /* json/jsonb */
+{
+ Oid typioproc;
+
+ column->typid = typid;
+ column->typmod = typmod;
+
+ getTypeInputInfo(typid, &typioproc, &column->scalar_io.typioparam);
+ fmgr_info_cxt(typioproc, &column->scalar_io.typiofunc, mcxt);
+}
+
+/* populate recursively a record field or an array element from json/jsonb value */
+static Datum
+populate_record_field(ColumnIOData *col, /* metadata cache */
+ Oid typid, /* field type id */
+ int32 typmod, /* field type modifier */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ Datum defaultval, /* default record value if any */
+ JsValue *jsv, /* json/jsonb value to convert */
+ bool *isnull) /* value is NULL */
+{
+ check_stack_depth();
+
+ /* prepare column metadata cache for the given type */
+ if (col->typid != typid || col->typmod != typmod)
+ prepare_column_cache(col, typid, typmod, mcxt, jsv->is_json);
+
+ *isnull = JsValueIsNull(jsv);
+
+ return populate_scalar(&col->scalar_io, typid, typmod,
+ *isnull ? NULL : jsv);
+}
+
+static RecordIOData *
+allocate_record_info(MemoryContext mcxt, int ncolumns)
+{
+ RecordIOData *data = (RecordIOData *)
+ MemoryContextAlloc(mcxt,
+ offsetof(RecordIOData, columns) +
+ ncolumns * sizeof(ColumnIOData));
- if (rec)
+ data->record_type = InvalidOid;
+ data->record_typmod = 0;
+ data->ncolumns = ncolumns;
+ MemSet(data->columns, 0, sizeof(ColumnIOData) * ncolumns);
+
+ return data;
+}
+
+static bool
+JsObjectGetField(JsObject *obj, char *field, JsValue *jsv)
+{
+ jsv->is_json = obj->is_json;
+
+ if (jsv->is_json)
{
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = rec;
+ JsonHashEntry *hashentry = hash_search(obj->val.json_hash, field,
+ HASH_FIND, NULL);
+
+ jsv->val.json.type = hashentry ? hashentry->type : JSON_TOKEN_NULL;
+ jsv->val.json.str = jsv->val.json.type == JSON_TOKEN_NULL ? NULL :
+ hashentry->val;
+ jsv->val.json.len = jsv->val.json.str ? -1 : 0; /* null-terminated */
+
+ return hashentry != NULL;
+ }
+ else
+ {
+ jsv->val.jsonb = !obj->val.jsonb_cont ? NULL :
+ findJsonbValueFromContainerLen(obj->val.jsonb_cont, JB_FOBJECT,
+ field, strlen(field));
+
+ return jsv->val.jsonb != NULL;
}
+}
+
+/* populate record tuple from json/jsonb value */
+static HeapTupleHeader
+populate_record(TupleDesc tupdesc, /* record tuple descriptor */
+ RecordIOData **precord, /* metadata cache location */
+ HeapTupleHeader defaultval, /* default record value if any */
+ MemoryContext mcxt, /* cache memory context */
+ JsObject *obj) /* json/jsonb object */
+{
+ RecordIOData *record = *precord;
+ Datum *values;
+ bool *nulls;
+ HeapTuple res;
+ int ncolumns = tupdesc->natts;
+ int i;
/*
- * We arrange to look up the needed I/O info just once per series of
- * calls, assuming the record type doesn't change underneath us.
+ * if the input json is empty, we can only skip the rest if we were
+ * passed in a non-null record, since otherwise there may be issues
+ * with domain nulls.
*/
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- if (my_extra == NULL ||
- my_extra->ncolumns != ncolumns)
- {
- fcinfo->flinfo->fn_extra =
- MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- my_extra->record_type = InvalidOid;
- my_extra->record_typmod = 0;
- my_extra->ncolumns = ncolumns;
- MemSet(my_extra->columns, 0, sizeof(ColumnIOData) * ncolumns);
- }
+ if (defaultval && JsObjectIsEmpty(obj))
+ return defaultval;
- if (have_record_arg && (my_extra->record_type != tupType ||
- my_extra->record_typmod != tupTypmod))
+ /* (re)allocate metadata cache */
+ if (record == NULL ||
+ record->ncolumns != ncolumns)
+ *precord = record = allocate_record_info(mcxt, ncolumns);
+
+ /* invalidate metadata cache if the record type has changed */
+ if (record->record_type != tupdesc->tdtypeid ||
+ record->record_typmod != tupdesc->tdtypmod)
{
- MemSet(my_extra, 0,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra->record_type = tupType;
- my_extra->record_typmod = tupTypmod;
- my_extra->ncolumns = ncolumns;
+ MemSet(record, 0, offsetof(RecordIOData, columns) +
+ ncolumns * sizeof(ColumnIOData));
+ record->record_type = tupdesc->tdtypeid;
+ record->record_typmod = tupdesc->tdtypmod;
+ record->ncolumns = ncolumns;
}
values = (Datum *) palloc(ncolumns * sizeof(Datum));
nulls = (bool *) palloc(ncolumns * sizeof(bool));
- if (rec)
+ if (defaultval)
{
+ HeapTupleData tuple;
+
+ /* Build a temporary HeapTuple control structure */
+ tuple.t_len = HeapTupleHeaderGetDatumLength(defaultval);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = defaultval;
+
/* Break down the tuple into fields */
heap_deform_tuple(&tuple, tupdesc, values, nulls);
}
@@ -2265,31 +2466,19 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
for (i = 0; i < ncolumns; ++i)
{
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- JsonbValue *v = NULL;
- JsonHashEntry *hashentry = NULL;
+ Form_pg_attribute att = tupdesc->attrs[i];
+ char *colname = NameStr(att->attname);
+ JsValue field = { 0 };
+ bool found;
/* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
+ if (att->attisdropped)
{
nulls[i] = true;
continue;
}
- if (jtype == JSONOID)
- {
- hashentry = hash_search(json_hash,
- NameStr(tupdesc->attrs[i]->attname),
- HASH_FIND, NULL);
- }
- else
- {
- char *key = NameStr(tupdesc->attrs[i]->attname);
-
- v = findJsonbValueFromContainerLen(&jb->root, JB_FOBJECT, key,
- strlen(key));
- }
+ found = JsObjectGetField(obj, colname, &field);
/*
* we can't just skip here if the key wasn't found since we might have
@@ -2299,73 +2488,158 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
* then every field which we don't populate needs to be run through
* the input function just in case it's a domain type.
*/
- if (((jtype == JSONOID && hashentry == NULL) ||
- (jtype == JSONBOID && v == NULL)) && rec)
+ if (defaultval && !found)
continue;
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
+ values[i] = populate_record_field(&record->columns[i],
+ att->atttypid,
+ att->atttypmod,
+ colname,
+ mcxt,
+ nulls[i] ? (Datum) 0 : values[i],
+ &field,
+ &nulls[i]);
+ }
+
+ res = heap_form_tuple(tupdesc, values, nulls);
+
+ pfree(values);
+ pfree(nulls);
+
+ return res->t_data;
+}
+
+/* structure to cache metadata needed for populate_record_worker() */
+typedef struct PopulateRecordCache
+{
+ Oid argtype; /* verified row type of the first argument */
+ CompositeIOData io; /* metadata cache for populate_composite() */
+} PopulateRecordCache;
+
+static Datum
+populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
+ bool have_record_arg)
+{
+ int json_arg_num = have_record_arg ? 1 : 0;
+ Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
+ JsValue jsv = { 0 };
+ HeapTupleHeader rec = NULL;
+ Oid tupType;
+ int32 tupTypmod;
+ TupleDesc tupdesc = NULL;
+ Datum rettuple;
+ JsonbValue jbv;
+ MemoryContext fnmcxt = fcinfo->flinfo->fn_mcxt;
+ PopulateRecordCache *cache = fcinfo->flinfo->fn_extra;
+
+ Assert(jtype == JSONOID || jtype == JSONBOID);
+
+ /*
+ * We arrange to look up the needed I/O info just once per series of
+ * calls, assuming the record type doesn't change underneath us.
+ */
+ if (!cache)
+ fcinfo->flinfo->fn_extra = cache =
+ MemoryContextAllocZero(fnmcxt, sizeof(*cache));
+
+ if (have_record_arg)
+ {
+ Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+
+ if (cache->argtype != argtype)
{
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- fcinfo->flinfo->fn_mcxt);
- column_info->column_type = column_type;
+ if (!type_is_rowtype(argtype))
+ ereport(ERROR,
+ (errcode(ERRCODE_DATATYPE_MISMATCH),
+ errmsg("first argument of %s must be a row type",
+ funcname)));
+
+ cache->argtype = argtype;
}
- if ((jtype == JSONOID && (hashentry == NULL || hashentry->isnull)) ||
- (jtype == JSONBOID && (v == NULL || v->type == jbvNull)))
+
+ if (PG_ARGISNULL(0))
{
+ if (PG_ARGISNULL(1))
+ PG_RETURN_NULL();
+
/*
- * need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
+ * have no tuple to look at, so the only source of type info is
+ * the argtype. The lookup_rowtype_tupdesc call below will error
+ * out if we don't have a known composite type oid here.
*/
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
+ tupType = argtype;
+ tupTypmod = -1;
}
else
{
- char *s = NULL;
+ rec = PG_GETARG_HEAPTUPLEHEADER(0);
- if (jtype == JSONOID)
- {
- /* already done the hard work in the json case */
- s = hashentry->val;
- }
- else
- {
- if (v->type == jbvString)
- s = pnstrdup(v->val.string.val, v->val.string.len);
- else if (v->type == jbvBool)
- s = pnstrdup((v->val.boolean) ? "t" : "f", 1);
- else if (v->type == jbvNumeric)
- s = DatumGetCString(DirectFunctionCall1(numeric_out,
- PointerGetDatum(v->val.numeric)));
- else if (v->type == jbvBinary)
- s = JsonbToCString(NULL, (JsonbContainer *) v->val.binary.data, v->val.binary.len);
- else
- elog(ERROR, "unrecognized jsonb type: %d", (int) v->type);
- }
+ if (PG_ARGISNULL(1))
+ PG_RETURN_POINTER(rec);
- values[i] = InputFunctionCall(&column_info->proc, s,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
+ /* Extract type info from the tuple itself */
+ tupType = HeapTupleHeaderGetTypeId(rec);
+ tupTypmod = HeapTupleHeaderGetTypMod(rec);
}
}
+ else
+ {
+ /* json{b}_to_record case */
+ if (PG_ARGISNULL(0))
+ PG_RETURN_NULL();
- rettuple = heap_form_tuple(tupdesc, values, nulls);
+ if (get_call_result_type(fcinfo, NULL, &tupdesc) != TYPEFUNC_COMPOSITE)
+ ereport(ERROR,
+ (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+ errmsg("function returning record called in context "
+ "that cannot accept type record"),
+ errhint("Try calling the function in the FROM clause "
+ "using a column definition list.")));
+
+ Assert(tupdesc);
+
+ /*
+ * Add tupdesc to the cache and set the appropriate values of
+ * tupType/tupTypmod for proper cache usage in populate_composite().
+ */
+ cache->io.tupdesc = tupdesc;
+
+ tupType = tupdesc->tdtypeid;
+ tupTypmod = tupdesc->tdtypmod;
+ }
+
+ jsv.is_json = jtype == JSONOID;
+
+ if (jsv.is_json)
+ {
+ text *json = PG_GETARG_TEXT_PP(json_arg_num);
+
+ jsv.val.json.str = VARDATA_ANY(json);
+ jsv.val.json.len = VARSIZE_ANY_EXHDR(json);
+ jsv.val.json.type = JSON_TOKEN_INVALID; /* not used in populate_composite() */
+ }
+ else
+ {
+ Jsonb *jb = PG_GETARG_JSONB(json_arg_num);
+
+ jsv.val.jsonb = &jbv;
+
+ /* fill binary jsonb value pointing to jb */
+ jbv.type = jbvBinary;
+ jbv.val.binary.data = &jb->root;
+ jbv.val.binary.len = VARSIZE(jb) - VARHDRSZ;
+ }
- ReleaseTupleDesc(tupdesc);
+ rettuple = populate_composite(&cache->io, tupType, tupTypmod,
+ NULL, fnmcxt, rec, &jsv);
- if (json_hash)
- hash_destroy(json_hash);
+ if (tupdesc)
+ {
+ cache->io.tupdesc = NULL;
+ ReleaseTupleDesc(tupdesc);
+ }
- PG_RETURN_DATUM(HeapTupleGetDatum(rettuple));
+ PG_RETURN_DATUM(rettuple);
}
/*
@@ -2374,12 +2648,12 @@ populate_record_worker(FunctionCallInfo fcinfo, const char *funcname,
* decompose a json object into a hash table.
*/
static HTAB *
-get_json_object_as_hash(text *json, const char *funcname)
+get_json_object_as_hash(char *json, int len, const char *funcname)
{
HASHCTL ctl;
HTAB *tab;
JHashState *state;
- JsonLexContext *lex = makeJsonLexContext(json, true);
+ JsonLexContext *lex = makeJsonLexContextCstringLen(json, len, true);
JsonSemAction *sem;
memset(&ctl, 0, sizeof(ctl));
@@ -2417,6 +2691,9 @@ hash_object_field_start(void *state, char *fname, bool isnull)
if (_state->lex->lex_level > 1)
return;
+ /* remember token type */
+ _state->saved_token_type = _state->lex->token_type;
+
if (_state->lex->token_type == JSON_TOKEN_ARRAY_START ||
_state->lex->token_type == JSON_TOKEN_OBJECT_START)
{
@@ -2460,7 +2737,9 @@ hash_object_field_end(void *state, char *fname, bool isnull)
* that, a later field with the same name overrides the earlier field.
*/
- hashentry->isnull = isnull;
+ hashentry->type = _state->saved_token_type;
+ Assert(isnull == (hashentry->type == JSON_TOKEN_NULL));
+
if (_state->save_json_start != NULL)
{
int len = _state->lex->prev_token_terminator - _state->save_json_start;
@@ -2499,7 +2778,11 @@ hash_scalar(void *state, char *token, JsonTokenType tokentype)
errmsg("cannot call %s on a scalar", _state->function_name)));
if (_state->lex->lex_level == 1)
+ {
_state->saved_scalar = token;
+ /* saved_token_type must already be set in hash_object_field_start() */
+ Assert(_state->saved_token_type == tokentype);
+ }
}
@@ -2538,121 +2821,21 @@ json_to_recordset(PG_FUNCTION_ARGS)
}
static void
-make_row_from_rec_and_jsonb(Jsonb *element, PopulateRecordsetState *state)
+populate_recordset_record(PopulateRecordsetState *state, JsObject *obj)
{
- Datum *values;
- bool *nulls;
- int i;
- RecordIOData *my_extra = state->my_extra;
- int ncolumns = my_extra->ncolumns;
- TupleDesc tupdesc = state->ret_tdesc;
- HeapTupleHeader rec = state->rec;
- HeapTuple rettuple;
-
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
-
- if (state->rec)
- {
- HeapTupleData tuple;
-
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(state->rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = state->rec;
-
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
- }
- else
- {
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
- }
-
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- JsonbValue *v = NULL;
- char *key;
-
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
-
- key = NameStr(tupdesc->attrs[i]->attname);
-
- v = findJsonbValueFromContainerLen(&element->root, JB_FOBJECT,
- key, strlen(key));
-
- /*
- * We can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (v == NULL && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- state->fn_mcxt);
- column_info->column_type = column_type;
- }
- if (v == NULL || v->type == jbvNull)
- {
- /*
- * Need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
- */
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
- }
- else
- {
- char *s = NULL;
-
- if (v->type == jbvString)
- s = pnstrdup(v->val.string.val, v->val.string.len);
- else if (v->type == jbvBool)
- s = pnstrdup((v->val.boolean) ? "t" : "f", 1);
- else if (v->type == jbvNumeric)
- s = DatumGetCString(DirectFunctionCall1(numeric_out,
- PointerGetDatum(v->val.numeric)));
- else if (v->type == jbvBinary)
- s = JsonbToCString(NULL, (JsonbContainer *) v->val.binary.data, v->val.binary.len);
- else
- elog(ERROR, "unrecognized jsonb type: %d", (int) v->type);
-
- values[i] = InputFunctionCall(&column_info->proc, s,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
- }
- }
-
- rettuple = heap_form_tuple(tupdesc, values, nulls);
-
- tuplestore_puttuple(state->tuple_store, rettuple);
+ HeapTupleData tuple;
+ HeapTupleHeader tuphead = populate_record(state->ret_tdesc,
+ state->my_extra,
+ state->rec,
+ state->fn_mcxt,
+ obj);
+
+ tuple.t_len = HeapTupleHeaderGetDatumLength(tuphead);
+ ItemPointerSetInvalid(&(tuple.t_self));
+ tuple.t_tableOid = InvalidOid;
+ tuple.t_data = tuphead;
+
+ tuplestore_puttuple(state->tuple_store, &tuple);
}
/*
@@ -2666,12 +2849,8 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
Oid jtype = get_fn_expr_argtype(fcinfo->flinfo, json_arg_num);
ReturnSetInfo *rsi;
MemoryContext old_cxt;
- Oid tupType;
- int32 tupTypmod;
HeapTupleHeader rec;
TupleDesc tupdesc;
- RecordIOData *my_extra;
- int ncolumns;
PopulateRecordsetState *state;
if (have_record_arg)
@@ -2717,38 +2896,6 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
else
rec = PG_GETARG_HEAPTUPLEHEADER(0);
- tupType = tupdesc->tdtypeid;
- tupTypmod = tupdesc->tdtypmod;
- ncolumns = tupdesc->natts;
-
- /*
- * We arrange to look up the needed I/O info just once per series of
- * calls, assuming the record type doesn't change underneath us.
- */
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- if (my_extra == NULL ||
- my_extra->ncolumns != ncolumns)
- {
- fcinfo->flinfo->fn_extra =
- MemoryContextAlloc(fcinfo->flinfo->fn_mcxt,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra;
- my_extra->record_type = InvalidOid;
- my_extra->record_typmod = 0;
- }
-
- if (my_extra->record_type != tupType ||
- my_extra->record_typmod != tupTypmod)
- {
- MemSet(my_extra, 0,
- offsetof(RecordIOData, columns) +
- ncolumns * sizeof(ColumnIOData));
- my_extra->record_type = tupType;
- my_extra->record_typmod = tupTypmod;
- my_extra->ncolumns = ncolumns;
- }
-
state = palloc0(sizeof(PopulateRecordsetState));
/* make these in a sufficiently long-lived memory context */
@@ -2761,7 +2908,7 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
MemoryContextSwitchTo(old_cxt);
state->function_name = funcname;
- state->my_extra = my_extra;
+ state->my_extra = (RecordIOData **) &fcinfo->flinfo->fn_extra;
state->rec = rec;
state->fn_mcxt = fcinfo->flinfo->fn_mcxt;
@@ -2812,14 +2959,19 @@ populate_recordset_worker(FunctionCallInfo fcinfo, const char *funcname,
if (r == WJB_ELEM)
{
- Jsonb *element = JsonbValueToJsonb(&v);
+ JsObject obj;
- if (!JB_ROOT_IS_OBJECT(element))
+ if (v.type != jbvBinary ||
+ !JsonContainerIsObject(v.val.binary.data))
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("argument of %s must be an array of objects",
funcname)));
- make_row_from_rec_and_jsonb(element, state);
+
+ obj.is_json = false;
+ obj.val.jsonb_cont = v.val.binary.data;
+
+ populate_recordset_record(state, &obj);
}
}
}
@@ -2863,115 +3015,20 @@ static void
populate_recordset_object_end(void *state)
{
PopulateRecordsetState *_state = (PopulateRecordsetState *) state;
- HTAB *json_hash = _state->json_hash;
- Datum *values;
- bool *nulls;
- int i;
- RecordIOData *my_extra = _state->my_extra;
- int ncolumns = my_extra->ncolumns;
- TupleDesc tupdesc = _state->ret_tdesc;
- JsonHashEntry *hashentry;
- HeapTupleHeader rec = _state->rec;
- HeapTuple rettuple;
+ JsObject obj;
/* Nested objects require no special processing */
if (_state->lex->lex_level > 1)
return;
- /* Otherwise, construct and return a tuple based on this level-1 object */
- values = (Datum *) palloc(ncolumns * sizeof(Datum));
- nulls = (bool *) palloc(ncolumns * sizeof(bool));
-
- if (_state->rec)
- {
- HeapTupleData tuple;
-
- /* Build a temporary HeapTuple control structure */
- tuple.t_len = HeapTupleHeaderGetDatumLength(_state->rec);
- ItemPointerSetInvalid(&(tuple.t_self));
- tuple.t_tableOid = InvalidOid;
- tuple.t_data = _state->rec;
+ obj.is_json = true;
+ obj.val.json_hash = _state->json_hash;
- /* Break down the tuple into fields */
- heap_deform_tuple(&tuple, tupdesc, values, nulls);
- }
- else
- {
- for (i = 0; i < ncolumns; ++i)
- {
- values[i] = (Datum) 0;
- nulls[i] = true;
- }
- }
-
- for (i = 0; i < ncolumns; ++i)
- {
- ColumnIOData *column_info = &my_extra->columns[i];
- Oid column_type = tupdesc->attrs[i]->atttypid;
- char *value;
-
- /* Ignore dropped columns in datatype */
- if (tupdesc->attrs[i]->attisdropped)
- {
- nulls[i] = true;
- continue;
- }
-
- hashentry = hash_search(json_hash,
- NameStr(tupdesc->attrs[i]->attname),
- HASH_FIND, NULL);
-
- /*
- * we can't just skip here if the key wasn't found since we might have
- * a domain to deal with. If we were passed in a non-null record
- * datum, we assume that the existing values are valid (if they're
- * not, then it's not our fault), but if we were passed in a null,
- * then every field which we don't populate needs to be run through
- * the input function just in case it's a domain type.
- */
- if (hashentry == NULL && rec)
- continue;
-
- /*
- * Prepare to convert the column value from text
- */
- if (column_info->column_type != column_type)
- {
- getTypeInputInfo(column_type,
- &column_info->typiofunc,
- &column_info->typioparam);
- fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
- _state->fn_mcxt);
- column_info->column_type = column_type;
- }
- if (hashentry == NULL || hashentry->isnull)
- {
- /*
- * need InputFunctionCall to happen even for nulls, so that domain
- * checks are done
- */
- values[i] = InputFunctionCall(&column_info->proc, NULL,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = true;
- }
- else
- {
- value = hashentry->val;
-
- values[i] = InputFunctionCall(&column_info->proc, value,
- column_info->typioparam,
- tupdesc->attrs[i]->atttypmod);
- nulls[i] = false;
- }
- }
-
- rettuple = heap_form_tuple(tupdesc, values, nulls);
-
- tuplestore_puttuple(_state->tuple_store, rettuple);
+ /* Otherwise, construct and return a tuple based on this level-1 object */
+ populate_recordset_record(_state, &obj);
/* Done with hash for this object */
- hash_destroy(json_hash);
+ hash_destroy(_state->json_hash);
_state->json_hash = NULL;
}
@@ -3017,6 +3074,8 @@ populate_recordset_object_field_start(void *state, char *fname, bool isnull)
if (_state->lex->lex_level > 2)
return;
+ _state->saved_token_type = _state->lex->token_type;
+
if (_state->lex->token_type == JSON_TOKEN_ARRAY_START ||
_state->lex->token_type == JSON_TOKEN_OBJECT_START)
{
@@ -3058,7 +3117,9 @@ populate_recordset_object_field_end(void *state, char *fname, bool isnull)
* that, a later field with the same name overrides the earlier field.
*/
- hashentry->isnull = isnull;
+ hashentry->type = _state->saved_token_type;
+ Assert(isnull == (hashentry->type == JSON_TOKEN_NULL));
+
if (_state->save_json_start != NULL)
{
int len = _state->lex->prev_token_terminator - _state->save_json_start;
0002-recursive-json_populate_record-v05.patchtext/x-patch; name=0002-recursive-json_populate_record-v05.patchDownload
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index 9408a25..edcc96d 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -11525,12 +11525,12 @@ table2-mapping
whose columns match the record type defined by <replaceable>base</>
(see note below).
</entry>
- <entry><literal>select * from json_populate_record(null::myrowtype, '{"a":1,"b":2}')</literal></entry>
+ <entry><literal>select * from json_populate_record(null::myrowtype, '{"a": 1, "b": ["2", "a b"], "c": {"d": 4, "e": "a b c"}}')</literal></entry>
<entry>
<programlisting>
- a | b
----+---
- 1 | 2
+ a | b | c
+---+-----------+-------------
+ 1 | {2,"a b"} | (4,"a b c")
</programlisting>
</entry>
</row>
@@ -11619,12 +11619,12 @@ table2-mapping
explicitly define the structure of the record with an <literal>AS</>
clause.
</entry>
- <entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":"bar"}') as x(a int, b text, d text) </literal></entry>
+ <entry><literal>select * from json_to_record('{"a":1,"b":[1,2,3],"c":[1,2,3],"e":"bar","r": {"a": 123, "b": "a b c"}}') as x(a int, b text, c int[], d text, r myrowtype) </literal></entry>
<entry>
<programlisting>
- a | b | d
----+---------+---
- 1 | [1,2,3] |
+ a | b | c | d | r
+---+---------+---------+---+---------------
+ 1 | [1,2,3] | {1,2,3} | | (123,"a b c")
</programlisting>
</entry>
</row>
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index da29dab..507b47f 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -31,6 +31,7 @@
#include "utils/jsonb.h"
#include "utils/lsyscache.h"
#include "utils/memutils.h"
+#include "utils/syscache.h"
#include "utils/typcache.h"
/* Operations available for setPath */
@@ -130,6 +131,14 @@ typedef struct ScalarIOData
typedef struct ColumnIOData ColumnIOData;
typedef struct RecordIOData RecordIOData;
+/* structure to cache metadata needed for populate_array() */
+typedef struct ArrayIOData
+{
+ ColumnIOData *element_info; /* metadata cache */
+ Oid element_type; /* array element type id */
+ int32 element_typmod; /* array element type modifier */
+} ArrayIOData;
+
/* structure to cache metadata needed for populate_composite() */
typedef struct CompositeIOData
{
@@ -141,6 +150,24 @@ typedef struct CompositeIOData
TupleDesc tupdesc; /* cached tuple descriptor */
} CompositeIOData;
+/* structure to cache metadata needed for populate_domain() */
+typedef struct DomainIOData
+{
+ ColumnIOData *base_io; /* metadata cache */
+ Oid base_typid; /* base type id */
+ int32 base_typmod; /* base type modifier */
+ void *domain_info; /* opaque cache for domain checks */
+} DomainIOData;
+
+/* enumeration type categories */
+typedef enum TypeCat
+{
+ TYPECAT_SCALAR = 's',
+ TYPECAT_ARRAY = 'a',
+ TYPECAT_COMPOSITE = 'c',
+ TYPECAT_DOMAIN = 'd',
+} TypeCat;
+
/* these two are stolen from hstore / record_out, used in populate_record* */
/* structure to cache record metadata needed for populate_record_field() */
@@ -148,8 +175,15 @@ struct ColumnIOData
{
Oid typid; /* column type id */
int32 typmod; /* column type modifier */
+ TypeCat typcat; /* column type category */
ScalarIOData scalar_io; /* metadata cache for directi conversion
* through input function */
+ union
+ {
+ ArrayIOData array;
+ CompositeIOData composite;
+ DomainIOData domain;
+ } io; /* metadata cache for various column type categories */
};
/* structure to cache record metadata needed for populate_record() */
@@ -177,6 +211,29 @@ typedef struct PopulateRecordsetState
MemoryContext fn_mcxt; /* used to stash IO funcs */
} PopulateRecordsetState;
+/* common data for populate_array_json() and populate_array_dim_jsonb() */
+typedef struct PopulateArrayContext
+{
+ ArrayBuildState *astate; /* array build state */
+ ArrayIOData *aio; /* metadata cache */
+ MemoryContext acxt; /* array build memory context */
+ MemoryContext mcxt; /* cache memory context */
+ const char *colname; /* for diagnostics only */
+ int *dims; /* dimensions */
+ int *sizes; /* current dimension counters */
+ int ndims; /* number of dimensions */
+} PopulateArrayContext;
+
+/* state for populate_array_json() */
+typedef struct PopulateArrayState
+{
+ JsonLexContext *lex; /* json lexer */
+ PopulateArrayContext *ctx; /* context */
+ char *element_start; /* start of the current array element */
+ char *element_scalar; /* current array element token if it is a scalar */
+ JsonTokenType element_type; /* current array element type */
+} PopulateArrayState;
+
/* state for json_strip_nulls */
typedef struct StripnullState
{
@@ -2179,6 +2236,358 @@ json_to_record(PG_FUNCTION_ARGS)
return populate_record_worker(fcinfo, "json_to_record", false);
}
+/* helper function for diagnostics */
+static void
+populate_array_report_expected_array(PopulateArrayContext *ctx, int ndim)
+{
+ if (ndim <= 0)
+ {
+ if (ctx->colname)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("expected json array"),
+ errhint("see the value of key \"%s\"", ctx->colname)));
+ else
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("expected json array")));
+ }
+ else
+ {
+ StringInfoData indices;
+ int i;
+
+ initStringInfo(&indices);
+
+ Assert(ctx->ndims > 0 && ndim < ctx->ndims);
+
+ for (i = 0; i < ndim; i++)
+ appendStringInfo(&indices, "[%d]", ctx->sizes[i]);
+
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("expected json array"),
+ errhint(ctx->colname ? "see the array element %s of key \"%s\""
+ : "see the array element %s",
+ indices.data, ctx->colname)));
+ }
+}
+
+/* assign the number of dimensions of the populated array once it becomes known */
+static void
+populate_array_assign_ndims(PopulateArrayContext *ctx, int ndims)
+{
+ int i;
+
+ Assert(ctx->ndims <= 0);
+
+ if (ndims <= 0)
+ populate_array_report_expected_array(ctx, ndims);
+
+ ctx->ndims = ndims;
+ ctx->dims = palloc(sizeof(int) * ndims);
+ ctx->sizes = palloc0(sizeof(int) * ndims);
+
+ for (i = 0; i < ndims; i++)
+ ctx->dims[i] = -1; /* dimensions are unknown yet */
+}
+
+/* check populated subarray dimension */
+static void
+populate_array_check_dimension(PopulateArrayContext *ctx, int ndim)
+{
+ int dim = ctx->sizes[ndim]; /* current dimension counter */
+
+ if (ctx->dims[ndim] == -1)
+ ctx->dims[ndim] = dim; /* assign dimension if unknown yet */
+ else if (ctx->dims[ndim] != dim)
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
+ errmsg("malformed json array"),
+ errdetail("Multidimensional arrays must have "
+ "sub-arrays with matching dimensions.")));
+
+ /* reset current array dimension size counter */
+ ctx->sizes[ndim] = 0;
+
+ /* increment parent dimension counter if it is a nested sub-array */
+ if (ndim > 0)
+ ctx->sizes[ndim - 1]++;
+}
+
+static void
+populate_array_element(PopulateArrayContext *ctx, int ndim, JsValue *jsv)
+{
+ Datum element;
+ bool element_isnull;
+
+ /* populate array element */
+ element = populate_record_field(ctx->aio->element_info,
+ ctx->aio->element_type,
+ ctx->aio->element_typmod,
+ NULL, ctx->mcxt, PointerGetDatum(NULL),
+ jsv, &element_isnull);
+
+ accumArrayResult(ctx->astate, element, element_isnull,
+ ctx->aio->element_type, ctx->acxt);
+
+ Assert(ndim > 0);
+ ctx->sizes[ndim - 1]++; /* increment current dimension counter */
+}
+
+/* json object start handler for populate_array_json() */
+static void
+populate_array_object_start(void *_state)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (state->ctx->ndims <= 0)
+ populate_array_assign_ndims(state->ctx, ndim);
+ else if (ndim < state->ctx->ndims)
+ populate_array_report_expected_array(state->ctx, ndim);
+}
+
+/* json array end handler for populate_array_json() */
+static void
+populate_array_array_end(void *_state)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ PopulateArrayContext *ctx = state->ctx;
+ int ndim = state->lex->lex_level;
+
+ if (ctx->ndims <= 0)
+ populate_array_assign_ndims(ctx, ndim + 1);
+
+ if (ndim < ctx->ndims)
+ populate_array_check_dimension(ctx, ndim);
+}
+
+/* json array element start handler for populate_array_json() */
+static void
+populate_array_element_start(void *_state, bool isnull)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ int ndim = state->lex->lex_level;
+
+ if (state->ctx->ndims <= 0 || ndim == state->ctx->ndims)
+ {
+ /* remember current array element start */
+ state->element_start = state->lex->token_start;
+ state->element_type = state->lex->token_type;
+ state->element_scalar = NULL;
+ }
+}
+
+/* json array element end handler for populate_array_json() */
+static void
+populate_array_element_end(void *_state, bool isnull)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ PopulateArrayContext *ctx = state->ctx;
+ int ndim = state->lex->lex_level;
+
+ Assert(ctx->ndims > 0);
+
+ if (ndim == ctx->ndims)
+ {
+ JsValue jsv;
+
+ jsv.is_json = true;
+ jsv.val.json.type = state->element_type;
+
+ if (isnull)
+ {
+ Assert(jsv.val.json.type == JSON_TOKEN_NULL);
+ jsv.val.json.str = NULL;
+ jsv.val.json.len = 0;
+ }
+ else if (state->element_scalar)
+ {
+ jsv.val.json.str = state->element_scalar;
+ jsv.val.json.len = -1; /* null-terminated */
+ }
+ else
+ {
+ jsv.val.json.str = state->element_start;
+ jsv.val.json.len = (state->lex->prev_token_terminator -
+ state->element_start) * sizeof(char);
+ }
+
+ populate_array_element(ctx, ndim, &jsv);
+ }
+}
+
+/* json scalar handler for populate_array_json() */
+static void
+populate_array_scalar(void *_state, char *token, JsonTokenType tokentype)
+{
+ PopulateArrayState *state = (PopulateArrayState *) _state;
+ PopulateArrayContext *ctx = state->ctx;
+ int ndim = state->lex->lex_level;
+
+ if (ctx->ndims <= 0)
+ populate_array_assign_ndims(ctx, ndim);
+ else if (ndim < ctx->ndims)
+ populate_array_report_expected_array(ctx, ndim);
+
+ if (ndim == ctx->ndims)
+ {
+ /* remember scalar element token */
+ state->element_scalar = token;
+ /* element_type must already be set in populate_array_element_start() */
+ Assert(state->element_type == tokentype);
+ }
+}
+
+/* parse json array and populate array */
+static void
+populate_array_json(PopulateArrayContext *ctx, char *json, int len)
+{
+ PopulateArrayState state;
+ JsonSemAction sem;
+
+ state.lex = makeJsonLexContextCstringLen(json, len, true);
+ state.ctx = ctx;
+
+ memset(&sem, 0, sizeof(sem));
+ sem.semstate = (void *) &state;
+ sem.object_start = populate_array_object_start;
+ sem.array_end = populate_array_array_end;
+ sem.array_element_start = populate_array_element_start;
+ sem.array_element_end = populate_array_element_end;
+ sem.scalar = populate_array_scalar;
+
+ pg_parse_json(state.lex, &sem);
+
+ /* number of dimensions should be already known */
+ Assert(ctx->ndims > 0 && ctx->dims);
+
+ pfree(state.lex);
+}
+
+/*
+ * populate_array_dim_jsonb() -- Iterate recursively through jsonb sub-array
+ * elements and accumulate result using given ArrayBuildState.
+ */
+static void
+populate_array_dim_jsonb(PopulateArrayContext *ctx, /* context */
+ JsonbValue *jbv, /* jsonb sub-array */
+ int ndim) /* current dimension */
+{
+ JsonbContainer *jbc = jbv->val.binary.data;
+ JsonbIterator *it;
+ JsonbIteratorToken tok;
+ JsonbValue val;
+ JsValue jsv;
+
+ check_stack_depth();
+
+ if (jbv->type != jbvBinary || !JsonContainerIsArray(jbc))
+ populate_array_report_expected_array(ctx, ndim - 1);
+
+ Assert(!JsonContainerIsScalar(jbc));
+
+ it = JsonbIteratorInit(jbc);
+
+ tok = JsonbIteratorNext(&it, &val, true);
+ Assert(tok == WJB_BEGIN_ARRAY);
+
+ tok = JsonbIteratorNext(&it, &val, true);
+
+ /*
+ * If the number of dimensions is not yet known and
+ * we have found the end of the array or the first child element is not an array,
+ * then assign the number of dimensions now.
+ */
+ if (ctx->ndims <= 0 &&
+ (tok == WJB_END_ARRAY ||
+ (tok == WJB_ELEM &&
+ (val.type != jbvBinary ||
+ !JsonContainerIsArray(val.val.binary.data)))))
+ populate_array_assign_ndims(ctx, ndim);
+
+ jsv.is_json = false;
+ jsv.val.jsonb = &val;
+
+ /* process all array elements */
+ while (tok == WJB_ELEM)
+ {
+ /*
+ * Recurse only if the number of dimensions is not yet known or
+ * if it is not the innermost dimension.
+ */
+ if (ctx->ndims > 0 && ndim >= ctx->ndims)
+ populate_array_element(ctx, ndim, &jsv);
+ else
+ {
+ /* populate child sub-array */
+ populate_array_dim_jsonb(ctx, &val, ndim + 1);
+
+ /* number of dimensions should be already known */
+ Assert(ctx->ndims > 0 && ctx->dims);
+
+ populate_array_check_dimension(ctx, ndim);
+ }
+
+ tok = JsonbIteratorNext(&it, &val, true);
+ }
+
+ Assert(tok == WJB_END_ARRAY);
+
+ /* free iterator, iterating until WJB_DONE */
+ tok = JsonbIteratorNext(&it, &val, true);
+ Assert(tok == WJB_DONE && !it);
+}
+
+/* populate recursively json/jsonb array */
+static Datum
+populate_array(ArrayIOData *aio, /* metadata cache */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ JsValue *jsv) /* json/jsonb array */
+{
+ PopulateArrayContext ctx;
+ Datum result;
+ int *lbs;
+ int i;
+
+ ctx.aio = aio;
+ ctx.mcxt = mcxt;
+ ctx.acxt = CurrentMemoryContext;
+ ctx.astate = initArrayResult(aio->element_type, ctx.acxt, true);
+ ctx.colname = colname;
+ ctx.ndims = 0; /* unknown yet */
+ ctx.dims = NULL;
+ ctx.sizes = NULL;
+
+ if (jsv->is_json)
+ populate_array_json(&ctx, jsv->val.json.str,
+ jsv->val.json.len >= 0 ? jsv->val.json.len
+ : strlen(jsv->val.json.str));
+ else
+ {
+ populate_array_dim_jsonb(&ctx, jsv->val.jsonb, 1);
+ ctx.dims[0] = ctx.sizes[0];
+ }
+
+ Assert(ctx.ndims > 0);
+
+ lbs = palloc(sizeof(int) * ctx.ndims);
+
+ for (i = 0; i < ctx.ndims; i++)
+ lbs[i] = 1;
+
+ result = makeMdArrayResult(ctx.astate, ctx.ndims, ctx.dims, lbs,
+ ctx.acxt, true);
+
+ pfree(ctx.dims);
+ pfree(ctx.sizes);
+ pfree(lbs);
+
+ return result;
+}
+
static void
JsValueToJsObject(JsValue *jsv, JsObject *jso)
{
@@ -2258,26 +2667,35 @@ populate_scalar(ScalarIOData *io, /* metadata cache */
JsValue *jsv) /* json/jsonb value to convert */
{
Datum res;
- char *str;
+ char *str = NULL;
char *json = NULL;
- if (!jsv)
- {
- /*
- * Need InputFunctionCall to happen even for NULLs, so that domain
- * checks are done.
- */
- str = NULL;
- }
- else if (jsv->is_json)
+ if (jsv->is_json)
{
- /* already done the hard work in the json case */
int len = jsv->val.json.len;
json = jsv->val.json.str;
Assert(json);
- if (len >= 0)
+ /* already done the hard work in the json case */
+ if ((typid == JSONOID || typid == JSONBOID) &&
+ jsv->val.json.type == JSON_TOKEN_STRING)
+ {
+ /*
+ * Add quotes around string value (should be already escaped)
+ * if converting to json/jsonb.
+ */
+
+ if (len < 0)
+ len = strlen(json);
+
+ str = palloc(len + sizeof(char) * 3);
+ str[0] = '"';
+ memcpy(&str[1], json, len);
+ str[len + 1] = '"';
+ str[len + 2] = '\0';
+ }
+ else if (len >= 0)
{
/* Need to copy non-null-terminated string */
str = palloc(len + 1 * sizeof(char));
@@ -2291,10 +2709,25 @@ populate_scalar(ScalarIOData *io, /* metadata cache */
{
JsonbValue *jbv = jsv->val.jsonb;
- if (jbv->type == jbvString) /* quotes are stripped */
+ if (typid == JSONBOID)
+ {
+ Jsonb *jsonb = JsonbValueToJsonb(jbv); /* directly use jsonb */
+ return JsonbGetDatum(jsonb);
+ }
+ /* convert jsonb to string for typio call */
+ else if (typid == JSONOID && jbv->type != jbvBinary)
+ {
+ /*
+ * Convert scalar jsonb (non-scalars are passed here as jbvBinary)
+ * to json string, preserving quotes around top-level strings.
+ */
+ Jsonb *jsonb = JsonbValueToJsonb(jbv);
+ str = JsonbToCString(NULL, &jsonb->root, VARSIZE(jsonb));
+ }
+ else if (jbv->type == jbvString) /* quotes are stripped */
str = pnstrdup(jbv->val.string.val, jbv->val.string.len);
else if (jbv->type == jbvBool)
- str = pnstrdup(jbv->val.boolean ? "t" : "f", 1);
+ str = pstrdup(jbv->val.boolean ? "true" : "false");
else if (jbv->type == jbvNumeric)
str = DatumGetCString(DirectFunctionCall1(numeric_out,
PointerGetDatum(jbv->val.numeric)));
@@ -2302,10 +2735,7 @@ populate_scalar(ScalarIOData *io, /* metadata cache */
str = JsonbToCString(NULL, jbv->val.binary.data,
jbv->val.binary.len);
else
- {
elog(ERROR, "unrecognized jsonb type: %d", (int) jbv->type);
- str = NULL;
- }
}
res = InputFunctionCall(&io->typiofunc, str, io->typioparam, typmod);
@@ -2317,6 +2747,32 @@ populate_scalar(ScalarIOData *io, /* metadata cache */
return res;
}
+static Datum
+populate_domain(DomainIOData *io, /* metadata cache */
+ Oid typid, /* domain type id */
+ const char *colname, /* for diagnostics only */
+ MemoryContext mcxt, /* cache memory context */
+ JsValue *jsv, /* json/jsonb value to convert */
+ bool isnull) /* value is NULL */
+{
+ Datum res;
+
+ if (isnull)
+ res = (Datum) 0;
+ else
+ {
+ res = populate_record_field(io->base_io,
+ io->base_typid, io->base_typmod,
+ colname, mcxt, PointerGetDatum(NULL),
+ jsv, &isnull);
+ Assert(!isnull);
+ }
+
+ domain_check(res, isnull, typid, &io->domain_info, mcxt);
+
+ return res;
+}
+
/* prepare column metadata cache for the given type */
static void
prepare_column_cache(ColumnIOData *column, /* metadata cache */
@@ -2325,13 +2781,55 @@ prepare_column_cache(ColumnIOData *column, /* metadata cache */
MemoryContext mcxt, /* cache memory context */
bool json) /* json/jsonb */
{
- Oid typioproc;
+ HeapTuple tup;
+ Form_pg_type type;
column->typid = typid;
column->typmod = typmod;
- getTypeInputInfo(typid, &typioproc, &column->scalar_io.typioparam);
- fmgr_info_cxt(typioproc, &column->scalar_io.typiofunc, mcxt);
+ tup = SearchSysCache1(TYPEOID, ObjectIdGetDatum(typid));
+ if (!HeapTupleIsValid(tup))
+ elog(ERROR, "cache lookup failed for type %u", typid);
+
+ type = (Form_pg_type) GETSTRUCT(tup);
+
+ if (type->typtype == TYPTYPE_DOMAIN)
+ {
+ column->typcat = TYPECAT_DOMAIN;
+ column->io.domain.base_typid = type->typbasetype;
+ column->io.domain.base_typmod = type->typtypmod;
+ column->io.domain.base_io = MemoryContextAllocZero(mcxt,
+ sizeof(ColumnIOData));
+ column->io.domain.domain_info = NULL;
+ }
+ else if (type->typtype == TYPTYPE_COMPOSITE || typid == RECORDOID)
+ {
+ column->typcat = TYPECAT_COMPOSITE;
+ column->io.composite.record_io = NULL;
+ column->io.composite.tupdesc = NULL;
+ }
+ else if (type->typlen == -1 && OidIsValid(type->typelem))
+ {
+ column->typcat = TYPECAT_ARRAY;
+ column->io.array.element_info = MemoryContextAllocZero(mcxt,
+ sizeof(ColumnIOData));
+ column->io.array.element_type = type->typelem;
+ /* array element typemod stored in attribute's typmod */
+ column->io.array.element_typmod = typmod;
+ }
+ else
+ column->typcat = TYPECAT_SCALAR;
+
+ /* don't need input function when converting from jsonb to jsonb */
+ if (json || typid != JSONBOID)
+ {
+ Oid typioproc;
+
+ getTypeInputInfo(typid, &typioproc, &column->scalar_io.typioparam);
+ fmgr_info_cxt(typioproc, &column->scalar_io.typiofunc, mcxt);
+ }
+
+ ReleaseSysCache(tup);
}
/* populate recursively a record field or an array element from json/jsonb value */
@@ -2345,6 +2843,8 @@ populate_record_field(ColumnIOData *col, /* metadata cache */
JsValue *jsv, /* json/jsonb value to convert */
bool *isnull) /* value is NULL */
{
+ TypeCat typcat;
+
check_stack_depth();
/* prepare column metadata cache for the given type */
@@ -2353,8 +2853,41 @@ populate_record_field(ColumnIOData *col, /* metadata cache */
*isnull = JsValueIsNull(jsv);
- return populate_scalar(&col->scalar_io, typid, typmod,
- *isnull ? NULL : jsv);
+ typcat = col->typcat;
+
+ /* try to convert json string to a non-scalar type through input function */
+ if (JsValueIsString(jsv) &&
+ (typcat == TYPECAT_ARRAY || typcat == TYPECAT_COMPOSITE))
+ typcat = TYPECAT_SCALAR;
+
+ /* we must perform domain checks for NULLs */
+ if (*isnull && typcat != TYPECAT_DOMAIN)
+ return (Datum) 0;
+
+ switch (typcat)
+ {
+ case TYPECAT_SCALAR:
+ return populate_scalar(&col->scalar_io, typid, typmod, jsv);
+
+ case TYPECAT_ARRAY:
+ return populate_array(&col->io.array, colname, mcxt, jsv);
+
+ case TYPECAT_COMPOSITE:
+ return populate_composite(&col->io.composite, typid, typmod,
+ colname, mcxt,
+ DatumGetPointer(defaultval)
+ ? DatumGetHeapTupleHeader(defaultval)
+ : NULL,
+ jsv);
+
+ case TYPECAT_DOMAIN:
+ return populate_domain(&col->io.domain, typid, colname, mcxt,
+ jsv, *isnull);
+
+ default:
+ elog(ERROR, "unrecognized type category '%c'", typcat);
+ return (Datum) 0;
+ }
}
static RecordIOData *
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index 1bb8768..c7182c8 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -1313,6 +1313,31 @@ select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8
-- populate_record
create type jpop as (a text, b int, c timestamp);
+CREATE DOMAIN js_int_not_null AS int NOT NULL;
+CREATE DOMAIN js_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN js_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+CREATE TYPE jsrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d js_int_array_1d,
+ ia2d js_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jpop,
+ reca jpop[]
+);
+CREATE TYPE jsrec_i_not_null AS (
+ i js_int_not_null
+);
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
@@ -1351,6 +1376,370 @@ select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100,200,false]"
+select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{}') q;
+ a | b | c
+---+---+--------------------------
+ x | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"x": 43.2}') q;
+ERROR: domain js_int_not_null does not allow null values
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": null}') q;
+ERROR: domain js_int_not_null does not allow null values
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": 12345}') q;
+ i
+-------
+ 12345
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": null}') q;
+ ia
+----
+
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia"
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [1, "2", null, 4]}') q;
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1, 2], [3, 4]]}') q;
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], 2]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ia"
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], [2, 3]]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": "{1,2,3}"}') q;
+ ia
+---------
+ {1,2,3}
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": null}') q;
+ ia1
+-----
+
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia1"
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [1, "2", null, 4]}') q;
+ ia1
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [[1, 2, 3]]}') q;
+ ia1
+-----------
+ {{1,2,3}}
+(1 row)
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": null}') q;
+ ia1d
+------
+
+(1 row)
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia1d"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null, 4]}') q;
+ERROR: value for domain js_int_array_1d violates check constraint "js_int_array_1d_check"
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null]}') q;
+ ia1d
+------------
+ {1,2,NULL}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [1, "2", null, 4]}') q;
+ ia2
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+ ia2
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[], []]}') q;
+ ia2
+-----
+ {}
+(1 row)
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [3]]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], 3, 4]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ia2"
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+ERROR: value for domain js_int_array_2d violates check constraint "js_int_array_2d_check"
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+ ia2d
+----------------------
+ {{1,2,3},{NULL,5,6}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [1, "2", null, 4]}') q;
+ ia3
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+ ia3
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+ ia3
+-----
+ {}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+ ia3
+-------------------
+ {{{1,2}},{{3,4}}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+ ia3
+-------------------------------
+ {{{1,2},{3,4}},{{5,6},{7,8}}}
+(1 row)
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": null}') q;
+ ta
+----
+
+(1 row)
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ta"
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [1, "2", null, 4]}') q;
+ ta
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ta"
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": null}') q;
+ c
+---
+
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaa"}') q;
+ c
+------------
+ aaa
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaa"}') q;
+ c
+------------
+ aaaaaaaaaa
+(1 row)
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaaaaa"}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": null}') q;
+ ca
+----
+
+(1 row)
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ca"
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [1, "2", null, 4]}') q;
+ ca
+-----------------------------------------------
+ {"1 ","2 ",NULL,"4 "}
+(1 row)
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ca"
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": null}') q;
+ js
+----
+
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": true}') q;
+ js
+------
+ true
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": 123.45}') q;
+ js
+--------
+ 123.45
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "123.45"}') q;
+ js
+----------
+ "123.45"
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "abc"}') q;
+ js
+-------
+ "abc"
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+ js
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ js
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": null}') q;
+ jsb
+-----
+
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": true}') q;
+ jsb
+------
+ true
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": 123.45}') q;
+ jsb
+--------
+ 123.45
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "123.45"}') q;
+ jsb
+----------
+ "123.45"
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "abc"}') q;
+ jsb
+-------
+ "abc"
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+ jsb
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ jsb
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": null}') q;
+ jsa
+-----
+
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "jsa"
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": [1, "2", null, 4]}') q;
+ jsa
+--------------------
+ {1,"\"2\"",NULL,4}
+(1 row)
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+ jsa
+----------------------------------------------------------
+ {"\"aaa\"",NULL,"[1, 2, \"3\", {}]","{ \"k\" : \"v\" }"}
+(1 row)
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": 123}') q;
+ERROR: cannot call populate_composite on a scalar
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": [1, 2]}') q;
+ERROR: cannot call populate_composite on an array
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+ rec
+-----------------------------------
+ (abc,,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": "(abc,42,01.02.2003)"}') q;
+ rec
+-------------------------------------
+ (abc,42,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "reca"
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [1, 2]}') q;
+ERROR: cannot call populate_composite on a scalar
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+ reca
+--------------------------------------------------------
+ {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": ["(abc,42,01.02.2003)"]}') q;
+ reca
+-------------------------------------------
+ {"(abc,42,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": "{\"(abc,42,01.02.2003)\"}"}') q;
+ reca
+-------------------------------------------
+ {"(abc,42,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT rec FROM json_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jpop,NULL)::jsrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+ rec
+------------------------------------
+ (abc,3,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
@@ -1417,6 +1806,28 @@ select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,3
{"z":true} | 3 | Fri Jan 20 10:42:53 2012
(2 rows)
+-- test type info caching in json_populate_record()
+CREATE TEMP TABLE jspoptest (js json);
+INSERT INTO jspoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::json
+FROM generate_series(1, 3);
+SELECT (json_populate_record(NULL::jsrec, js)).* FROM jspoptest;
+ i | ia | ia1 | ia2 | ia3 | ia1d | ia2d | t | ta | c | ca | ts | js | jsb | jsa | rec | reca
+---+----+-----+-----+-----+------+------+---+----+---+----+----+----+-----+--------------------+-----------------------------------+--------------------------------------------------------
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(3 rows)
+
+DROP TYPE jsrec;
+DROP TYPE jsrec_i_not_null;
+DROP DOMAIN js_int_not_null;
+DROP DOMAIN js_int_array_1d;
+DROP DOMAIN js_int_array_2d;
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
@@ -1609,11 +2020,11 @@ select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":fa
(2 rows)
select *, c is null as c_is_null
-from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::json)
- as t(a int, b json, c text, x int);
- a | b | c | x | c_is_null
----+-----------------+---+---+-----------
- 1 | {"c":16, "d":2} | | 8 | t
+from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::json)
+ as t(a int, b json, c text, x int, ca char(5)[], ia int[][], r jpop);
+ a | b | c | x | ca | ia | r | c_is_null
+---+-----------------+---+---+-------------------+---------------+------------+-----------
+ 1 | {"c":16, "d":2} | | 8 | {"1 2 ","3 "} | {{1,2},{3,4}} | (aaa,123,) | t
(1 row)
select *, c is null as c_is_null
@@ -1624,6 +2035,51 @@ from json_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::json)
1 | {"c":16, "d":2} | | 8 | t
(1 row)
+select * from json_to_record('{"ia": null}') as x(ia _int4);
+ ia
+----
+
+(1 row)
+
+select * from json_to_record('{"ia": 123}') as x(ia _int4);
+ERROR: expected json array
+HINT: see the value of key "ia"
+select * from json_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+select * from json_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from json_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+ERROR: expected json array
+HINT: see the array element [1] of key "ia"
+select * from json_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+select * from json_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+ ia2
+---------
+ {1,2,3}
+(1 row)
+
+select * from json_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+ ia2
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from json_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+ ia2
+-----------------
+ {{{1},{2},{3}}}
+(1 row)
+
-- json_strip_nulls
select json_strip_nulls(null);
json_strip_nulls
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index 8ec4150..82a1d1c 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -1897,6 +1897,31 @@ SELECT * FROM jsonb_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,
-- populate_record
CREATE TYPE jbpop AS (a text, b int, c timestamp);
+CREATE DOMAIN jsb_int_not_null AS int NOT NULL;
+CREATE DOMAIN jsb_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN jsb_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+CREATE TYPE jsbrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d jsb_int_array_1d,
+ ia2d jsb_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jbpop,
+ reca jbpop[]
+);
+CREATE TYPE jsbrec_i_not_null AS (
+ i jsb_int_not_null
+);
SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
@@ -1935,6 +1960,382 @@ SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100, 200, false]"
+SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop, '{}') q;
+ a | b | c
+---+---+--------------------------
+ x | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"x": 43.2}') q;
+ERROR: domain jsb_int_not_null does not allow null values
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": null}') q;
+ERROR: domain jsb_int_not_null does not allow null values
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": 12345}') q;
+ i
+-------
+ 12345
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": null}') q;
+ ia
+----
+
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia"
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [1, "2", null, 4]}') q;
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1, 2], [3, 4]]}') q;
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], 2]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ia"
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], [2, 3]]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": "{1,2,3}"}') q;
+ ia
+---------
+ {1,2,3}
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": null}') q;
+ ia1
+-----
+
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia1"
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [1, "2", null, 4]}') q;
+ ia1
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [[1, 2, 3]]}') q;
+ ia1
+-----------
+ {{1,2,3}}
+(1 row)
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": null}') q;
+ ia1d
+------
+
+(1 row)
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ia1d"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null, 4]}') q;
+ERROR: value for domain jsb_int_array_1d violates check constraint "jsb_int_array_1d_check"
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null]}') q;
+ ia1d
+------------
+ {1,2,NULL}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [1, "2", null, 4]}') q;
+ ia2
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+ ia2
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[], []]}') q;
+ ia2
+-----
+ {}
+(1 row)
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [3]]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], 3, 4]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ia2"
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+ERROR: value for domain jsb_int_array_2d violates check constraint "jsb_int_array_2d_check"
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+ ia2d
+----------------------
+ {{1,2,3},{NULL,5,6}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [1, "2", null, 4]}') q;
+ ia3
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+ ia3
+------------------
+ {{1,2},{NULL,4}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+ ia3
+-----
+ {}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+ ia3
+-------------------
+ {{{1,2}},{{3,4}}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+ ia3
+-------------------------------
+ {{{1,2},{3,4}},{{5,6},{7,8}}}
+(1 row)
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": null}') q;
+ ta
+----
+
+(1 row)
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ta"
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [1, "2", null, 4]}') q;
+ ta
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ta"
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": null}') q;
+ c
+---
+
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaa"}') q;
+ c
+------------
+ aaa
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaa"}') q;
+ c
+------------
+ aaaaaaaaaa
+(1 row)
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaaaaa"}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": null}') q;
+ ca
+----
+
+(1 row)
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "ca"
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [1, "2", null, 4]}') q;
+ ca
+-----------------------------------------------
+ {"1 ","2 ",NULL,"4 "}
+(1 row)
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+ERROR: value too long for type character(10)
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+ERROR: expected json array
+HINT: see the array element [1] of key "ca"
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": null}') q;
+ js
+----
+
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": true}') q;
+ js
+------
+ true
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": 123.45}') q;
+ js
+--------
+ 123.45
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "123.45"}') q;
+ js
+----------
+ "123.45"
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "abc"}') q;
+ js
+-------
+ "abc"
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+ js
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ js
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": null}') q;
+ jsb
+-----
+
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": true}') q;
+ jsb
+------
+ true
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": 123.45}') q;
+ jsb
+--------
+ 123.45
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "123.45"}') q;
+ jsb
+----------
+ "123.45"
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "abc"}') q;
+ jsb
+-------
+ "abc"
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+ jsb
+--------------------------------------
+ [123, "123", null, {"key": "value"}]
+(1 row)
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+ jsb
+--------------------------------------
+ {"a": "bbb", "b": null, "c": 123.45}
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": null}') q;
+ jsa
+-----
+
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "jsa"
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": [1, "2", null, 4]}') q;
+ jsa
+--------------------
+ {1,"\"2\"",NULL,4}
+(1 row)
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+ jsa
+-------------------------------------------------------
+ {"\"aaa\"",NULL,"[1, 2, \"3\", {}]","{\"k\": \"v\"}"}
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
+ rec
+------
+ (,,)
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
+ rec
+------
+ (,,)
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+ rec
+-----------------------------------
+ (abc,,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": "(abc,42,01.02.2003)"}') q;
+ rec
+-------------------------------------
+ (abc,42,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
+ERROR: expected json array
+HINT: see the value of key "reca"
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
+ reca
+-----------------
+ {"(,,)","(,,)"}
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+ reca
+--------------------------------------------------------
+ {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": ["(abc,42,01.02.2003)"]}') q;
+ reca
+-------------------------------------------
+ {"(abc,42,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": "{\"(abc,42,01.02.2003)\"}"}') q;
+ reca
+-------------------------------------------
+ {"(abc,42,\"Thu Jan 02 00:00:00 2003\")"}
+(1 row)
+
+SELECT rec FROM jsonb_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jbpop,NULL)::jsbrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+ rec
+------------------------------------
+ (abc,3,"Thu Jan 02 00:00:00 2003")
+(1 row)
+
-- populate_recordset
SELECT * FROM jsonb_populate_recordset(NULL::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
@@ -2011,11 +2412,11 @@ select * from jsonb_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar",
(2 rows)
select *, c is null as c_is_null
-from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::jsonb)
- as t(a int, b jsonb, c text, x int);
- a | b | c | x | c_is_null
----+-------------------+---+---+-----------
- 1 | {"c": 16, "d": 2} | | 8 | t
+from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::jsonb)
+ as t(a int, b jsonb, c text, x int, ca char(5)[], ia int[][], r jbpop);
+ a | b | c | x | ca | ia | r | c_is_null
+---+-------------------+---+---+-------------------+---------------+------------+-----------
+ 1 | {"c": 16, "d": 2} | | 8 | {"1 2 ","3 "} | {{1,2},{3,4}} | (aaa,123,) | t
(1 row)
select *, c is null as c_is_null
@@ -2026,6 +2427,73 @@ from jsonb_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::jsonb)
1 | {"c": 16, "d": 2} | | 8 | t
(1 row)
+select * from jsonb_to_record('{"ia": null}') as x(ia _int4);
+ ia
+----
+
+(1 row)
+
+select * from jsonb_to_record('{"ia": 123}') as x(ia _int4);
+ERROR: expected json array
+HINT: see the value of key "ia"
+select * from jsonb_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+ ia
+--------------
+ {1,2,NULL,4}
+(1 row)
+
+select * from jsonb_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+ ia
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from jsonb_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+ERROR: expected json array
+HINT: see the array element [1] of key "ia"
+select * from jsonb_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+ERROR: malformed json array
+DETAIL: Multidimensional arrays must have sub-arrays with matching dimensions.
+select * from jsonb_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+ ia2
+---------
+ {1,2,3}
+(1 row)
+
+select * from jsonb_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+ ia2
+---------------
+ {{1,2},{3,4}}
+(1 row)
+
+select * from jsonb_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+ ia2
+-----------------
+ {{{1},{2},{3}}}
+(1 row)
+
+-- test type info caching in jsonb_populate_record()
+CREATE TEMP TABLE jsbpoptest (js jsonb);
+INSERT INTO jsbpoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::jsonb
+FROM generate_series(1, 3);
+SELECT (jsonb_populate_record(NULL::jsbrec, js)).* FROM jsbpoptest;
+ i | ia | ia1 | ia2 | ia3 | ia1d | ia2d | t | ta | c | ca | ts | js | jsb | jsa | rec | reca
+---+----+-----+-----+-----+------+------+---+----+---+----+----+----+-----+--------------------+-----------------------------------+--------------------------------------------------------
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+ | | | | | | | | | | | | | | {1,"\"2\"",NULL,4} | (abc,,"Thu Jan 02 00:00:00 2003") | {"(abc,456,)",NULL,"(,,\"Thu Jan 02 00:00:00 2003\")"}
+(3 rows)
+
+DROP TYPE jsbrec;
+DROP TYPE jsbrec_i_not_null;
+DROP DOMAIN jsb_int_not_null;
+DROP DOMAIN jsb_int_array_1d;
+DROP DOMAIN jsb_int_array_2d;
-- indexing
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}';
count
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index 5e61922..ce5fb77 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -384,6 +384,34 @@ select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8
-- populate_record
create type jpop as (a text, b int, c timestamp);
+CREATE DOMAIN js_int_not_null AS int NOT NULL;
+CREATE DOMAIN js_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN js_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+
+CREATE TYPE jsrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d js_int_array_1d,
+ ia2d js_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jpop,
+ reca jpop[]
+);
+
+CREATE TYPE jsrec_i_not_null AS (
+ i js_int_not_null
+);
+
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":"blurfl","x":43.2}') q;
@@ -394,6 +422,100 @@ select * from json_populate_record(null::jpop,'{"a":[100,200,false],"x":43.2}')
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":[100,200,false],"x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
+select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{}') q;
+
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"x": 43.2}') q;
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": null}') q;
+SELECT i FROM json_populate_record(NULL::jsrec_i_not_null, '{"i": 12345}') q;
+
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": null}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": 123}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [1, "2", null, 4]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1, 2], [3, 4]]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], 2]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": [[1], [2, 3]]}') q;
+SELECT ia FROM json_populate_record(NULL::jsrec, '{"ia": "{1,2,3}"}') q;
+
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": null}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": 123}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [1, "2", null, 4]}') q;
+SELECT ia1 FROM json_populate_record(NULL::jsrec, '{"ia1": [[1, 2, 3]]}') q;
+
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": null}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": 123}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null, 4]}') q;
+SELECT ia1d FROM json_populate_record(NULL::jsrec, '{"ia1d": [1, "2", null]}') q;
+
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [1, "2", null, 4]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[], []]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], [3]]}') q;
+SELECT ia2 FROM json_populate_record(NULL::jsrec, '{"ia2": [[1, 2], 3, 4]}') q;
+
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+SELECT ia2d FROM json_populate_record(NULL::jsrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [1, "2", null, 4]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+SELECT ia3 FROM json_populate_record(NULL::jsrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": null}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": 123}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [1, "2", null, 4]}') q;
+SELECT ta FROM json_populate_record(NULL::jsrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": null}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaa"}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaa"}') q;
+SELECT c FROM json_populate_record(NULL::jsrec, '{"c": "aaaaaaaaaaaaa"}') q;
+
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": null}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": 123}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [1, "2", null, 4]}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+SELECT ca FROM json_populate_record(NULL::jsrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": null}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": true}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": 123.45}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "123.45"}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": "abc"}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+SELECT js FROM json_populate_record(NULL::jsrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": null}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": true}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": 123.45}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "123.45"}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": "abc"}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+SELECT jsb FROM json_populate_record(NULL::jsrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": null}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": 123}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": [1, "2", null, 4]}') q;
+SELECT jsa FROM json_populate_record(NULL::jsrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": 123}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": [1, 2]}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+SELECT rec FROM json_populate_record(NULL::jsrec, '{"rec": "(abc,42,01.02.2003)"}') q;
+
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": 123}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [1, 2]}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": ["(abc,42,01.02.2003)"]}') q;
+SELECT reca FROM json_populate_record(NULL::jsrec, '{"reca": "{\"(abc,42,01.02.2003)\"}"}') q;
+
+SELECT rec FROM json_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jpop,NULL)::jsrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
@@ -410,6 +532,25 @@ select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+-- test type info caching in json_populate_record()
+CREATE TEMP TABLE jspoptest (js json);
+
+INSERT INTO jspoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::json
+FROM generate_series(1, 3);
+
+SELECT (json_populate_record(NULL::jsrec, js)).* FROM jspoptest;
+
+DROP TYPE jsrec;
+DROP TYPE jsrec_i_not_null;
+DROP DOMAIN js_int_not_null;
+DROP DOMAIN js_int_array_1d;
+DROP DOMAIN js_int_array_2d;
+
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
@@ -526,13 +667,24 @@ select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":fa
as x(a int, b json, c boolean);
select *, c is null as c_is_null
-from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::json)
- as t(a int, b json, c text, x int);
+from json_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::json)
+ as t(a int, b json, c text, x int, ca char(5)[], ia int[][], r jpop);
select *, c is null as c_is_null
from json_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::json)
as t(a int, b json, c text, x int);
+select * from json_to_record('{"ia": null}') as x(ia _int4);
+select * from json_to_record('{"ia": 123}') as x(ia _int4);
+select * from json_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+select * from json_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+select * from json_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+select * from json_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+
+select * from json_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+select * from json_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+select * from json_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+
-- json_strip_nulls
select json_strip_nulls(null);
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
index e2eaca0..1f4575d 100644
--- a/src/test/regress/sql/jsonb.sql
+++ b/src/test/regress/sql/jsonb.sql
@@ -484,6 +484,34 @@ SELECT * FROM jsonb_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,
-- populate_record
CREATE TYPE jbpop AS (a text, b int, c timestamp);
+CREATE DOMAIN jsb_int_not_null AS int NOT NULL;
+CREATE DOMAIN jsb_int_array_1d AS int[] CHECK(array_length(VALUE, 1) = 3);
+CREATE DOMAIN jsb_int_array_2d AS int[][] CHECK(array_length(VALUE, 2) = 3);
+
+CREATE TYPE jsbrec AS (
+ i int,
+ ia _int4,
+ ia1 int[],
+ ia2 int[][],
+ ia3 int[][][],
+ ia1d jsb_int_array_1d,
+ ia2d jsb_int_array_2d,
+ t text,
+ ta text[],
+ c char(10),
+ ca char(10)[],
+ ts timestamp,
+ js json,
+ jsb jsonb,
+ jsa json[],
+ rec jbpop,
+ reca jbpop[]
+);
+
+CREATE TYPE jsbrec_i_not_null AS (
+ i jsb_int_not_null
+);
+
SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":"blurfl","x":43.2}') q;
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}') q;
@@ -494,6 +522,100 @@ SELECT * FROM jsonb_populate_record(NULL::jbpop,'{"a":[100,200,false],"x":43.2}'
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":[100,200,false],"x":43.2}') q;
SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}') q;
+SELECT * FROM jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop, '{}') q;
+
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"x": 43.2}') q;
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": null}') q;
+SELECT i FROM jsonb_populate_record(NULL::jsbrec_i_not_null, '{"i": 12345}') q;
+
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": null}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": 123}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [1, "2", null, 4]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1, 2], [3, 4]]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], 2]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": [[1], [2, 3]]}') q;
+SELECT ia FROM jsonb_populate_record(NULL::jsbrec, '{"ia": "{1,2,3}"}') q;
+
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": null}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": 123}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [1, "2", null, 4]}') q;
+SELECT ia1 FROM jsonb_populate_record(NULL::jsbrec, '{"ia1": [[1, 2, 3]]}') q;
+
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": null}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": 123}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null, 4]}') q;
+SELECT ia1d FROM jsonb_populate_record(NULL::jsbrec, '{"ia1d": [1, "2", null]}') q;
+
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [1, "2", null, 4]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [null, 4]]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[], []]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], [3]]}') q;
+SELECT ia2 FROM jsonb_populate_record(NULL::jsbrec, '{"ia2": [[1, 2], 3, 4]}') q;
+
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2"], [null, 4]]}') q;
+SELECT ia2d FROM jsonb_populate_record(NULL::jsbrec, '{"ia2d": [[1, "2", 3], [null, 5, 6]]}') q;
+
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [1, "2", null, 4]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [[1, 2], [null, 4]]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[], []], [[], []], [[], []] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2]], [[3, 4]] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8]] ]}') q;
+SELECT ia3 FROM jsonb_populate_record(NULL::jsbrec, '{"ia3": [ [[1, 2], [3, 4]], [[5, 6], [7, 8], [9, 10]] ]}') q;
+
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": null}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": 123}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [1, "2", null, 4]}') q;
+SELECT ta FROM jsonb_populate_record(NULL::jsbrec, '{"ta": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": null}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaa"}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaa"}') q;
+SELECT c FROM jsonb_populate_record(NULL::jsbrec, '{"c": "aaaaaaaaaaaaa"}') q;
+
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": null}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": 123}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [1, "2", null, 4]}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": ["aaaaaaaaaaaaaaaa"]}') q;
+SELECT ca FROM jsonb_populate_record(NULL::jsbrec, '{"ca": [[1, 2, 3], {"k": "v"}]}') q;
+
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": null}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": true}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": 123.45}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "123.45"}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": "abc"}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": [123, "123", null, {"key": "value"}]}') q;
+SELECT js FROM jsonb_populate_record(NULL::jsbrec, '{"js": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": null}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": true}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": 123.45}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "123.45"}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": "abc"}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": [123, "123", null, {"key": "value"}]}') q;
+SELECT jsb FROM jsonb_populate_record(NULL::jsbrec, '{"jsb": {"a": "bbb", "b": null, "c": 123.45}}') q;
+
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": null}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": 123}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": [1, "2", null, 4]}') q;
+SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2, "3", {}], { "k" : "v" }]}') q;
+
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
+SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": "(abc,42,01.02.2003)"}') q;
+
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": ["(abc,42,01.02.2003)"]}') q;
+SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": "{\"(abc,42,01.02.2003)\"}"}') q;
+
+SELECT rec FROM jsonb_populate_record(
+ row(NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,
+ row('x',3,'2012-12-31 15:30:56')::jbpop,NULL)::jsbrec,
+ '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}'
+) q;
+
-- populate_recordset
SELECT * FROM jsonb_populate_recordset(NULL::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
SELECT * FROM jsonb_populate_recordset(row('def',99,NULL)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
@@ -515,13 +637,43 @@ select * from jsonb_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar",
as x(a int, b text, c boolean);
select *, c is null as c_is_null
-from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8}'::jsonb)
- as t(a int, b jsonb, c text, x int);
+from jsonb_to_record('{"a":1, "b":{"c":16, "d":2}, "x":8, "ca": ["1 2", 3], "ia": [[1,2],[3,4]], "r": {"a": "aaa", "b": 123}}'::jsonb)
+ as t(a int, b jsonb, c text, x int, ca char(5)[], ia int[][], r jbpop);
select *, c is null as c_is_null
from jsonb_to_recordset('[{"a":1, "b":{"c":16, "d":2}, "x":8}]'::jsonb)
as t(a int, b jsonb, c text, x int);
+select * from jsonb_to_record('{"ia": null}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": 123}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [1, "2", null, 4]}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [[1, 2], [3, 4]]}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [[1], 2]}') as x(ia _int4);
+select * from jsonb_to_record('{"ia": [[1], [2, 3]]}') as x(ia _int4);
+
+select * from jsonb_to_record('{"ia2": [1, 2, 3]}') as x(ia2 int[][]);
+select * from jsonb_to_record('{"ia2": [[1, 2], [3, 4]]}') as x(ia2 int4[][]);
+select * from jsonb_to_record('{"ia2": [[[1], [2], [3]]]}') as x(ia2 int4[][]);
+
+-- test type info caching in jsonb_populate_record()
+CREATE TEMP TABLE jsbpoptest (js jsonb);
+
+INSERT INTO jsbpoptest
+SELECT '{
+ "jsa": [1, "2", null, 4],
+ "rec": {"a": "abc", "c": "01.02.2003", "x": 43.2},
+ "reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]
+}'::jsonb
+FROM generate_series(1, 3);
+
+SELECT (jsonb_populate_record(NULL::jsbrec, js)).* FROM jsbpoptest;
+
+DROP TYPE jsbrec;
+DROP TYPE jsbrec_i_not_null;
+DROP DOMAIN jsb_int_not_null;
+DROP DOMAIN jsb_int_array_1d;
+DROP DOMAIN jsb_int_array_2d;
+
-- indexing
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":null}';
SELECT count(*) FROM testjsonb WHERE j @> '{"wait":"CC"}';
On 2017-03-21 14:31:08 -0400, Andrew Dunstan wrote:
On 03/21/2017 01:37 PM, David Steele wrote:
On 3/16/17 11:54 AM, David Steele wrote:
On 2/1/17 12:53 AM, Michael Paquier wrote:
On Thu, Jan 26, 2017 at 6:49 AM, Tom Lane <tgl@sss.pgh.pa.us> wrote:
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 25.01.2017 23:58, Tom Lane wrote:
I think you need to take a second look at the code you're producing
and realize that it's not so clean either. This extract from
populate_record_field, for example, is pretty horrid:

But what if we introduce some helper macros like this:
#define JsValueIsNull(jsv) \
((jsv)->is_json ? !(jsv)->val.json.str \
: !(jsv)->val.jsonb || (jsv)->val.jsonb->type == jbvNull)

#define JsValueIsString(jsv) \
((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
: (jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString)

Yeah, I was wondering about that too. I'm not sure that you can make
a reasonable set of helper macros that will fix this, but if you want
to try, go for it.BTW, just as a stylistic thing, I find "a?b:c||d" unreadable: I have
to go back to the manual every darn time to convince myself whether
that means (a?b:c)||d or a?b:(c||d). It's better not to rely on
the reader (... or the author) having memorized C's precedence rules
in quite that much detail. Extra parens help.

Moved to CF 2017-03 as discussion is going on and more review is
needed on the last set of patches.

The current patches do not apply cleanly at cccbdde:
$ git apply ../other/0001-introduce-JsonContainerXxx-macros-v04.patch
error: patch failed: src/backend/utils/adt/jsonb_util.c:328
error: src/backend/utils/adt/jsonb_util.c: patch does not apply
error: patch failed: src/backend/utils/adt/jsonfuncs.c:1266
error: src/backend/utils/adt/jsonfuncs.c: patch does not apply
error: patch failed: src/include/utils/jsonb.h:218
error: src/include/utils/jsonb.h: patch does not apply

In addition, it appears a number of suggestions have been made by Tom
that warrant new patches. Marked "Waiting on Author".

This thread has been idle for months since Tom's review.
The submission has been marked "Returned with Feedback". Please feel
free to resubmit to a future commitfest.

Please revive. I am planning to look at this later this week.
Since that was 10 days ago - you're still planning to get this in before
the freeze?
- Andres
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 04/03/2017 05:17 PM, Andres Freund wrote:
On 2017-03-21 14:31:08 -0400, Andrew Dunstan wrote:
On 03/21/2017 01:37 PM, David Steele wrote:
On 3/16/17 11:54 AM, David Steele wrote:
On 2/1/17 12:53 AM, Michael Paquier wrote:
On Thu, Jan 26, 2017 at 6:49 AM, Tom Lane <tgl@sss.pgh.pa.us> wrote:
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 25.01.2017 23:58, Tom Lane wrote:
I think you need to take a second look at the code you're producing
and realize that it's not so clean either. This extract from
populate_record_field, for example, is pretty horrid:

But what if we introduce some helper macros like this:
#define JsValueIsNull(jsv) \
((jsv)->is_json ? !(jsv)->val.json.str \
: !(jsv)->val.jsonb || (jsv)->val.jsonb->type == jbvNull)
#define JsValueIsString(jsv) \
((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
: (jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString)

Yeah, I was wondering about that too. I'm not sure that you can make
a reasonable set of helper macros that will fix this, but if you want
to try, go for it.

BTW, just as a stylistic thing, I find "a?b:c||d" unreadable: I have
to go back to the manual every darn time to convince myself whether
that means (a?b:c)||d or a?b:(c||d). It's better not to rely on
the reader (... or the author) having memorized C's precedence rules
in quite that much detail. Extra parens help.

Moved to CF 2017-03 as discussion is going on and more review is
needed on the last set of patches.

The current patches do not apply cleanly at cccbdde:
$ git apply ../other/0001-introduce-JsonContainerXxx-macros-v04.patch
error: patch failed: src/backend/utils/adt/jsonb_util.c:328
error: src/backend/utils/adt/jsonb_util.c: patch does not apply
error: patch failed: src/backend/utils/adt/jsonfuncs.c:1266
error: src/backend/utils/adt/jsonfuncs.c: patch does not apply
error: patch failed: src/include/utils/jsonb.h:218
error: src/include/utils/jsonb.h: patch does not apply

In addition, it appears a number of suggestions have been made by Tom
that warrant new patches. Marked "Waiting on Author".

This thread has been idle for months since Tom's review.
The submission has been marked "Returned with Feedback". Please feel
free to resubmit to a future commitfest.

Please revive. I am planning to look at this later this week.
Since that was 10 days ago - you're still planning to get this in before
the freeze?
Hoping to, other demands have intervened a bit, but I might be able to.
cheers
andrew
--
Andrew Dunstan https://www.2ndQuadrant.com
PostgreSQL Development, 24x7 Support, Remote DBA, Training & Services
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 04/04/2017 05:18 PM, Andrew Dunstan wrote:
On 04/03/2017 05:17 PM, Andres Freund wrote:
On 2017-03-21 14:31:08 -0400, Andrew Dunstan wrote:
On 03/21/2017 01:37 PM, David Steele wrote:
On 3/16/17 11:54 AM, David Steele wrote:
On 2/1/17 12:53 AM, Michael Paquier wrote:
On Thu, Jan 26, 2017 at 6:49 AM, Tom Lane <tgl@sss.pgh.pa.us> wrote:
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
On 25.01.2017 23:58, Tom Lane wrote:
I think you need to take a second look at the code you're producing
and realize that it's not so clean either. This extract from
populate_record_field, for example, is pretty horrid:

But what if we introduce some helper macros like this:
#define JsValueIsNull(jsv) \
((jsv)->is_json ? !(jsv)->val.json.str \
: !(jsv)->val.jsonb || (jsv)->val.jsonb->type == jbvNull)
#define JsValueIsString(jsv) \
((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
: (jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString)

Yeah, I was wondering about that too. I'm not sure that you can make
a reasonable set of helper macros that will fix this, but if you want
to try, go for it.BTW, just as a stylistic thing, I find "a?b:c||d" unreadable: I have
to go back to the manual every darn time to convince myself whether
that means (a?b:c)||d or a?b:(c||d). It's better not to rely on
the reader (... or the author) having memorized C's precedence rules
in quite that much detail. Extra parens help.

Moved to CF 2017-03 as discussion is going on and more review is
needed on the last set of patches.

The current patches do not apply cleanly at cccbdde:
$ git apply ../other/0001-introduce-JsonContainerXxx-macros-v04.patch
error: patch failed: src/backend/utils/adt/jsonb_util.c:328
error: src/backend/utils/adt/jsonb_util.c: patch does not apply
error: patch failed: src/backend/utils/adt/jsonfuncs.c:1266
error: src/backend/utils/adt/jsonfuncs.c: patch does not apply
error: patch failed: src/include/utils/jsonb.h:218
error: src/include/utils/jsonb.h: patch does not apply

In addition, it appears a number of suggestions have been made by Tom
that warrant new patches. Marked "Waiting on Author".

This thread has been idle for months since Tom's review.
The submission has been marked "Returned with Feedback". Please feel
free to resubmit to a future commitfest.

Please revive. I am planning to look at this later this week.
Since that was 10 days ago - you're still planning to get this in before
the freeze?

Hoping to, other demands have intervened a bit, but I might be able to.
OK, I have been through this and I think it's basically committable. I
am currently doing some cleanup, such as putting all the typedefs in one
place, fixing redundant comments and adding more comments, declaring all
the static functions, and minor code changes, but nothing major. I
should be ready to commit tomorrow some time.
cheers
andrew
--
Andrew Dunstan https://www.2ndQuadrant.com
PostgreSQL Development, 24x7 Support, Remote DBA, Training & Services
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 04/05/2017 07:24 PM, Andrew Dunstan wrote:
OK, I have been through this and I think it's basically committable. I
am currently doing some cleanup, such as putting all the typedefs in one
place, fixing redundant comments and adding more comments, declaring all
the static functions, and minor code changes, but nothing major. I
should be ready to commit tomorrow some time.
I have committed this. I will continue to work on adding comments.
cheers
andrew
--
Andrew Dunstan https://www.2ndQuadrant.com
PostgreSQL Development, 24x7 Support, Remote DBA, Training & Services
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
Attached two small fixes for the previous committed patch:
1. I've noticed a difference in behavior between json_populate_record()
and jsonb_populate_record() if we are trying to populate record from a
non-JSON-object: json function throws an error but jsonb function returns
a record with NULL fields. So I think it would be better to throw an error
in jsonb case too, but I'm not sure.
2. Also I've found some kind of thinko in the JsObjectSize() macro, but it
seems that this obvious mistake cannot lead to incorrect behavior.
--
Nikita Glukhov
Postgres Professional:http://www.postgrespro.com
The Russian Postgres Company
Attachments:
fix-thinko-in-JsObjectSize-macro.patchtext/x-patch; name=fix-thinko-in-JsObjectSize-macro.patchDownload
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index ab9a745..a98742f 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -308,12 +308,11 @@ typedef struct JsObject
((jsv)->is_json ? (jsv)->val.json.type == JSON_TOKEN_STRING \
: ((jsv)->val.jsonb && (jsv)->val.jsonb->type == jbvString))
-#define JsObjectSize(jso) \
+#define JsObjectIsEmpty(jso) \
((jso)->is_json \
- ? hash_get_num_entries((jso)->val.json_hash) \
- : !(jso)->val.jsonb_cont || JsonContainerSize((jso)->val.jsonb_cont))
-
-#define JsObjectIsEmpty(jso) (JsObjectSize(jso) == 0)
+ ? hash_get_num_entries((jso)->val.json_hash) == 0 \
+ : (!(jso)->val.jsonb_cont || \
+ JsonContainerSize((jso)->val.jsonb_cont) == 0))
#define JsObjectFree(jso) do { \
if ((jso)->is_json) \
fix-jsonb_populate_record-behavior-on-non-objects.patchtext/x-patch; name=fix-jsonb_populate_record-behavior-on-non-objects.patchDownload
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index a98742f..083982c 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -2683,7 +2683,20 @@ JsValueToJsObject(JsValue *jsv, JsObject *jso)
JsonContainerIsObject(jbv->val.binary.data))
jso->val.jsonb_cont = jbv->val.binary.data;
else
+ {
+ bool is_scalar = IsAJsonbScalar(jbv) ||
+ (jbv->type == jbvBinary &&
+ JsonContainerIsScalar(jbv->val.binary.data));
+
+ ereport(ERROR,
+ (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+ errmsg(is_scalar
+ ? "cannot call %s on a scalar"
+ : "cannot call %s on an array",
+ "populate_composite")));
+
jso->val.jsonb_cont = NULL;
+ }
}
}
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
index f199eb4..7954703 100644
--- a/src/test/regress/expected/jsonb.out
+++ b/src/test/regress/expected/jsonb.out
@@ -2276,17 +2276,9 @@ SELECT jsa FROM jsonb_populate_record(NULL::jsbrec, '{"jsa": ["aaa", null, [1, 2
(1 row)
SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": 123}') q;
- rec
-------
- (,,)
-(1 row)
-
+ERROR: cannot call populate_composite on a scalar
SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": [1, 2]}') q;
- rec
-------
- (,,)
-(1 row)
-
+ERROR: cannot call populate_composite on an array
SELECT rec FROM jsonb_populate_record(NULL::jsbrec, '{"rec": {"a": "abc", "c": "01.02.2003", "x": 43.2}}') q;
rec
-----------------------------------
@@ -2303,11 +2295,7 @@ SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": 123}') q;
ERROR: expected json array
HINT: see the value of key "reca"
SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [1, 2]}') q;
- reca
------------------
- {"(,,)","(,,)"}
-(1 row)
-
+ERROR: cannot call populate_composite on a scalar
SELECT reca FROM jsonb_populate_record(NULL::jsbrec, '{"reca": [{"a": "abc", "b": 456}, null, {"c": "01.02.2003", "x": 43.2}]}') q;
reca
--------------------------------------------------------
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
Attached two small fixes for the previous committed patch:
1. I've noticed a difference in behavior between json_populate_record()
and jsonb_populate_record() if we are trying to populate record from a
non-JSON-object: json function throws an error but jsonb function returns
a record with NULL fields. So I think it would be better to throw an error
in jsonb case too, but I'm not sure.
Agreed on the error. I reformatted the code a bit.
2. Also I've found some kind of thinko in the JsObjectSize() macro, but it
seems that this obvious mistake cannot lead to incorrect behavior.
Hm, I think it actually was wrong for the case of jsonb_cont == NULL,
wasn't it? But maybe that case didn't arise for the sole call site.
Anyway, agreed. Pushed both patches.
regards, tom lane
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On Mon, May 22, 2017 at 10:19:37PM +0300, Nikita Glukhov wrote:
Attached two small fixes for the previous committed patch:
1. I've noticed a difference in behavior between json_populate_record()
and jsonb_populate_record() if we are trying to populate record from a
non-JSON-object: json function throws an error but jsonb function returns
a record with NULL fields. So I think it would be better to throw an error
in jsonb case too, but I'm not sure.

2. Also I've found some kind of thinko in the JsObjectSize() macro, but it
seems that this obvious mistake cannot lead to incorrect behavior.
[Action required within three days. This is a generic notification.]
The above-described topic is currently a PostgreSQL 10 open item. Andrew,
since you committed the patch believed to have created it, you own this open
item. If some other commit is more relevant or if this does not belong as a
v10 open item, please let us know. Otherwise, please observe the policy on
open item ownership[1] and send a status update within three calendar days of
this message. Include a date for your subsequent status update. Testers may
discover new open items at any time, and I want to plan to get them all fixed
well in advance of shipping v10. Consequently, I will appreciate your efforts
toward speedy resolution. Thanks.
[1]: /messages/by-id/20170404140717.GA2675809@tornado.leadboat.com
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
Noah Misch <noah@leadboat.com> writes:
On Mon, May 22, 2017 at 10:19:37PM +0300, Nikita Glukhov wrote:
Attached two small fixes for the previous committed patch:
[Action required within three days. This is a generic notification.]
The above-described topic is currently a PostgreSQL 10 open item. Andrew,
since you committed the patch believed to have created it, you own this open
item.
These fixes are committed in e45c5be99d08d7bb6708d7bb1dd0f5d99798c6aa and
68cff231e3a3d0ca9988cf1fde5a3be53235b3bb.
regards, tom lane
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers
On 30.05.2017 02:31, Tom Lane wrote:
Nikita Glukhov <n.gluhov@postgrespro.ru> writes:
2. Also I've found some kind of thinko in the JsObjectSize() macro, but it
seems that this obvious mistake cannot lead to incorrect behavior.

Hm, I think it actually was wrong for the case of jsonb_cont == NULL,
wasn't it?
Yes, JsObjectIsEmpty() returned a false-negative answer for jsonb_cont == NULL,
but this could only lead to suboptimal behavior of populate_record() when the
default record value was given.
But maybe that case didn't arise for the sole call site.
It also seems to me that populate_record() can't be called now with
jsonb_cont == NULL from the existing call sites.
--
Nikita Glukhov
Postgres Professional:http://www.postgrespro.com
The Russian Postgres Company
--
Sent via pgsql-hackers mailing list (pgsql-hackers@postgresql.org)
To make changes to your subscription:
http://www.postgresql.org/mailpref/pgsql-hackers