diff --git a/doc/src/sgml/datatype.sgml b/doc/src/sgml/datatype.sgml
index 6bf4cf6..12832cb 100644
--- a/doc/src/sgml/datatype.sgml
+++ b/doc/src/sgml/datatype.sgml
@@ -143,6 +143,12 @@
       </row>
 
       <row>
+       <entry><type>jsonb</type></entry>
+       <entry></entry>
+       <entry>JSON data, decomposed</entry>
+      </row>
+
+      <row>
        <entry><type>line</type></entry>
        <entry></entry>
        <entry>infinite line on a plane</entry>
@@ -4225,27 +4231,58 @@ SET xmloption TO { DOCUMENT | CONTENT };
   </sect1>
 
   <sect1 id="datatype-json">
-   <title><acronym>JSON</> Type</title>
+   <title><acronym>JSON</> Types</title>
 
    <indexterm zone="datatype-json">
     <primary>JSON</primary>
    </indexterm>
 
+   <indexterm zone="datatype-json">
+    <primary>JSONB</primary>
+   </indexterm>
+
    <para>
-    The <type>json</type> data type can be used to store JSON (JavaScript
-    Object Notation) data, as specified in <ulink
-    url="http://www.ietf.org/rfc/rfc4627.txt">RFC 4627</ulink>.  Such
-    data can also be stored as <type>text</type>, but the
-    <type>json</type> data type has the advantage of checking that each
-    stored value is a valid JSON value.  There are also related support
+    JSON data types are for storing JSON (JavaScript Object Notation)
+    data, as specified in <ulink url="http://www.ietf.org/rfc/rfc4627.txt"
+    >RFC 4627</ulink>. Such data can also be stored as <type>text</type>,
+    but the JSON data types have the advantage of checking that each
+    stored value is a valid JSON value. There are also related support
     functions available; see <xref linkend="functions-json">.
    </para>
 
    <para>
+    There are two JSON data types: <type>json</type> and <type>jsonb</type>.
+    Both accept identical sets of values as input. The difference is primarily
+    a matter of efficiency. The <type>json</type> data type stores an exact
+    copy of the input text, and the processing functions have to reparse
+    it to process it, while <type>jsonb</type> data is stored in a decomposed
+    form that makes it slightly less efficient to input but very much faster
+    to process, since it never needs reparsing.
+   </para>
+
+   <para>
+    The other difference between the types is that the <type>json</type> type
+    is guaranteed to contain an exact copy of the input, including
+    preservation of semantically insignificant white space, and the order of
+    keys within JSON objects. Also, because the exact text is kept, if a JSON
+    object within the value contains the same key more than once, all the
+    key/value pairs are kept. In that case, the processing functions consider
+    the last value as the operative one. By contrast, <type>jsonb</type>
+    does not preserve white space, does not preserve the order of object keys,
+    and does not keep duplicate object keys. Only the last value for a key
+    specified in the input is kept.
+   </para>
+
+   <para>
+    In general, most applications will find it advantageous to store JSON data
+    as <type>jsonb</type>, unless they have quite specialized needs.
+   </para>
+
+   <para>
     <productname>PostgreSQL</productname> allows only one server encoding
-    per database.  It is therefore not possible for JSON to conform rigidly
-    to the specification unless the server encoding is UTF-8.  Attempts to
-    directly include characters which cannot be represented in the server
+    per database.  It is therefore not possible for the JSON types to conform
+    rigidly to the specification unless the server encoding is UTF-8. Attempts
+    to directly include characters which cannot be represented in the server
     encoding will fail; conversely, characters which can be represented in
     the server encoding but not in UTF-8 will be allowed.
     <literal>\uXXXX</literal> escapes are allowed regardless of the server
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index c0a75de..1cc0ad1 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10040,13 +10040,27 @@ table2-mapping
      </tgroup>
    </table>
 
-  <para>
-   <xref linkend="functions-json-table"> shows the functions that are available
-   for creating and manipulating JSON (see <xref linkend="datatype-json">) data.
+  <note>
+   <para>
+    The operators above can take either <type>json</type> or <type>jsonb</type>
+    values as their left hand operands. In general they work much faster with
+    <type>jsonb</type>.
+   </para>
+  </note>
+
+  <!-- 
+     The release notes contain a reference to "functions-json-table". Since
+     that table is now split in two, the id has been parked here so we don't
+     have to change the release notes.
+  -->
+  <para id="functions-json-table">
+   <xref linkend="functions-json-creation-table"> shows the functions that are
+   available for creating <type>json</type> values.
+   (see <xref linkend="datatype-json">)
   </para>
 
-  <table id="functions-json-table">
-    <title>JSON Support Functions</title>
+  <table id="functions-json-creation-table">
+    <title>JSON Creation Functions</title>
     <tgroup cols="5">
      <thead>
       <row>
@@ -10107,13 +10121,38 @@ table2-mapping
        <entry><literal>to_json('Fred said "Hi."'::text)</literal></entry>
        <entry><literal>"Fred said \"Hi.\""</literal></entry>
       </row>
+     </tbody>
+    </tgroup>
+   </table>
+
+
+  <para>
+   <xref linkend="functions-json-processing-table"> shows the functions that
+   are available for processing <type>json</type> and <type>jsonb</type> values.
+   (see <xref linkend="datatype-json">)
+  </para>
+
+  <table id="functions-json-processing-table">
+    <title>JSON Processing Functions</title>
+    <tgroup cols="5">
+     <thead>
       <row>
-       <entry>
-         <indexterm>
+       <entry>Function</entry>
+       <entry>Return Type</entry>
+       <entry>Description</entry>
+       <entry>Example</entry>
+       <entry>Example Result</entry>
+      </row>
+     </thead>
+     <tbody>
+      <row>
+       <entry><indexterm>
           <primary>json_array_length</primary>
-         </indexterm>
-         <literal>json_array_length(json)</literal>
-       </entry>
+         </indexterm><indexterm>
+          <primary>jsonb_array_length</primary>
+         </indexterm><para><literal>json_array_length(json)</literal>
+         </para><para><literal>jsonb_array_length(jsonb)</literal>
+       </para></entry>
        <entry><type>int</type></entry>
        <entry>
          Returns the number of elements in the outermost JSON array.
@@ -10122,13 +10161,16 @@ table2-mapping
        <entry><literal>5</literal></entry>
       </row>
       <row>
-       <entry>
-         <indexterm>
+       <entry><indexterm>
           <primary>json_each</primary>
-         </indexterm>
-         <literal>json_each(json)</literal>
-       </entry>
-       <entry><type>SETOF key text, value json</type></entry>
+         </indexterm><indexterm>
+          <primary>jsonb_each</primary>
+         </indexterm><para><literal>json_each(json)</literal>
+         </para><para><literal>jsonb_each(jsonb)</literal>
+       </para></entry>
+       <entry><para><literal>SETOF key text, value json</literal>
+         </para><para><literal>SETOF key text, value jsonb</literal>
+       </para></entry>
        <entry>
          Expands the outermost JSON object into a set of key/value pairs.
        </entry>
@@ -10143,12 +10185,13 @@ table2-mapping
        </entry>
       </row>
       <row>
-       <entry>
-         <indexterm>
+       <entry><indexterm>
           <primary>json_each_text</primary>
-         </indexterm>
-         <literal>json_each_text(from_json json)</literal>
-       </entry>
+         </indexterm><indexterm>
+          <primary>jsonb_each_text</primary>
+         </indexterm><para><literal>json_each_text(from_json json)</literal>
+         </para><para><literal>jsonb_each_text(from_json jsonb)</literal>
+       </para></entry>
        <entry><type>SETOF key text, value text</type></entry>
        <entry>
          Expands the outermost JSON object into a set of key/value pairs. The
@@ -10165,13 +10208,14 @@ table2-mapping
        </entry>
       </row>
       <row>
-       <entry>
-         <indexterm>
+       <entry><indexterm>
           <primary>json_extract_path</primary>
-         </indexterm>
-         <literal>json_extract_path(from_json json, VARIADIC path_elems text[])</literal>
-       </entry>
-       <entry><type>json</type></entry>
+         </indexterm><indexterm>
+          <primary>jsonb_extract_path</primary>
+         </indexterm><para><literal>json_extract_path(from_json json, VARIADIC path_elems text[])</literal></para><para><literal>jsonb_extract_path(from_jsonb jsonb, VARIADIC path_elems text[])</literal>
+       </para></entry>
+       <entry><para><type>json</type></para><para><type>jsonb</type>
+       </para></entry>
        <entry>
          Returns JSON value pointed to by <parameter>path_elems</parameter>.
        </entry>
@@ -10179,12 +10223,12 @@ table2-mapping
        <entry><literal>{"f5":99,"f6":"foo"}</literal></entry>
       </row>
       <row>
-       <entry>
-         <indexterm>
+       <entry><indexterm>
           <primary>json_extract_path_text</primary>
-         </indexterm>
-         <literal>json_extract_path_text(from_json json, VARIADIC path_elems text[])</literal>
-       </entry>
+         </indexterm><indexterm>
+          <primary>jsonb_extract_path_text</primary>
+         </indexterm><para><literal>json_extract_path_text(from_json json, VARIADIC path_elems text[])</literal></para><para><literal>jsonb_extract_path_text(from_json jsonb, VARIADIC path_elems text[])</literal>
+       </para></entry>
        <entry><type>text</type></entry>
        <entry>
          Returns JSON value pointed to by <parameter>path_elems</parameter>.
@@ -10193,12 +10237,13 @@ table2-mapping
        <entry><literal>foo</literal></entry>
       </row>
       <row>
-       <entry>
-         <indexterm>
+       <entry><indexterm>
           <primary>json_object_keys</primary>
-         </indexterm>
-         <literal>json_object_keys(json)</literal>
-       </entry>
+         </indexterm><indexterm>
+          <primary>jsonb_object_keys</primary>
+         </indexterm><para><literal>json_object_keys(json)</literal>
+         </para><para><literal>jsonb_object_keys(jsonb)</literal>
+       </para></entry>
        <entry><type>SETOF text</type></entry>
        <entry>
           Returns set of keys in the JSON object.  Only the <quote>outer</quote> object will be displayed.
@@ -10214,18 +10259,20 @@ table2-mapping
        </entry>
       </row>
       <row>
-       <entry>
-         <indexterm>
+       <entry><indexterm>
           <primary>json_populate_record</primary>
-         </indexterm>
-         <literal>json_populate_record(base anyelement, from_json json, [, use_json_as_text bool=false]</literal>
-       </entry>
+         </indexterm><indexterm>
+          <primary>jsonb_populate_record</primary>
+         </indexterm><para><literal>json_populate_record(base anyelement, from_json json, [, use_json_as_text bool=false])</literal>
+         </para><para><literal>jsonb_populate_record(base anyelement, from_json jsonb, [, use_json_as_text bool=false])</literal>
+       </para></entry>
        <entry><type>anyelement</type></entry>
        <entry>
          Expands the object in <replaceable>from_json</replaceable> to a row whose columns match
          the record type defined by base. Conversion will be best
          effort; columns in base with no corresponding key in <replaceable>from_json</replaceable>
-         will be left null. If a column is specified more than once, the last value is used.
+         will be left null. When processing <type>json</type>, if a column is 
+         specified more than once, the last value is used.
        </entry>
        <entry><literal>select * from json_populate_record(null::x, '{"a":1,"b":2}')</literal></entry>
        <entry>
@@ -10237,19 +10284,21 @@ table2-mapping
        </entry>
       </row>
       <row>
-       <entry>
-         <indexterm>
+       <entry><indexterm>
           <primary>json_populate_recordset</primary>
-         </indexterm>
-         <literal>json_populate_recordset(base anyelement, from_json json, [, use_json_as_text bool=false]</literal>
-       </entry>
+         </indexterm><indexterm>
+          <primary>jsonb_populate_recordset</primary>
+         </indexterm><para><literal>json_populate_recordset(base anyelement, from_json json, [, use_json_as_text bool=false])</literal>
+         </para><para><literal>jsonb_populate_recordset(base anyelement, from_json jsonb, [, use_json_as_text bool=false])</literal>
+       </para></entry>
        <entry><type>SETOF anyelement</type></entry>
        <entry>
          Expands the outermost set of objects in <replaceable>from_json</replaceable> to a set
          whose columns match the record type defined by base.
          Conversion will be best effort; columns in base with no
          corresponding key in <replaceable>from_json</replaceable> will be left null.
-         If a column is specified more than once, the last value is used.
+         When processing <type>json</type>, if a column is specified more 
+         than once, the last value is used.
        </entry>
        <entry><literal>select * from json_populate_recordset(null::x, '[{"a":1,"b":2},{"a":3,"b":4}]')</literal></entry>
        <entry>
@@ -10262,13 +10311,16 @@ table2-mapping
        </entry>
       </row>
       <row>
-       <entry>
-         <indexterm>
+       <entry><indexterm>
           <primary>json_array_elements</primary>
-         </indexterm>
-         <literal>json_array_elements(json)</literal>
-       </entry>
-       <entry><type>SETOF json</type></entry>
+         </indexterm><indexterm>
+          <primary>jsonb_array_elements</primary>
+         </indexterm><para><literal>json_array_elements(json)</literal>
+         </para><para><literal>jsonb_array_elements(jsonb)</literal>
+       </para></entry>
+       <entry><para><type>SETOF json</type>
+         </para><para><type>SETOF jsonb</type>
+       </para></entry>
        <entry>
          Expands a JSON array to a set of JSON values.
        </entry>
@@ -10284,12 +10336,13 @@ table2-mapping
        </entry>
       </row>
       <row>
-       <entry>
-         <indexterm>
+       <entry><indexterm>
           <primary>json_typeof</primary>
-         </indexterm>
-         <literal>json_typeof(json)</literal>
-       </entry>
+         </indexterm><indexterm>
+          <primary>jsonb_typeof</primary>
+         </indexterm><para><literal>json_typeof(json)</literal>
+         </para><para><literal>jsonb_typeof(jsonb)</literal>
+       </para></entry>
        <entry><type>text</type></entry>
        <entry>
          Returns the type of the outermost JSON value as a text string.  The types are
diff --git a/src/backend/catalog/system_views.sql b/src/backend/catalog/system_views.sql
index 043d118..ca6d14c 100644
--- a/src/backend/catalog/system_views.sql
+++ b/src/backend/catalog/system_views.sql
@@ -796,3 +796,11 @@ CREATE OR REPLACE FUNCTION
 CREATE OR REPLACE FUNCTION
   json_populate_recordset(base anyelement, from_json json, use_json_as_text boolean DEFAULT false)
   RETURNS SETOF anyelement LANGUAGE internal STABLE ROWS 100  AS 'json_populate_recordset';
+
+CREATE OR REPLACE FUNCTION
+  jsonb_populate_record(base anyelement, from_json jsonb, use_json_as_text boolean DEFAULT false)
+  RETURNS anyelement LANGUAGE internal STABLE AS 'jsonb_populate_record';
+
+CREATE OR REPLACE FUNCTION
+  jsonb_populate_recordset(base anyelement, from_json jsonb, use_json_as_text boolean DEFAULT false)
+  RETURNS SETOF anyelement LANGUAGE internal STABLE ROWS 100  AS 'jsonb_populate_recordset';
diff --git a/src/backend/utils/adt/Makefile b/src/backend/utils/adt/Makefile
index 1ae9fa0..fd93d9b 100644
--- a/src/backend/utils/adt/Makefile
+++ b/src/backend/utils/adt/Makefile
@@ -32,7 +32,8 @@ OBJS = acl.o arrayfuncs.o array_selfuncs.o array_typanalyze.o \
 	tsquery_op.o tsquery_rewrite.o tsquery_util.o tsrank.o \
 	tsvector.o tsvector_op.o tsvector_parser.o \
 	txid.o uuid.o windowfuncs.o xml.o rangetypes_spgist.o \
-	rangetypes_typanalyze.o rangetypes_selfuncs.o
+	rangetypes_typanalyze.o rangetypes_selfuncs.o \
+	jsonb.o jsonb_support.o
 
 like.o: like.c like_match.c
 
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 481db16..ab51a5e 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -1262,7 +1262,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
 			pfree(outputstr);
 			break;
 		case TYPCATEGORY_JSON:
-			/* JSON will already be escaped */
+			/* JSON and JSONB will already be escaped */
 			outputstr = OidOutputFunctionCall(typoutputfunc, val);
 			appendStringInfoString(result, outputstr);
 			pfree(outputstr);
@@ -1390,7 +1390,7 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
 		tcategory = TYPCATEGORY_JSON_CAST;
 	else if (element_type == RECORDOID)
 		tcategory = TYPCATEGORY_COMPOSITE;
-	else if (element_type == JSONOID)
+	else if (element_type == JSONOID || element_type == JSONBOID)
 		tcategory = TYPCATEGORY_JSON;
 	else
 		tcategory = TypeCategory(element_type);
@@ -1485,7 +1485,8 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
 			tcategory = TYPCATEGORY_ARRAY;
 		else if (tupdesc->attrs[i]->atttypid == RECORDOID)
 			tcategory = TYPCATEGORY_COMPOSITE;
-		else if (tupdesc->attrs[i]->atttypid == JSONOID)
+		else if (tupdesc->attrs[i]->atttypid == JSONOID ||
+				 tupdesc->attrs[i]->atttypid == JSONBOID)
 			tcategory = TYPCATEGORY_JSON;
 		else
 			tcategory = TypeCategory(tupdesc->attrs[i]->atttypid);
@@ -1611,7 +1612,7 @@ to_json(PG_FUNCTION_ARGS)
 		tcategory = TYPCATEGORY_ARRAY;
 	else if (val_type == RECORDOID)
 		tcategory = TYPCATEGORY_COMPOSITE;
-	else if (val_type == JSONOID)
+	else if (val_type == JSONOID || val_type == JSONBOID)
 		tcategory = TYPCATEGORY_JSON;
 	else
 		tcategory = TypeCategory(val_type);
@@ -1705,7 +1706,7 @@ json_agg_transfn(PG_FUNCTION_ARGS)
 		tcategory = TYPCATEGORY_ARRAY;
 	else if (val_type == RECORDOID)
 		tcategory = TYPCATEGORY_COMPOSITE;
-	else if (val_type == JSONOID)
+	else if (val_type == JSONOID || val_type == JSONBOID)
 		tcategory = TYPCATEGORY_JSON;
 	else
 		tcategory = TypeCategory(val_type);
@@ -1807,12 +1808,15 @@ escape_json(StringInfo buf, const char *str)
 Datum
 json_typeof(PG_FUNCTION_ARGS)
 {
-	text	   *json = PG_GETARG_TEXT_P(0);
+	text	   *json;
 
-	JsonLexContext *lex = makeJsonLexContext(json, false);
+	JsonLexContext *lex;
 	JsonTokenType tok;
 	char	   *type;
 
+	json = PG_GETARG_TEXT_P(0);
+	lex = makeJsonLexContext(json, false);
+
 	/* Lex exactly one token from the input and check its type. */
 	json_lex(lex);
 	tok = lex_peek(lex);
diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c
new file mode 100644
index 0000000..107ebf0
--- /dev/null
+++ b/src/backend/utils/adt/jsonb.c
@@ -0,0 +1,544 @@
+/*-------------------------------------------------------------------------
+ *
+ * jsonb.c
+ *		I/O for jsonb type
+ *
+ * Portions Copyright (c) 1996-2013, PostgreSQL Global Development Group
+ *
+ * NOTE: the JSONB type is designed to be binary compatible with hstore.
+ *
+ * src/backend/utils/adt/jsonb.c
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#include "postgres.h"
+#include "libpq/pqformat.h"
+#include "utils/builtins.h"
+#include "utils/json.h"
+#include "utils/jsonapi.h"
+#include "utils/jsonb.h"
+
+static size_t
+checkStringLen(size_t len)
+{
+	if (len > JSONB_MAX_STRING_LEN)
+		ereport(ERROR,
+				(errcode(ERRCODE_STRING_DATA_RIGHT_TRUNCATION),
+				 errmsg("string too long for jsonb string")));
+	return len;
+}
+
+typedef struct JsonbInState
+{
+	ToJsonbState *state;
+	JsonbValue *res;
+}	JsonbInState;
+
+
+/*
+ * for jsonb we always want the de-escaped value - that's what's in token
+ */
+
+static void
+jsonb_in_scalar(void *state, char *token, JsonTokenType tokentype)
+{
+	JsonbInState *_state = (JsonbInState *) state;
+	JsonbValue	v;
+
+	v.size = sizeof(JEntry);
+
+	switch (tokentype)
+	{
+
+		case JSON_TOKEN_STRING:
+			v.type = jbvString;
+			v.string.len = token ? checkStringLen(strlen(token)) : 0;
+			v.string.val = token ? pnstrdup(token, v.string.len) : NULL;
+			v.size += v.string.len;
+			break;
+		case JSON_TOKEN_NUMBER:
+			v.type = jbvNumeric;
+			v.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(token), 0, -1));
+
+			v.size += VARSIZE_ANY(v.numeric) +sizeof(JEntry) /* alignment */ ;
+			break;
+		case JSON_TOKEN_TRUE:
+			v.type = jbvBool;
+			v.boolean = true;
+			break;
+		case JSON_TOKEN_FALSE:
+			v.type = jbvBool;
+			v.boolean = false;
+			break;
+		case JSON_TOKEN_NULL:
+			v.type = jbvNull;
+			break;
+		default:				/* nothing else should be here in fact */
+			break;
+	}
+
+	if (_state->state == NULL)
+	{
+		/* single scalar */
+		JsonbValue	va;
+
+		va.type = jbvArray;
+		va.array.scalar = true;
+		va.array.nelems = 1;
+
+		_state->res = pushJsonbValue(&_state->state, WJB_BEGIN_ARRAY, &va);
+		_state->res = pushJsonbValue(&_state->state, WJB_ELEM, &v);
+		_state->res = pushJsonbValue(&_state->state, WJB_END_ARRAY, NULL);
+	}
+	else
+	{
+		JsonbValue *o = &_state->state->v;
+
+		switch (o->type)
+		{
+			case jbvArray:
+				_state->res = pushJsonbValue(&_state->state, WJB_ELEM, &v);
+				break;
+			case jbvHash:
+				_state->res = pushJsonbValue(&_state->state, WJB_VALUE, &v);
+				break;
+			default:
+				elog(ERROR, "Wrong state");
+		}
+	}
+}
+
+static void
+jsonb_in_object_start(void *state)
+{
+	JsonbInState *_state = (JsonbInState *) state;
+
+	_state->res = pushJsonbValue(&_state->state, WJB_BEGIN_OBJECT, NULL);
+}
+
+static void
+jsonb_in_object_end(void *state)
+{
+	JsonbInState *_state = (JsonbInState *) state;
+
+	_state->res = pushJsonbValue(&_state->state, WJB_END_OBJECT, NULL);
+}
+
+static void
+jsonb_in_array_start(void *state)
+{
+	JsonbInState *_state = (JsonbInState *) state;
+
+	_state->res = pushJsonbValue(&_state->state, WJB_BEGIN_ARRAY, NULL);
+}
+
+static void
+jsonb_in_array_end(void *state)
+{
+	JsonbInState *_state = (JsonbInState *) state;
+
+	_state->res = pushJsonbValue(&_state->state, WJB_END_ARRAY, NULL);
+}
+
+static void
+jsonb_in_object_field_start(void *state, char *fname, bool isnull)
+{
+	JsonbInState *_state = (JsonbInState *) state;
+	JsonbValue	v;
+
+	v.type = jbvString;
+	v.string.len = fname ? checkStringLen(strlen(fname)) : 0;
+	v.string.val = fname ? pnstrdup(fname, v.string.len) : NULL;
+	v.size = sizeof(JEntry) + v.string.len;
+
+	_state->res = pushJsonbValue(&_state->state, WJB_KEY, &v);
+}
+
+Datum
+jsonb_in(PG_FUNCTION_ARGS)
+{
+	char	   *json = PG_GETARG_CSTRING(0);
+	text	   *result = cstring_to_text(json);
+	JsonLexContext *lex;
+	JsonbInState state;
+	JsonSemAction sem;
+
+	memset(&state, 0, sizeof(state));
+	memset(&sem, 0, sizeof(sem));
+	lex = makeJsonLexContext(result, true);
+
+	sem.semstate = (void *) &state;
+
+	sem.object_start = jsonb_in_object_start;
+	sem.array_start = jsonb_in_array_start;
+	sem.object_end = jsonb_in_object_end;
+	sem.array_end = jsonb_in_array_end;
+	sem.scalar = jsonb_in_scalar;
+	sem.object_field_start = jsonb_in_object_field_start;
+
+	pg_parse_json(lex, &sem);
+
+	/* after parsing, the res member holds the composed jsonb structure */
+	PG_RETURN_POINTER(JsonbValueToJsonb(state.res));
+}
+
+static void recvJsonb(StringInfo buf, JsonbValue *v, uint32 level, uint32 header);
+
+static void
+recvJsonbValue(StringInfo buf, JsonbValue *v, uint32 level, int c)
+{
+	uint32		hentry = c & JENTRY_TYPEMASK;
+
+	if (hentry == JENTRY_ISNULL)
+	{
+		v->type = jbvNull;
+		v->size = sizeof(JEntry);
+	}
+	else if (hentry == JENTRY_ISOBJECT || hentry == JENTRY_ISARRAY || hentry == JENTRY_ISCALAR)
+	{
+		recvJsonb(buf, v, level + 1, (uint32) c);
+	}
+	else if (hentry == JENTRY_ISFALSE || hentry == JENTRY_ISTRUE)
+	{
+		v->type = jbvBool;
+		v->size = sizeof(JEntry);
+		v->boolean = (hentry == JENTRY_ISFALSE) ? false : true;
+	}
+	else if (hentry == JENTRY_ISNUMERIC)
+	{
+		v->type = jbvNumeric;
+		v->numeric = DatumGetNumeric(DirectFunctionCall3(numeric_recv, PointerGetDatum(buf),
+									   Int32GetDatum(0), Int32GetDatum(-1)));
+
+		v->size = sizeof(JEntry) * 2 + VARSIZE_ANY(v->numeric);
+	}
+	else if (hentry == JENTRY_ISSTRING)
+	{
+		v->type = jbvString;
+		v->string.val = pq_getmsgtext(buf, c, &c);
+		v->string.len = checkStringLen(c);
+		v->size = sizeof(JEntry) + v->string.len;
+	}
+	else
+	{
+		elog(ERROR, "bogus input");
+	}
+}
+
+static void
+recvJsonb(StringInfo buf, JsonbValue *v, uint32 level, uint32 header)
+{
+	uint32		hentry;
+	uint32		i;
+
+	hentry = header & JENTRY_TYPEMASK;
+
+	v->size = 3 * sizeof(JEntry);
+	if (hentry == JENTRY_ISOBJECT)
+	{
+		v->type = jbvHash;
+		v->hash.npairs = header & JB_COUNT_MASK;
+		if (v->hash.npairs > 0)
+		{
+			v->hash.pairs = palloc(sizeof(*v->hash.pairs) * v->hash.npairs);
+
+			for (i = 0; i < v->hash.npairs; i++)
+			{
+				recvJsonbValue(buf, &v->hash.pairs[i].key, level, pq_getmsgint(buf, 4));
+				if (v->hash.pairs[i].key.type != jbvString)
+					elog(ERROR, "jsonb's key could be only a string");
+
+				recvJsonbValue(buf, &v->hash.pairs[i].value, level, pq_getmsgint(buf, 4));
+
+				v->size += v->hash.pairs[i].key.size + v->hash.pairs[i].value.size;
+			}
+
+			uniqueJsonbValue(v);
+		}
+	}
+	else if (hentry == JENTRY_ISARRAY || hentry == JENTRY_ISCALAR)
+	{
+		v->type = jbvArray;
+		v->array.nelems = header & JB_COUNT_MASK;
+		v->array.scalar = (hentry == JENTRY_ISCALAR) ? true : false;
+
+		if (v->array.scalar && v->array.nelems != 1)
+			elog(ERROR, "bogus input");
+
+		if (v->array.nelems > 0)
+		{
+			v->array.elems = palloc(sizeof(*v->array.elems) * v->array.nelems);
+
+			for (i = 0; i < v->array.nelems; i++)
+			{
+				recvJsonbValue(buf, v->array.elems + i, level, pq_getmsgint(buf, 4));
+				v->size += v->array.elems[i].size;
+			}
+		}
+	}
+	else
+	{
+		elog(ERROR, "bogus input");
+	}
+}
+
+Datum
+jsonb_recv(PG_FUNCTION_ARGS)
+{
+	StringInfo	buf = (StringInfo) PG_GETARG_POINTER(0);
+	JsonbValue	v;
+
+	recvJsonb(buf, &v, 0, pq_getmsgint(buf, 4));
+
+	PG_RETURN_POINTER(JsonbValueToJsonb(&v));
+}
+
+static void
+putEscapedValue(StringInfo out, JsonbValue *v)
+{
+	switch (v->type)
+	{
+		case jbvNull:
+			appendBinaryStringInfo(out, "null", 4);
+			break;
+		case jbvString:
+			escape_json(out, pnstrdup(v->string.val, v->string.len));
+			break;
+		case jbvBool:
+			if (v->boolean)
+				appendBinaryStringInfo(out, "true", 4);
+			else
+				appendBinaryStringInfo(out, "false", 5);
+			break;
+		case jbvNumeric:
+			appendStringInfoString(out, DatumGetCString(DirectFunctionCall1(numeric_out, PointerGetDatum(v->numeric))));
+			break;
+		default:
+			elog(PANIC, "Unknown type");
+	}
+}
+
+char *
+JsonbToCString(StringInfo out, char *in, int estimated_len)
+{
+	bool		first = true;
+	JsonbIterator *it;
+	int			type;
+	JsonbValue	v;
+	int			level = 0;
+
+	if (out == NULL)
+		out = makeStringInfo();
+
+	if (in == NULL)
+	{
+		appendStringInfoString(out, "");
+		return out->data;
+	}
+
+	enlargeStringInfo(out, (estimated_len >= 0) ? estimated_len : 64);
+
+	it = JsonbIteratorInit(in);
+
+	while ((type = JsonbIteratorGet(&it, &v, false)) != 0)
+	{
+reout:
+		switch (type)
+		{
+			case WJB_BEGIN_ARRAY:
+				if (first == false)
+					appendBinaryStringInfo(out, ", ", 2);
+				first = true;
+
+				if (v.array.scalar == false)
+					appendStringInfoChar(out, '[');
+				level++;
+				break;
+			case WJB_BEGIN_OBJECT:
+				if (first == false)
+					appendBinaryStringInfo(out, ", ", 2);
+				first = true;
+				appendStringInfoCharMacro(out, '{');
+
+				level++;
+				break;
+			case WJB_KEY:
+				if (first == false)
+					appendBinaryStringInfo(out, ", ", 2);
+				first = true;
+
+				putEscapedValue(out, &v);
+				appendBinaryStringInfo(out, ": ", 2);
+
+				type = JsonbIteratorGet(&it, &v, false);
+				if (type == WJB_VALUE)
+				{
+					first = false;
+					putEscapedValue(out, &v);
+				}
+				else
+				{
+					Assert(type == WJB_BEGIN_OBJECT || type == WJB_BEGIN_ARRAY);
+					goto reout;
+				}
+				break;
+			case WJB_ELEM:
+				if (first == false)
+					appendBinaryStringInfo(out, ", ", 2);
+				else
+					first = false;
+
+				putEscapedValue(out, &v);
+				break;
+			case WJB_END_ARRAY:
+				level--;
+				if (v.array.scalar == false)
+					appendStringInfoChar(out, ']');
+				first = false;
+				break;
+			case WJB_END_OBJECT:
+				level--;
+				appendStringInfoCharMacro(out, '}');
+				first = false;
+				break;
+			default:
+				elog(PANIC, "Wrong flags");
+		}
+	}
+
+	Assert(level == 0);
+
+	return out->data;
+}
+
+Datum
+jsonb_out(PG_FUNCTION_ARGS)
+{
+	Jsonb	   *jb = PG_GETARG_JSONB(0);
+	char	   *out;
+
+	out = JsonbToCString(NULL, (JB_ISEMPTY(jb)) ? NULL : VARDATA(jb), VARSIZE(jb));
+
+	PG_RETURN_CSTRING(out);
+}
+
+Datum
+jsonb_send(PG_FUNCTION_ARGS)
+{
+	Jsonb	   *in = PG_GETARG_JSONB(0);
+	StringInfoData buf;
+
+	pq_begintypsend(&buf);
+
+	if (JB_ISEMPTY(in))
+	{
+		pq_sendint(&buf, 0, 4);
+	}
+	else
+	{
+		JsonbIterator *it;
+		int			type;
+		JsonbValue	v;
+		uint32		flag;
+		bytea	   *nbuf;
+
+		enlargeStringInfo(&buf, VARSIZE_ANY(in) /* just estimation */ );
+
+		it = JsonbIteratorInit(VARDATA_ANY(in));
+
+		while ((type = JsonbIteratorGet(&it, &v, false)) != 0)
+		{
+			switch (type)
+			{
+				case WJB_BEGIN_ARRAY:
+					flag = (v.array.scalar) ? JENTRY_ISCALAR : JENTRY_ISARRAY;
+					pq_sendint(&buf, v.array.nelems | flag, 4);
+					break;
+				case WJB_BEGIN_OBJECT:
+					pq_sendint(&buf, v.hash.npairs | JENTRY_ISOBJECT, 4);
+					break;
+				case WJB_KEY:
+					pq_sendint(&buf, v.string.len | JENTRY_ISSTRING, 4);
+					pq_sendtext(&buf, v.string.val, v.string.len);
+					break;
+				case WJB_ELEM:
+				case WJB_VALUE:
+					switch (v.type)
+					{
+						case jbvNull:
+							pq_sendint(&buf, JENTRY_ISNULL, 4);
+							break;
+						case jbvString:
+							pq_sendint(&buf, v.string.len | JENTRY_ISSTRING, 4);
+							pq_sendtext(&buf, v.string.val, v.string.len);
+							break;
+						case jbvBool:
+							pq_sendint(&buf, (v.boolean) ? JENTRY_ISTRUE : JENTRY_ISFALSE, 4);
+							break;
+						case jbvNumeric:
+							nbuf = DatumGetByteaP(DirectFunctionCall1(numeric_send, NumericGetDatum(v.numeric)));
+							pq_sendint(&buf, VARSIZE_ANY(nbuf) | JENTRY_ISNUMERIC, 4);
+							pq_sendbytes(&buf, (char *) nbuf, VARSIZE_ANY(nbuf));
+							break;
+						default:
+							elog(PANIC, "Wrong type: %u", v.type);
+					}
+					break;
+				case WJB_END_ARRAY:
+				case WJB_END_OBJECT:
+					break;
+				default:
+					elog(PANIC, "Wrong flags");
+			}
+		}
+	}
+
+	PG_RETURN_BYTEA_P(pq_endtypsend(&buf));
+}
+
+Datum
+jsonb_typeof(PG_FUNCTION_ARGS)
+{
+	Jsonb	   *in = PG_GETARG_JSONB(0);
+	JsonbIterator *it;
+	JsonbValue	v;
+	char	   *result;
+
+	if (JB_ROOT_IS_OBJECT(in))
+		result = "object";
+	else if (JB_ROOT_IS_ARRAY(in) && !JB_ROOT_IS_SCALAR(in))
+		result = "array";
+	else
+	{
+		Assert(JB_ROOT_IS_SCALAR(in));
+
+		it = JsonbIteratorInit(VARDATA_ANY(in));
+
+		/*
+		 * a root scalar is stored as an array of one element, so we get the
+		 * array and then its first (and only) member.
+		 */
+		(void) JsonbIteratorGet(&it, &v, true);
+		(void) JsonbIteratorGet(&it, &v, true);
+		switch (v.type)
+		{
+			case jbvNull:
+				result = "null";
+				break;
+			case jbvString:
+				result = "string";
+				break;
+			case jbvBool:
+				result = "boolean";
+				break;
+			case jbvNumeric:
+				result = "number";
+				break;
+			default:
+				elog(ERROR, "Wrong jsonb scalar type: %u", v.type);
+		}
+	}
+
+	PG_RETURN_TEXT_P(cstring_to_text(result));
+}
diff --git a/src/backend/utils/adt/jsonb_support.c b/src/backend/utils/adt/jsonb_support.c
new file mode 100644
index 0000000..79da6eb
--- /dev/null
+++ b/src/backend/utils/adt/jsonb_support.c
@@ -0,0 +1,1261 @@
+/*-------------------------------------------------------------------------
+ *
+ * jsonb_support.c
+ *	  Support functions for jsonb
+ *
+ * Portions Copyright (c) 1996-2013, PostgreSQL Global Development Group
+ *
+ * src/backend/utils/adt/jsonb_support.c
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#include "postgres.h"
+#include "utils/builtins.h"
+#include "utils/jsonb.h"
+
+/*
+ * Turn an in-memory JsonbValue tree into an on-disk (varlena) Jsonb.
+ *
+ * NULL input yields NULL.  A scalar value is first wrapped in a one-element
+ * pseudo-array (the binary format has no bare scalars) and then compressed.
+ * An array or hash tree is compressed directly; a jbvBinary value is already
+ * in binary form and is simply copied.
+ */
+Jsonb *
+JsonbValueToJsonb(JsonbValue *v)
+{
+	Jsonb	   *out;
+
+	if (v == NULL)
+	{
+		out = NULL;
+	}
+	else if (v->type == jbvString || v->type == jbvBool ||
+			 v->type == jbvNumeric || v->type == jbvNull)
+	{
+		/* scalar value: wrap it in a one-element "scalar" array */
+
+		ToJsonbState *state = NULL;
+		JsonbValue *res;
+		uint32		sz;
+		JsonbValue	scalarArray;
+
+		scalarArray.type = jbvArray;
+		scalarArray.array.scalar = true;
+		scalarArray.array.nelems = 1;
+
+		pushJsonbValue(&state, WJB_BEGIN_ARRAY, &scalarArray);
+		pushJsonbValue(&state, WJB_ELEM, v);
+		res = pushJsonbValue(&state, WJB_END_ARRAY, NULL);
+
+		/* res->size is an upper bound on the compressed size */
+		out = palloc(VARHDRSZ + res->size);
+		sz = compressJsonb(res, VARDATA(out));
+		Assert(sz <= res->size);
+		SET_VARSIZE(out, sz + VARHDRSZ);
+	}
+	else if (v->type == jbvHash || v->type == jbvArray)
+	{
+		uint32		sz;
+
+		out = palloc(VARHDRSZ + v->size);
+		sz = compressJsonb(v, VARDATA(out));
+		Assert(sz <= v->size);
+		SET_VARSIZE(out, VARHDRSZ + sz);
+	}
+	else
+	{
+		/* already-compressed nested value: just copy the bytes */
+		out = palloc(VARHDRSZ + v->binary.len);
+
+		Assert(v->type == jbvBinary);
+		SET_VARSIZE(out, VARHDRSZ + v->binary.len);
+		memcpy(VARDATA(out), v->binary.data, v->binary.len);
+	}
+
+	return out;
+}
+
+/*
+ * Sort the pairs of a hash-like JsonbValue by key and remove duplicate keys.
+ *
+ * compareJsonbPair orders equal keys by descending "order" (insertion
+ * sequence), and the de-duplication below keeps the first pair of each run,
+ * so the most recently inserted value for a key wins.  v->size is reduced
+ * by the size of each discarded pair.
+ */
+void
+uniqueJsonbValue(JsonbValue *v)
+{
+	bool		hasNonUniq = false;
+
+	Assert(v->type == jbvHash);
+
+	/* compareJsonbPair sets hasNonUniq when it sees two equal keys */
+	if (v->hash.npairs > 1)
+		qsort_arg(v->hash.pairs, v->hash.npairs, sizeof(*v->hash.pairs),
+				  compareJsonbPair, &hasNonUniq);
+
+	if (hasNonUniq)
+	{
+		JsonbPair  *ptr = v->hash.pairs + 1,
+				   *res = v->hash.pairs;
+
+		while (ptr - v->hash.pairs < v->hash.npairs)
+		{
+			if (ptr->key.string.len == res->key.string.len &&
+				memcmp(ptr->key.string.val, res->key.string.val,
+					   ptr->key.string.len) == 0)
+			{
+				/* duplicate key: drop this pair, account for its size */
+				v->size -= ptr->key.size + ptr->value.size;
+			}
+			else
+			{
+				res++;
+				if (ptr != res)
+					memcpy(res, ptr, sizeof(*res));
+			}
+			ptr++;
+		}
+
+		v->hash.npairs = res + 1 - v->hash.pairs;
+	}
+}
+
+/****************************************************************************
+ *						   Compare Functions								*
+ ****************************************************************************/
+
+/*
+ * Compare two jbvString JsonbValue values.  The third argument 'arg', if
+ * not NULL, must point to a bool which is set to true when the strings are
+ * equal, and is left untouched otherwise.
+ *
+ * Note: ordering is length-first (shorter strings sort before longer ones);
+ * only equal-length strings are compared byte-wise.  This is not a
+ * collation-aware or lexicographic order.
+ */
+int
+compareJsonbStringValue(const void *a, const void *b, void *arg)
+{
+	const JsonbValue *va = a;
+	const JsonbValue *vb = b;
+	int			res;
+
+	Assert(va->type == jbvString);
+	Assert(vb->type == jbvString);
+
+	if (va->string.len == vb->string.len)
+	{
+		res = memcmp(va->string.val, vb->string.val, va->string.len);
+		if (res == 0 && arg)
+			*(bool *) arg = true;
+	}
+	else
+	{
+		res = (va->string.len > vb->string.len) ? 1 : -1;
+	}
+
+	return res;
+}
+
+/*
+ * qsort helper to compare JsonbPair values; the third argument arg is
+ * transferred as-is to the underlying compareJsonbStringValue() call.
+ * Pairs with equal keys are ordered by their "order" field (descending),
+ * so that the de-duplication in uniqueJsonbValue(), which keeps the first
+ * pair of a run, preserves the most recently inserted value.
+ */
+int
+compareJsonbPair(const void *a, const void *b, void *arg)
+{
+	const JsonbPair *pa = a;
+	const JsonbPair *pb = b;
+	int			res;
+
+	res = compareJsonbStringValue(&pa->key, &pb->key, arg);
+
+	/*
+	 * guarantee keeping order of equal pair. Unique algorithm will prefer
+	 * first element as value
+	 */
+
+	if (res == 0)
+		res = (pa->order > pb->order) ? -1 : 1;
+
+	return res;
+}
+
+/*
+ * Provide a consistent (but arbitrary) total order over JsonbValue trees.
+ *
+ * Values of different types are ordered by their type enum; containers are
+ * ordered first by element/pair count, then element-wise recursively.
+ */
+int
+compareJsonbValue(JsonbValue *a, JsonbValue *b)
+{
+	if (a->type == b->type)
+	{
+		switch (a->type)
+		{
+			case jbvNull:
+				return 0;
+			case jbvString:
+				return compareJsonbStringValue(a, b, NULL);
+			case jbvBool:
+				if (a->boolean == b->boolean)
+					return 0;
+				return (a->boolean > b->boolean) ? 1 : -1;
+			case jbvNumeric:
+				return DatumGetInt32(DirectFunctionCall2(numeric_cmp,
+												 PointerGetDatum(a->numeric),
+											   PointerGetDatum(b->numeric)));
+			case jbvArray:
+				if (a->array.nelems == b->array.nelems)
+				{
+					int			i,
+								r;
+
+					/* equal length: compare element-wise */
+					for (i = 0; i < a->array.nelems; i++)
+						if ((r = compareJsonbValue(a->array.elems + i,
+												   b->array.elems + i)) != 0)
+							return r;
+
+					return 0;
+				}
+
+				return (a->array.nelems > b->array.nelems) ? 1 : -1;
+			case jbvHash:
+				if (a->hash.npairs == b->hash.npairs)
+				{
+					int			i,
+								r;
+
+					/* equal pair count: compare key, then value, pair-wise */
+					for (i = 0; i < a->hash.npairs; i++)
+					{
+						if ((r = compareJsonbStringValue(&a->hash.pairs[i].key,
+													   &b->hash.pairs[i].key,
+														 NULL)) != 0)
+							return r;
+						if ((r = compareJsonbValue(&a->hash.pairs[i].value,
+											  &b->hash.pairs[i].value)) != 0)
+							return r;
+					}
+
+					return 0;
+				}
+
+				return (a->hash.npairs > b->hash.npairs) ? 1 : -1;
+			case jbvBinary:
+				return compareJsonbBinaryValue(a->binary.data, b->binary.data);
+			default:
+				elog(PANIC, "unknown JsonbValue->type: %d", a->type);
+		}
+	}
+
+	return (a->type > b->type) ? 1 : -1;
+}
+
+/*
+ * Provide some (arbitrary but consistent) order for two binary Jsonb
+ * buffers by iterating both in lockstep and comparing the first differing
+ * token.  Containers compare by their element/pair counts; their contents
+ * are then compared token-by-token as iteration proceeds.
+ */
+int
+compareJsonbBinaryValue(char *a, char *b)
+{
+	JsonbIterator *it1,
+			   *it2;
+	int			res = 0;
+
+	it1 = JsonbIteratorInit(a);
+	it2 = JsonbIteratorInit(b);
+
+	/* walk both values until a difference is found or both are exhausted */
+	while (res == 0)
+	{
+		JsonbValue	v1,
+					v2;
+		int			r1,
+					r2;
+
+		r1 = JsonbIteratorGet(&it1, &v1, false);
+		r2 = JsonbIteratorGet(&it2, &v2, false);
+
+		if (r1 == r2)
+		{
+			if (r1 == 0)
+				break;			/* equal */
+
+			if (v1.type == v2.type)
+			{
+				switch (v1.type)
+				{
+					case jbvString:
+						res = compareJsonbStringValue(&v1, &v2, NULL);
+						break;
+					case jbvBool:
+						if (v1.boolean == v2.boolean)
+							res = 0;
+						else
+							res = (v1.boolean > v2.boolean) ? 1 : -1;
+						break;
+					case jbvNumeric:
+						res = DatumGetInt32(DirectFunctionCall2(numeric_cmp,
+												 PointerGetDatum(v1.numeric),
+											   PointerGetDatum(v2.numeric)));
+						break;
+					case jbvArray:
+						if (v1.array.nelems != v2.array.nelems)
+							res = (v1.array.nelems > v2.array.nelems) ? 1 : -1;
+						break;
+					case jbvHash:
+						if (v1.hash.npairs != v2.hash.npairs)
+							res = (v1.hash.npairs > v2.hash.npairs) ? 1 : -1;
+						break;
+					default:
+						break;
+				}
+			}
+			else
+			{
+				res = (v1.type > v2.type) ? 1 : -1;		/* dummy order */
+			}
+		}
+		else
+		{
+			res = (r1 > r2) ? 1 : -1;	/* dummy order */
+		}
+	}
+
+	return res;
+}
+
+/****************************************************************************
+ *			find string key in hash or element by value in array			*
+ ****************************************************************************/
+/*
+ * Search a binary jsonb container for a value.
+ *
+ * For an array container (when flags includes JB_FLAG_ARRAY) this scans for
+ * an element equal to *key; for an object container (JB_FLAG_OBJECT) it
+ * binary-searches the sorted keys (key must be a jbvString) and returns the
+ * associated value.  'lowbound', if not NULL, is both the starting position
+ * and, on success, is updated so a subsequent call can resume the search.
+ *
+ * Returns a pointer to a statically allocated JsonbValue (overwritten by
+ * the next call), or NULL if no match is found.
+ */
+JsonbValue *
+findUncompressedJsonbValueByValue(char *buffer, uint32 flags,
+								  uint32 *lowbound, JsonbValue *key)
+{
+	uint32		header = *(uint32 *) buffer;
+	static JsonbValue r;
+
+	Assert((header & (JB_FLAG_ARRAY | JB_FLAG_OBJECT)) !=
+		   (JB_FLAG_ARRAY | JB_FLAG_OBJECT));
+
+	if (flags & JB_FLAG_ARRAY & header)
+	{
+		JEntry	   *array = (JEntry *) (buffer + sizeof(header));
+		char	   *data = (char *) (array + (header & JB_COUNT_MASK));
+		int			i;
+
+		/* linear scan of array elements, matching on type and value */
+		for (i = (lowbound) ? *lowbound : 0; i < (header & JB_COUNT_MASK); i++)
+		{
+			JEntry	   *e = array + i;
+
+			if (JBE_ISNULL(*e) && key->type == jbvNull)
+			{
+				r.type = jbvNull;
+				if (lowbound)
+					*lowbound = i;
+				r.size = sizeof(JEntry);
+
+				return &r;
+			}
+			else if (JBE_ISSTRING(*e) && key->type == jbvString)
+			{
+				if (key->string.len == JBE_LEN(*e) &&
+					memcmp(key->string.val, data + JBE_OFF(*e),
+						   key->string.len) == 0)
+				{
+					r.type = jbvString;
+					r.string.val = data + JBE_OFF(*e);
+					r.string.len = key->string.len;
+					r.size = sizeof(JEntry) + r.string.len;
+					if (lowbound)
+						*lowbound = i;
+
+					return &r;
+				}
+			}
+			else if (JBE_ISBOOL(*e) && key->type == jbvBool)
+			{
+				if ((JBE_ISBOOL_TRUE(*e) && key->boolean == true) ||
+					(JBE_ISBOOL_FALSE(*e) && key->boolean == false))
+				{
+					r = *key;
+					r.size = sizeof(JEntry);
+					if (lowbound)
+						*lowbound = i;
+
+					return &r;
+				}
+			}
+			else if (JBE_ISNUMERIC(*e) && key->type == jbvNumeric)
+			{
+				if (DatumGetBool(DirectFunctionCall2(numeric_eq,
+							   PointerGetDatum(data + INTALIGN(JBE_OFF(*e))),
+									 PointerGetDatum(key->numeric))) == true)
+				{
+					r.type = jbvNumeric;
+					r.numeric = (Numeric) (data + INTALIGN(JBE_OFF(*e)));
+
+					/*
+					 * set r.size explicitly: r is static, so a stale size
+					 * from a previous call must not leak to the caller
+					 * (consistent with the object-path numeric case below)
+					 */
+					r.size = 2 * sizeof(JEntry) + VARSIZE_ANY(r.numeric);
+
+					if (lowbound)
+						*lowbound = i;
+
+					return &r;
+				}
+			}
+		}
+	}
+	else if (flags & JB_FLAG_OBJECT & header)
+	{
+		JEntry	   *array = (JEntry *) (buffer + sizeof(header));
+		char	   *data = (char *) (array + (header & JB_COUNT_MASK) * 2);
+		uint32		stopLow = lowbound ? *lowbound : 0,
+					stopHigh = (header & JB_COUNT_MASK),
+					stopMiddle;
+
+		/* object keys are always strings */
+		if (key->type != jbvString)
+			return NULL;
+
+		/* binary search over the sorted key entries */
+		while (stopLow < stopHigh)
+		{
+			int			difference;
+			JEntry	   *e;
+
+			stopMiddle = stopLow + (stopHigh - stopLow) / 2;
+
+			e = array + stopMiddle * 2;
+
+			if (key->string.len == JBE_LEN(*e))
+				difference = memcmp(data + JBE_OFF(*e), key->string.val,
+									key->string.len);
+			else
+				difference = (JBE_LEN(*e) > key->string.len) ? 1 : -1;
+
+			if (difference == 0)
+			{
+				JEntry	   *v = e + 1;	/* value entry follows its key */
+
+				if (lowbound)
+					*lowbound = stopMiddle + 1;
+
+				if (JBE_ISSTRING(*v))
+				{
+					r.type = jbvString;
+					r.string.val = data + JBE_OFF(*v);
+					r.string.len = JBE_LEN(*v);
+					r.size = sizeof(JEntry) + r.string.len;
+				}
+				else if (JBE_ISBOOL(*v))
+				{
+					r.type = jbvBool;
+					r.boolean = (JBE_ISBOOL_TRUE(*v)) ? true : false;
+					r.size = sizeof(JEntry);
+				}
+				else if (JBE_ISNUMERIC(*v))
+				{
+					r.type = jbvNumeric;
+					r.numeric = (Numeric) (data + INTALIGN(JBE_OFF(*v)));
+
+					r.size = 2 * sizeof(JEntry) + VARSIZE_ANY(r.numeric);
+				}
+				else if (JBE_ISNULL(*v))
+				{
+					r.type = jbvNull;
+					r.size = sizeof(JEntry);
+				}
+				else
+				{
+					/* nested container: return it in binary form */
+					r.type = jbvBinary;
+					r.binary.data = data + INTALIGN(JBE_OFF(*v));
+					r.binary.len = JBE_LEN(*v) -
+						(INTALIGN(JBE_OFF(*v)) - JBE_OFF(*v));
+					r.size = 2 * sizeof(JEntry) + r.binary.len;
+				}
+
+				return &r;
+			}
+			else if (difference < 0)
+			{
+				stopLow = stopMiddle + 1;
+			}
+			else
+			{
+				stopHigh = stopMiddle;
+			}
+		}
+
+		if (lowbound)
+			*lowbound = stopLow;
+	}
+
+	return NULL;
+}
+
+/*
+ * Convenience wrapper around findUncompressedJsonbValueByValue() taking a
+ * simple string key (or NULL for a jbvNull key).
+ */
+JsonbValue *
+findUncompressedJsonbValue(char *buffer, uint32 flags, uint32 *lowbound,
+						   char *key, uint32 keylen)
+{
+	JsonbValue	v;
+
+	if (key == NULL)
+	{
+		v.type = jbvNull;
+	}
+	else
+	{
+		v.type = jbvString;
+		v.string.val = key;
+		v.string.len = keylen;
+	}
+
+	return findUncompressedJsonbValueByValue(buffer, flags, lowbound, &v);
+}
+
+/*
+ * Get the i-th value of an array or hash.  If i < 0 it counts from the
+ * end of the array/hash.  For a hash, the i-th *value* entry is returned
+ * (keys are skipped).  Note: returns a pointer to a statically allocated
+ * JsonbValue, overwritten by the next call; NULL if i is out of range or
+ * the container type does not match 'flags'.
+ */
+JsonbValue *
+getJsonbValue(char *buffer, uint32 flags, int32 i)
+{
+	uint32		header = *(uint32 *) buffer;
+	static JsonbValue r;
+	JEntry	   *array,
+			   *e;
+	char	   *data;
+
+	Assert((header & (JB_FLAG_ARRAY | JB_FLAG_OBJECT)) !=
+		   (JB_FLAG_ARRAY | JB_FLAG_OBJECT));
+
+	/* normalize negative index; reject out-of-range */
+	if (i >= 0)
+	{
+		if (i >= (header & JB_COUNT_MASK))
+			return NULL;
+	}
+	else
+	{
+		if (-i > (header & JB_COUNT_MASK))
+			return NULL;
+
+		i = (header & JB_COUNT_MASK) + i;
+	}
+
+	array = (JEntry *) (buffer + sizeof(header));
+
+	if (flags & JB_FLAG_ARRAY & header)
+	{
+		e = array + i;
+		data = (char *) (array + (header & JB_COUNT_MASK));
+	}
+	else if (flags & JB_FLAG_OBJECT & header)
+	{
+		/* entries alternate key/value; +1 selects the value */
+		e = array + i * 2 + 1;
+		data = (char *) (array + (header & JB_COUNT_MASK) * 2);
+	}
+	else
+	{
+		return NULL;
+	}
+
+	if (JBE_ISSTRING(*e))
+	{
+		r.type = jbvString;
+		r.string.val = data + JBE_OFF(*e);
+		r.string.len = JBE_LEN(*e);
+		r.size = sizeof(JEntry) + r.string.len;
+	}
+	else if (JBE_ISBOOL(*e))
+	{
+		r.type = jbvBool;
+		r.boolean = (JBE_ISBOOL_TRUE(*e)) ? true : false;
+		r.size = sizeof(JEntry);
+	}
+	else if (JBE_ISNUMERIC(*e))
+	{
+		r.type = jbvNumeric;
+		r.numeric = (Numeric) (data + INTALIGN(JBE_OFF(*e)));
+
+		r.size = 2 * sizeof(JEntry) + VARSIZE_ANY(r.numeric);
+	}
+	else if (JBE_ISNULL(*e))
+	{
+		r.type = jbvNull;
+		r.size = sizeof(JEntry);
+	}
+	else
+	{
+		/* nested container: return in binary form, skipping align padding */
+		r.type = jbvBinary;
+		r.binary.data = data + INTALIGN(JBE_OFF(*e));
+		r.binary.len = JBE_LEN(*e) - (INTALIGN(JBE_OFF(*e)) - JBE_OFF(*e));
+		r.size = r.binary.len + 2 * sizeof(JEntry);
+	}
+
+	return &r;
+}
+
+/****************************************************************************
+ *					  Walk on tree representation of jsonb					*
+ ****************************************************************************/
+/*
+ * Recursively walk a JsonbValue tree, invoking 'cb' with WJB_* event flags
+ * for each container boundary, element, key and value.  'level' is the
+ * current nesting depth (incremented for nested containers).
+ */
+static void
+walkUncompressedJsonbDo(JsonbValue *v, walk_jsonb_cb cb, void *cb_arg, uint32 level)
+{
+	int			i;
+
+	switch (v->type)
+	{
+		case jbvArray:
+			cb(cb_arg, v, WJB_BEGIN_ARRAY, level);
+			for (i = 0; i < v->array.nelems; i++)
+			{
+				/* scalars and pre-compressed values are emitted directly */
+				if (v->array.elems[i].type == jbvNull ||
+					v->array.elems[i].type == jbvString ||
+					v->array.elems[i].type == jbvBool ||
+					v->array.elems[i].type == jbvNumeric ||
+					v->array.elems[i].type == jbvBinary)
+					cb(cb_arg, v->array.elems + i, WJB_ELEM, level);
+				else
+					walkUncompressedJsonbDo(v->array.elems + i, cb, cb_arg,
+											level + 1);
+			}
+			cb(cb_arg, v, WJB_END_ARRAY, level);
+			break;
+		case jbvHash:
+			cb(cb_arg, v, WJB_BEGIN_OBJECT, level);
+
+			for (i = 0; i < v->hash.npairs; i++)
+			{
+				cb(cb_arg, &v->hash.pairs[i].key, WJB_KEY, level);
+
+				if (v->hash.pairs[i].value.type == jbvNull ||
+					v->hash.pairs[i].value.type == jbvString ||
+					v->hash.pairs[i].value.type == jbvBool ||
+					v->hash.pairs[i].value.type == jbvNumeric ||
+					v->hash.pairs[i].value.type == jbvBinary)
+					cb(cb_arg, &v->hash.pairs[i].value, WJB_VALUE, level);
+				else
+					walkUncompressedJsonbDo(&v->hash.pairs[i].value, cb, cb_arg,
+											level + 1);
+			}
+
+			cb(cb_arg, v, WJB_END_OBJECT, level);
+			break;
+		default:
+			elog(PANIC, "impossible JsonbValue->type: %d", v->type);
+	}
+}
+
+/*
+ * Public entry point for walking a JsonbValue tree; a NULL tree is a no-op.
+ */
+void
+walkUncompressedJsonb(JsonbValue *v, walk_jsonb_cb cb, void *cb_arg)
+{
+	if (v)
+		walkUncompressedJsonbDo(v, cb, cb_arg, 0);
+}
+
+/****************************************************************************
+ *						   Iteration over binary jsonb						*
+ ****************************************************************************/
+/*
+ * Initialize iterator 'it' to walk the binary container starting at
+ * 'buffer': decode the header word (container type, element count, scalar
+ * flag) and locate the JEntry array and the variable-length data area.
+ */
+static void
+parseBuffer(JsonbIterator *it, char *buffer)
+{
+	uint32		header = *(uint32 *) buffer;
+
+	it->type = header & (JB_FLAG_ARRAY | JB_FLAG_OBJECT);
+	it->nelems = header & JB_COUNT_MASK;
+	it->buffer = buffer;
+
+
+	buffer += sizeof(uint32);
+	it->array = (JEntry *) buffer;
+
+	it->state = jbi_start;
+
+	switch (it->type)
+	{
+		case JB_FLAG_ARRAY:
+			/* data area follows one JEntry per element */
+			it->data = buffer + it->nelems * sizeof(JEntry);
+			it->isScalar = (header & JB_FLAG_SCALAR) ? true : false;
+			Assert(it->isScalar == false || it->nelems == 1);
+			break;
+		case JB_FLAG_OBJECT:
+			/* objects store two JEntries (key and value) per pair */
+			it->data = buffer + it->nelems * sizeof(JEntry) * 2;
+			break;
+		default:
+			elog(PANIC, "impossible type: %08x", it->type);
+	}
+}
+
+/*
+ * Allocate and initialize an iterator over the binary jsonb at 'buffer'.
+ */
+JsonbIterator *
+JsonbIteratorInit(char *buffer)
+{
+	JsonbIterator *it = palloc(sizeof(*it));
+
+	parseBuffer(it, buffer);
+	it->next = NULL;
+
+	return it;
+}
+
+/*
+ * Fill *v from JEntry 'e' of the current iterator.  For a nested container,
+ * either return it as a jbvBinary value (skipNested) or push a child
+ * iterator onto the stack and return true, telling the caller to recurse.
+ * Returns false when *v was filled with a leaf (or binary) value.
+ */
+static bool
+formAnswer(JsonbIterator **it, JsonbValue *v, JEntry * e, bool skipNested)
+{
+	if (JBE_ISSTRING(*e))
+	{
+		v->type = jbvString;
+		v->string.val = (*it)->data + JBE_OFF(*e);
+		v->string.len = JBE_LEN(*e);
+		v->size = sizeof(JEntry) + v->string.len;
+
+		return false;
+	}
+	else if (JBE_ISBOOL(*e))
+	{
+		v->type = jbvBool;
+		v->boolean = (JBE_ISBOOL_TRUE(*e)) ? true : false;
+		v->size = sizeof(JEntry);
+
+		return false;
+	}
+	else if (JBE_ISNUMERIC(*e))
+	{
+		v->type = jbvNumeric;
+		v->numeric = (Numeric) ((*it)->data + INTALIGN(JBE_OFF(*e)));
+
+		v->size = 2 * sizeof(JEntry) + VARSIZE_ANY(v->numeric);
+
+		return false;
+	}
+	else if (JBE_ISNULL(*e))
+	{
+		v->type = jbvNull;
+		v->size = sizeof(JEntry);
+
+		return false;
+	}
+	else if (skipNested)
+	{
+		/* nested container, but the caller does not want to descend */
+		v->type = jbvBinary;
+		v->binary.data = (*it)->data + INTALIGN(JBE_OFF(*e));
+		v->binary.len = JBE_LEN(*e) - (INTALIGN(JBE_OFF(*e)) - JBE_OFF(*e));
+		v->size = v->binary.len + 2 * sizeof(JEntry);
+
+		return false;
+	}
+	else
+	{
+		/* descend: push a child iterator for the nested container */
+		JsonbIterator *nit = palloc(sizeof(*nit));
+
+		parseBuffer(nit, (*it)->data + INTALIGN(JBE_OFF(*e)));
+		nit->next = *it;
+		*it = nit;
+
+		return true;
+	}
+}
+
+/*
+ * Pop the iterator stack: free the current iterator and return its parent
+ * (NULL when the root container has been exhausted).
+ */
+static JsonbIterator *
+up(JsonbIterator *it)
+{
+	JsonbIterator *v = it->next;
+
+	pfree(it);
+
+	return v;
+}
+
+/*
+ * Advance the iterator and return the next WJB_* token, filling *v with
+ * the associated value where applicable.  Returns 0 when iteration is
+ * complete.  With skipNested = true, nested containers are returned as
+ * single jbvBinary values instead of being descended into.
+ */
+int
+JsonbIteratorGet(JsonbIterator **it, JsonbValue *v, bool skipNested)
+{
+	int			res;
+
+	if (*it == NULL)
+		return 0;
+
+	/*
+	 * Encode all possible states by one integer. That's possible because enum
+	 * members of JsonbIterator->state uses different bits than
+	 * JB_FLAG_ARRAY/JB_FLAG_OBJECT. See definition of JsonbIterator
+	 */
+
+	switch ((*it)->type | (*it)->state)
+	{
+		case JB_FLAG_ARRAY | jbi_start:
+			(*it)->state = jbi_elem;
+			(*it)->i = 0;
+			v->type = jbvArray;
+			v->array.nelems = (*it)->nelems;
+			res = WJB_BEGIN_ARRAY;
+			v->array.scalar = (*it)->isScalar;
+			break;
+		case JB_FLAG_ARRAY | jbi_elem:
+			if ((*it)->i >= (*it)->nelems)
+			{
+				*it = up(*it);
+				res = WJB_END_ARRAY;
+			}
+			else if (formAnswer(it, v, &(*it)->array[(*it)->i++], skipNested))
+			{
+				/* descended into a nested container: emit its BEGIN token */
+				res = JsonbIteratorGet(it, v, skipNested);
+			}
+			else
+			{
+				res = WJB_ELEM;
+			}
+			break;
+		case JB_FLAG_OBJECT | jbi_start:
+			(*it)->state = jbi_key;
+			(*it)->i = 0;
+			v->type = jbvHash;
+			v->hash.npairs = (*it)->nelems;
+			res = WJB_BEGIN_OBJECT;
+			break;
+		case JB_FLAG_OBJECT | jbi_key:
+			if ((*it)->i >= (*it)->nelems)
+			{
+				*it = up(*it);
+				res = WJB_END_OBJECT;
+			}
+			else
+			{
+				/* keys are always scalar strings, so never descend */
+				formAnswer(it, v, &(*it)->array[(*it)->i * 2], false);
+				(*it)->state = jbi_value;
+				res = WJB_KEY;
+			}
+			break;
+		case JB_FLAG_OBJECT | jbi_value:
+			(*it)->state = jbi_key;
+			if (formAnswer(it, v, &(*it)->array[((*it)->i++) * 2 + 1], skipNested))
+				res = JsonbIteratorGet(it, v, skipNested);
+			else
+				res = WJB_VALUE;
+			break;
+		default:
+
+			/*
+			 * report the same type|state combination the switch dispatched
+			 * on; the previous "&" always printed 0 because the type and
+			 * state bits are disjoint
+			 */
+			elog(PANIC, "unknown state %08x", (*it)->type | (*it)->state);
+	}
+
+	return res;
+}
+
+/****************************************************************************
+ *		  Transformation from tree to binary representation of jsonb		*
+ ****************************************************************************/
+/* state for compressing a JsonbValue tree into a flat binary buffer */
+typedef struct CompressState
+{
+	char	   *begin;			/* start of the output buffer */
+	char	   *ptr;			/* current write position */
+
+	/* one entry per nesting level of the container currently being built */
+	struct
+	{
+		uint32		i;			/* index of next element/pair at this level */
+		uint32	   *header;		/* location of this level's header word */
+		JEntry	   *array;		/* this level's JEntry array */
+		char	   *begin;		/* where this level's data started */
+	}		   *levelstate, *lptr, *pptr;
+
+	uint32		maxlevel;		/* allocated length of levelstate */
+
+}	CompressState;
+
+/* shorthand accessors used by the compress callbacks below */
+#define curLevelState	state->lptr
+#define prevLevelState	state->pptr
+
+/*
+ * Emit one scalar (or pre-compressed binary) value: write its payload to
+ * state->ptr and fill in JEntry number 'i' at nesting depth 'level'.
+ * JEntry offsets are cumulative, so each entry's length field is added to
+ * the previous entry's end position.  Numeric and binary payloads are
+ * padded to the next int alignment boundary first.
+ */
+static void
+putJEntryString(CompressState * state, JsonbValue *value, uint32 level, uint32 i)
+{
+	curLevelState = state->levelstate + level;
+
+	if (i == 0)
+		curLevelState->array[0].entry = JENTRY_ISFIRST;
+	else
+		curLevelState->array[i].entry = 0;
+
+	switch (value->type)
+	{
+		case jbvNull:
+			curLevelState->array[i].entry |= JENTRY_ISNULL;
+
+			if (i > 0)
+				curLevelState->array[i].entry |=
+					curLevelState->array[i - 1].entry & JENTRY_POSMASK;
+			break;
+		case jbvString:
+			memcpy(state->ptr, value->string.val, value->string.len);
+			state->ptr += value->string.len;
+
+			if (i == 0)
+				curLevelState->array[i].entry |= value->string.len;
+			else
+				curLevelState->array[i].entry |=
+					(curLevelState->array[i - 1].entry & JENTRY_POSMASK) +
+					value->string.len;
+			break;
+		case jbvBool:
+			curLevelState->array[i].entry |= (value->boolean) ?
+				JENTRY_ISTRUE : JENTRY_ISFALSE;
+
+			if (i > 0)
+				curLevelState->array[i].entry |=
+					curLevelState->array[i - 1].entry & JENTRY_POSMASK;
+			break;
+		case jbvNumeric:
+			{
+				int			addlen = INTALIGN(state->ptr - state->begin) -
+				(state->ptr - state->begin);
+				int			numlen = VARSIZE_ANY(value->numeric);
+
+				/* intentional fall-through: write addlen padding bytes */
+				switch (addlen)
+				{
+					case 3:
+						*state->ptr = '\0';
+						state->ptr++;
+					case 2:
+						*state->ptr = '\0';
+						state->ptr++;
+					case 1:
+						*state->ptr = '\0';
+						state->ptr++;
+					case 0:
+					default:
+						break;
+				}
+
+				memcpy(state->ptr, value->numeric, numlen);
+				state->ptr += numlen;
+
+				curLevelState->array[i].entry |= JENTRY_ISNUMERIC;
+				if (i == 0)
+					curLevelState->array[i].entry |= addlen + numlen;
+				else
+					curLevelState->array[i].entry |=
+						(curLevelState->array[i - 1].entry & JENTRY_POSMASK) +
+						addlen + numlen;
+				break;
+			}
+		case jbvBinary:
+			{
+				int			addlen = INTALIGN(state->ptr - state->begin) -
+				(state->ptr - state->begin);
+
+				/* intentional fall-through: write addlen padding bytes */
+				switch (addlen)
+				{
+					case 3:
+						*state->ptr = '\0';
+						state->ptr++;
+					case 2:
+						*state->ptr = '\0';
+						state->ptr++;
+					case 1:
+						*state->ptr = '\0';
+						state->ptr++;
+					case 0:
+					default:
+						break;
+				}
+
+				memcpy(state->ptr, value->binary.data, value->binary.len);
+				state->ptr += value->binary.len;
+
+				curLevelState->array[i].entry |= JENTRY_ISNEST;
+
+				if (i == 0)
+					curLevelState->array[i].entry |= addlen + value->binary.len;
+				else
+					curLevelState->array[i].entry |=
+						(curLevelState->array[i - 1].entry & JENTRY_POSMASK) +
+						addlen + value->binary.len;
+			}
+			break;
+		default:
+			elog(PANIC, "Unsupported JsonbValue type: %d", value->type);
+	}
+}
+
+/*
+ * walk_jsonb_cb callback for compressJsonb(): serialize each WJB_* event
+ * into the flat binary format.  Container begin/end events open/close a
+ * per-level state entry; scalar events are delegated to putJEntryString().
+ * When a nested container is closed, its total length is recorded in the
+ * parent level's JEntry.
+ */
+static void
+compressCallback(void *arg, JsonbValue *value, uint32 flags, uint32 level)
+{
+	CompressState *state = arg;
+
+	/* grow the per-level state array on demand */
+	if (level == state->maxlevel)
+	{
+		state->maxlevel *= 2;
+		state->levelstate = repalloc(state->levelstate,
+							   sizeof(*state->levelstate) * state->maxlevel);
+	}
+
+	curLevelState = state->levelstate + level;
+
+	if (flags & (WJB_BEGIN_ARRAY | WJB_BEGIN_OBJECT))
+	{
+		Assert(((flags & WJB_BEGIN_ARRAY) && value->type == jbvArray) ||
+			   ((flags & WJB_BEGIN_OBJECT) && value->type == jbvHash));
+
+		curLevelState->begin = state->ptr;
+
+		/* intentional fall-through: pad to int alignment */
+		switch (INTALIGN(state->ptr - state->begin) -
+				(state->ptr - state->begin))
+		{
+			case 3:
+				*state->ptr = '\0';
+				state->ptr++;
+			case 2:
+				*state->ptr = '\0';
+				state->ptr++;
+			case 1:
+				*state->ptr = '\0';
+				state->ptr++;
+			case 0:
+			default:
+				break;
+		}
+
+		curLevelState->header = (uint32 *) state->ptr;
+		state->ptr += sizeof(*curLevelState->header);
+
+		curLevelState->array = (JEntry *) state->ptr;
+		curLevelState->i = 0;
+
+		if (value->type == jbvArray)
+		{
+			*curLevelState->header = value->array.nelems | JB_FLAG_ARRAY;
+			state->ptr += sizeof(JEntry) * value->array.nelems;
+
+			if (value->array.scalar)
+			{
+				/* scalar pseudo-array can only appear at the root */
+				Assert(value->array.nelems == 1);
+				Assert(level == 0);
+				*curLevelState->header |= JB_FLAG_SCALAR;
+			}
+		}
+		else
+		{
+			*curLevelState->header = value->hash.npairs | JB_FLAG_OBJECT;
+			state->ptr += sizeof(JEntry) * value->hash.npairs * 2;
+		}
+	}
+	else if (flags & WJB_ELEM)
+	{
+		putJEntryString(state, value, level, curLevelState->i);
+		curLevelState->i++;
+	}
+	else if (flags & WJB_KEY)
+	{
+		Assert(value->type == jbvString);
+
+		putJEntryString(state, value, level, curLevelState->i * 2);
+	}
+	else if (flags & WJB_VALUE)
+	{
+		putJEntryString(state, value, level, curLevelState->i * 2 + 1);
+		curLevelState->i++;
+	}
+	else if (flags & (WJB_END_ARRAY | WJB_END_OBJECT))
+	{
+		uint32		len,
+					i;
+
+		Assert(((flags & WJB_END_ARRAY) && value->type == jbvArray) ||
+			   ((flags & WJB_END_OBJECT) && value->type == jbvHash));
+		if (level == 0)
+			return;
+
+		/* record this container's length in the parent's JEntry */
+		len = state->ptr - (char *) curLevelState->begin;
+
+		prevLevelState = curLevelState - 1;
+
+		if (*prevLevelState->header & JB_FLAG_ARRAY)
+		{
+			i = prevLevelState->i;
+
+			prevLevelState->array[i].entry = JENTRY_ISNEST;
+
+			if (i == 0)
+				prevLevelState->array[0].entry |= JENTRY_ISFIRST | len;
+			else
+				prevLevelState->array[i].entry |=
+					(prevLevelState->array[i - 1].entry & JENTRY_POSMASK) + len;
+		}
+		else if (*prevLevelState->header & JB_FLAG_OBJECT)
+		{
+			i = 2 * prevLevelState->i + 1;		/* VALUE, not a KEY */
+
+			prevLevelState->array[i].entry = JENTRY_ISNEST;
+
+			prevLevelState->array[i].entry |=
+				(prevLevelState->array[i - 1].entry & JENTRY_POSMASK) + len;
+		}
+		else
+		{
+			elog(PANIC, "Wrong parent");
+		}
+
+		Assert(state->ptr - curLevelState->begin <= value->size);
+		prevLevelState->i++;
+	}
+	else
+	{
+		elog(PANIC, "Wrong flags");
+	}
+}
+
+/*
+ * Serialize a JsonbValue tree into the preallocated buffer (which must be
+ * at least v->size bytes) and return the number of bytes actually written.
+ */
+uint32
+compressJsonb(JsonbValue *v, char *buffer)
+{
+	uint32		l = 0;
+	CompressState state;
+
+	state.begin = state.ptr = buffer;
+	state.maxlevel = 8;			/* initial guess; grown on demand */
+	state.levelstate = palloc(sizeof(*state.levelstate) * state.maxlevel);
+
+	walkUncompressedJsonb(v, compressCallback, &state);
+
+	l = state.ptr - buffer;
+	Assert(l <= v->size);
+
+	return l;
+}
+
+/****************************************************************************
+ *					Iteration-like forming jsonb							*
+ ****************************************************************************/
+/*
+ * Push a new level onto the ToJsonbState stack and return it; the previous
+ * top becomes its 'next'.
+ */
+static ToJsonbState *
+pushState(ToJsonbState ** state)
+{
+	ToJsonbState *ns = palloc(sizeof(*ns));
+
+	ns->next = *state;
+	return ns;
+}
+
+/*
+ * Append element *v to the array being built at this state level, growing
+ * the elems buffer as needed and accumulating the array's size estimate.
+ */
+static void
+appendArray(ToJsonbState * state, JsonbValue *v)
+{
+	JsonbValue *a = &state->v;
+
+	Assert(a->type == jbvArray);
+
+	if (a->array.nelems >= state->size)
+	{
+		state->size *= 2;
+		a->array.elems = repalloc(a->array.elems,
+								  sizeof(*a->array.elems) * state->size);
+	}
+
+	a->array.elems[a->array.nelems++] = *v;
+
+	a->size += v->size;
+}
+
+/*
+ * Store key *v in the next pair of the hash being built at this state
+ * level, recording its insertion order (used to resolve duplicate keys in
+ * uniqueJsonbValue).  npairs is advanced later by appendValue().
+ */
+static void
+appendKey(ToJsonbState * state, JsonbValue *v)
+{
+	JsonbValue *h = &state->v;
+
+	Assert(h->type == jbvHash);
+
+	if (h->hash.npairs >= state->size)
+	{
+		state->size *= 2;
+		h->hash.pairs = repalloc(h->hash.pairs,
+								 sizeof(*h->hash.pairs) * state->size);
+	}
+
+	h->hash.pairs[h->hash.npairs].key = *v;
+	h->hash.pairs[h->hash.npairs].order = h->hash.npairs;
+
+	h->size += v->size;
+}
+
+/*
+ * Store value *v in the pair whose key was set by the preceding
+ * appendKey() call, completing the pair (npairs is advanced here).
+ */
+static void
+appendValue(ToJsonbState * state, JsonbValue *v)
+{
+	JsonbValue *h = &state->v;
+
+	Assert(h->type == jbvHash);
+
+	h->hash.pairs[h->hash.npairs++].value = *v;
+
+	h->size += v->size;
+}
+
+/*
+ * Push a value onto the JsonbValue tree being built in *state, following
+ * the same WJB_* event protocol as the iterator.  With r = WJB_END_OBJECT
+ * and v = NULL the hash's keys are sorted and de-duplicated; otherwise the
+ * caller is trusted to have pushed keys already ordered and unique.
+ * The initial ToJsonbState is NULL.  Returns the container affected by the
+ * event (NULL for scalar pushes), so the WJB_END_* call for the root level
+ * yields the completed tree.
+ */
+JsonbValue *
+pushJsonbValue(ToJsonbState ** state, int r /* WJB_* */ , JsonbValue *v)
+{
+	JsonbValue *h = NULL;
+
+	switch (r)
+	{
+		case WJB_BEGIN_ARRAY:
+			*state = pushState(state);
+			h = &(*state)->v;
+			(*state)->v.type = jbvArray;
+			(*state)->v.size = 3 * sizeof(JEntry);
+			(*state)->v.array.nelems = 0;
+			(*state)->v.array.scalar = (v && v->array.scalar) ? true : false;
+			/* use the hinted element count, if any, as initial capacity */
+			(*state)->size = (v && v->type == jbvArray && v->array.nelems > 0)
+				? v->array.nelems : 4;
+			(*state)->v.array.elems = palloc(sizeof(*(*state)->v.array.elems) *
+											 (*state)->size);
+			break;
+		case WJB_BEGIN_OBJECT:
+			*state = pushState(state);
+			h = &(*state)->v;
+			(*state)->v.type = jbvHash;
+			(*state)->v.size = 3 * sizeof(JEntry);
+			(*state)->v.hash.npairs = 0;
+			(*state)->size = (v && v->type == jbvHash && v->hash.npairs > 0) ?
+				v->hash.npairs : 4;
+			(*state)->v.hash.pairs = palloc(sizeof(*(*state)->v.hash.pairs) *
+											(*state)->size);
+			break;
+		case WJB_ELEM:
+			Assert(v->type == jbvNull || v->type == jbvString ||
+				   v->type == jbvBool || v->type == jbvNumeric ||
+				   v->type == jbvBinary);
+			appendArray(*state, v);
+			break;
+		case WJB_KEY:
+			Assert(v->type == jbvString);
+			appendKey(*state, v);
+			break;
+		case WJB_VALUE:
+			Assert(v->type == jbvNull || v->type == jbvString ||
+				   v->type == jbvBool || v->type == jbvNumeric ||
+				   v->type == jbvBinary);
+			appendValue(*state, v);
+			break;
+		case WJB_END_OBJECT:
+			h = &(*state)->v;
+			/* v != NULL => we believe that keys were already sorted */
+			if (v == NULL)
+				uniqueJsonbValue(h);
+
+			/*
+			 * no break here - end of hash requires some extra work but rest
+			 * is the same as for array
+			 */
+		case WJB_END_ARRAY:
+			h = &(*state)->v;
+
+			/*
+			 * pop stack and push current array/hash as value in parent
+			 * array/hash
+			 */
+			*state = (*state)->next;
+			if (*state)
+			{
+				switch ((*state)->v.type)
+				{
+					case jbvArray:
+						appendArray(*state, h);
+						break;
+					case jbvHash:
+						appendValue(*state, h);
+						break;
+					default:
+						elog(PANIC, "wrong parent type: %d", (*state)->v.type);
+				}
+			}
+			break;
+		default:
+			elog(PANIC, "wrong type: %08x", r);
+	}
+
+	return h;
+}
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index e5b093e..76da613 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -27,6 +27,7 @@
 #include "utils/builtins.h"
 #include "utils/hsearch.h"
 #include "utils/json.h"
+#include "utils/jsonb.h"
 #include "utils/jsonapi.h"
 #include "utils/lsyscache.h"
 #include "utils/memutils.h"
@@ -51,6 +52,7 @@ static inline Datum get_path_all(PG_FUNCTION_ARGS, bool as_text);
 static inline text *get_worker(text *json, char *field, int elem_index,
 		   char **tpath, int *ipath, int npath,
 		   bool normalize_results);
+static inline Datum get_jsonb_path_all(PG_FUNCTION_ARGS, bool as_text);
 
 /* semantic action functions for json_array_length */
 static void alen_object_start(void *state);
@@ -59,6 +61,7 @@ static void alen_array_element_start(void *state, bool isnull);
 
 /* common worker for json_each* functions */
 static inline Datum each_worker(PG_FUNCTION_ARGS, bool as_text);
+static inline Datum each_worker_jsonb(PG_FUNCTION_ARGS, bool as_text);
 
 /* semantic action functions for json_each */
 static void each_object_field_start(void *state, char *fname, bool isnull);
@@ -211,6 +214,9 @@ typedef struct PopulateRecordsetState
 	MemoryContext fn_mcxt;		/* used to stash IO funcs */
 } PopulateRecordsetState;
 
+/* turn a jsonb object into a record */
+static inline void make_row_from_rec_and_jsonb(Jsonb *element, PopulateRecordsetState *state);
+
 /*
  * SQL function json_object-keys
  *
@@ -218,12 +224,89 @@ typedef struct PopulateRecordsetState
  *
  * This SRF operates in value-per-call mode. It processes the
  * object during the first call, and the keys are simply stashed
- * in an array, whise size is expanded as necessary. This is probably
+ * in an array, whose size is expanded as necessary. This is probably
  * safe enough for a list of keys of a single object, since they are
  * limited in size to NAMEDATALEN and the number of keys is unlikely to
  * be so huge that it has major memory implications.
  */
 
+Datum
+jsonb_object_keys(PG_FUNCTION_ARGS)
+{
+	FuncCallContext *funcctx;
+	OkeysState *state;
+	int			i;
+
+	if (SRF_IS_FIRSTCALL())
+	{
+		MemoryContext oldcontext;
+		Jsonb	   *jb = PG_GETARG_JSONB(0);
+		bool		skipNested = false;
+		JsonbIterator *it;
+		JsonbValue	v;
+		int			r = 0;
+
+		if (JB_ROOT_IS_SCALAR(jb))
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("cannot call jsonb_object_keys on a scalar")));
+		else if (JB_ROOT_IS_ARRAY(jb))
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+					 errmsg("cannot call jsonb_object_keys on an array")));
+
+		funcctx = SRF_FIRSTCALL_INIT();
+		oldcontext = MemoryContextSwitchTo(funcctx->multi_call_memory_ctx);
+
+		state = palloc(sizeof(OkeysState));
+
+		state->result_size = JB_ROOT_COUNT(jb);
+		state->result_count = 0;
+		state->sent_count = 0;
+		state->result = palloc(state->result_size * sizeof(char *));
+
+		it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+		while ((r = JsonbIteratorGet(&it, &v, skipNested)) != 0)
+		{
+			skipNested = true;
+
+			if (r == WJB_KEY)
+			{
+				char	   *cstr;
+
+				cstr = palloc((v.string.len + 1) * sizeof(char));
+				memcpy(cstr, v.string.val, v.string.len);
+				cstr[v.string.len] = '\0';
+				state->result[state->result_count++] = cstr;
+			}
+		}
+
+
+		MemoryContextSwitchTo(oldcontext);
+		funcctx->user_fctx = (void *) state;
+
+	}
+
+	funcctx = SRF_PERCALL_SETUP();
+	state = (OkeysState *) funcctx->user_fctx;
+
+	if (state->sent_count < state->result_count)
+	{
+		char	   *nxt = state->result[state->sent_count++];
+
+		SRF_RETURN_NEXT(funcctx, CStringGetTextDatum(nxt));
+	}
+
+	/* cleanup to reduce or eliminate memory leaks */
+	for (i = 0; i < state->result_count; i++)
+		pfree(state->result[i]);
+	pfree(state->result);
+	pfree(state);
+
+	SRF_RETURN_DONE(funcctx);
+}
+
 
 Datum
 json_object_keys(PG_FUNCTION_ARGS)
@@ -336,9 +419,9 @@ okeys_scalar(void *state, char *token, JsonTokenType tokentype)
 }
 
 /*
- * json getter functions
+ * json and jsonb getter functions
  * these implement the -> ->> #> and #>> operators
- * and the json_extract_path*(json, text, ...) functions
+ * and the json{b?}_extract_path*(json, text, ...) functions
  */
 
 
@@ -359,6 +442,51 @@ json_object_field(PG_FUNCTION_ARGS)
 }
 
 Datum
+jsonb_object_field(PG_FUNCTION_ARGS)
+{
+	Jsonb	   *jb = PG_GETARG_JSONB(0);
+	char	   *key = text_to_cstring(PG_GETARG_TEXT_P(1));
+	int			klen = strlen(key);
+	JsonbIterator *it;
+	JsonbValue	v;
+	int			r = 0;
+	bool		skipNested = false;
+
+	if (JB_ROOT_IS_SCALAR(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot call jsonb_object_field on a scalar")));
+	else if (JB_ROOT_IS_ARRAY(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot call jsonb_object_field on an array")));
+
+	Assert(JB_ROOT_IS_OBJECT(jb));
+
+	it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+	while ((r = JsonbIteratorGet(&it, &v, skipNested)) != 0)
+	{
+		skipNested = true;
+
+		if (r == WJB_KEY)
+		{
+			if (klen == v.string.len && strncmp(key, v.string.val, klen) == 0)
+			{
+				/*
+				 * The next thing the iterator fetches should be the value, no
+				 * matter what shape it is.
+				 */
+				r = JsonbIteratorGet(&it, &v, skipNested);
+				PG_RETURN_JSONB(JsonbValueToJsonb(&v));
+			}
+		}
+	}
+
+	PG_RETURN_NULL();
+}
+
+Datum
 json_object_field_text(PG_FUNCTION_ARGS)
 {
 	text	   *json = PG_GETARG_TEXT_P(0);
@@ -375,6 +503,74 @@ json_object_field_text(PG_FUNCTION_ARGS)
 }
 
 Datum
+jsonb_object_field_text(PG_FUNCTION_ARGS)
+{
+	Jsonb	   *jb = PG_GETARG_JSONB(0);
+	char	   *key = text_to_cstring(PG_GETARG_TEXT_P(1));
+	int			klen = strlen(key);
+	JsonbIterator *it;
+	JsonbValue	v;
+	int			r = 0;
+	bool		skipNested = false;
+
+	if (JB_ROOT_IS_SCALAR(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot call jsonb_object_field_text on a scalar")));
+	else if (JB_ROOT_IS_ARRAY(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot call jsonb_object_field_text on an array")));
+
+	Assert(JB_ROOT_IS_OBJECT(jb));
+
+	it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+	while ((r = JsonbIteratorGet(&it, &v, skipNested)) != 0)
+	{
+		skipNested = true;
+
+		if (r == WJB_KEY)
+		{
+			if (klen == v.string.len && strncmp(key, v.string.val, klen) == 0)
+			{
+				text	   *result;
+
+				/*
+				 * The next thing the iterator fetches should be the value, no
+				 * matter what shape it is.
+				 */
+				r = JsonbIteratorGet(&it, &v, skipNested);
+
+				/*
+				 * if it's a scalar string it needs to be de-escaped,
+				 * otherwise just return the text
+				 */
+				if (v.type == jbvString)
+				{
+					result = cstring_to_text_with_len(v.string.val, v.string.len);
+				}
+				else if (v.type == jbvNull)
+				{
+					PG_RETURN_NULL();
+				}
+				else
+				{
+					StringInfo	jtext = makeStringInfo();
+					Jsonb	   *tjb = JsonbValueToJsonb(&v);
+
+					(void) JsonbToCString(jtext, VARDATA(tjb), -1);
+					result = cstring_to_text_with_len(jtext->data, jtext->len);
+				}
+				PG_RETURN_TEXT_P(result);
+			}
+		}
+	}
+
+	PG_RETURN_NULL();
+}
+
+Datum
 json_array_element(PG_FUNCTION_ARGS)
 {
 	text	   *json = PG_GETARG_TEXT_P(0);
@@ -390,6 +586,44 @@ json_array_element(PG_FUNCTION_ARGS)
 }
 
 Datum
+jsonb_array_element(PG_FUNCTION_ARGS)
+{
+	Jsonb	   *jb = PG_GETARG_JSONB(0);
+	int			element = PG_GETARG_INT32(1);
+	JsonbIterator *it;
+	JsonbValue	v;
+	int			r = 0;
+	bool		skipNested = false;
+	int			element_number = 0;
+
+	if (JB_ROOT_IS_SCALAR(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot call jsonb_array_element on a scalar")));
+	else if (JB_ROOT_IS_OBJECT(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot call jsonb_array_element on an object")));
+
+	Assert(JB_ROOT_IS_ARRAY(jb));
+
+	it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+	while ((r = JsonbIteratorGet(&it, &v, skipNested)) != 0)
+	{
+		skipNested = true;
+
+		if (r == WJB_ELEM)
+		{
+			if (element_number++ == element)
+				PG_RETURN_JSONB(JsonbValueToJsonb(&v));
+		}
+	}
+
+	PG_RETURN_NULL();
+}
+
+Datum
 json_array_element_text(PG_FUNCTION_ARGS)
 {
 	text	   *json = PG_GETARG_TEXT_P(0);
@@ -405,6 +639,69 @@ json_array_element_text(PG_FUNCTION_ARGS)
 }
 
 Datum
+jsonb_array_element_text(PG_FUNCTION_ARGS)
+{
+	Jsonb	   *jb = PG_GETARG_JSONB(0);
+	int			element = PG_GETARG_INT32(1);
+	JsonbIterator *it;
+	JsonbValue	v;
+	int			r = 0;
+	bool		skipNested = false;
+	int			element_number = 0;
+
+
+	if (JB_ROOT_IS_SCALAR(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot call jsonb_array_element_text on a scalar")));
+	else if (JB_ROOT_IS_OBJECT(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+			   errmsg("cannot call jsonb_array_element_text on an object")));
+
+	Assert(JB_ROOT_IS_ARRAY(jb));
+
+	it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+	while ((r = JsonbIteratorGet(&it, &v, skipNested)) != 0)
+	{
+		skipNested = true;
+
+		if (r == WJB_ELEM)
+		{
+			if (element_number++ == element)
+			{
+				/*
+				 * if it's a scalar string it needs to be de-escaped,
+				 * otherwise just return the text
+				 */
+				text	   *result;
+
+				if (v.type == jbvString)
+				{
+					result = cstring_to_text_with_len(v.string.val, v.string.len);
+				}
+				else if (v.type == jbvNull)
+				{
+					PG_RETURN_NULL();
+				}
+				else
+				{
+					StringInfo	jtext = makeStringInfo();
+					Jsonb	   *tjb = JsonbValueToJsonb(&v);
+
+					(void) JsonbToCString(jtext, VARDATA(tjb), -1);
+					result = cstring_to_text_with_len(jtext->data, jtext->len);
+				}
+				PG_RETURN_TEXT_P(result);
+			}
+		}
+	}
+
+	PG_RETURN_NULL();
+}
+
+Datum
 json_extract_path(PG_FUNCTION_ARGS)
 {
 	return get_path_all(fcinfo, false);
@@ -422,7 +719,8 @@ json_extract_path_text(PG_FUNCTION_ARGS)
 static inline Datum
 get_path_all(PG_FUNCTION_ARGS, bool as_text)
 {
-	text	   *json = PG_GETARG_TEXT_P(0);
+	Oid			val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
+	text	   *json;
 	ArrayType  *path = PG_GETARG_ARRAYTYPE_P(1);
 	text	   *result;
 	Datum	   *pathtext;
@@ -434,6 +732,19 @@ get_path_all(PG_FUNCTION_ARGS, bool as_text)
 	long		ind;
 	char	   *endptr;
 
+	Assert(val_type == JSONOID || val_type == JSONBOID);
+	if (val_type == JSONOID)
+	{
+		/* just get the text */
+		json = PG_GETARG_TEXT_P(0);
+	}
+	else
+	{
+		Jsonb	   *jb = PG_GETARG_JSONB(0);
+
+		json = cstring_to_text(JsonbToCString(NULL, (JB_ISEMPTY(jb)) ? NULL : VARDATA(jb), VARSIZE(jb)));
+	}
+
 	if (array_contains_nulls(path))
 		ereport(ERROR,
 				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
@@ -472,9 +783,17 @@ get_path_all(PG_FUNCTION_ARGS, bool as_text)
 	result = get_worker(json, NULL, -1, tpath, ipath, npath, as_text);
 
 	if (result != NULL)
-		PG_RETURN_TEXT_P(result);
+	{
+		if (val_type == JSONOID || as_text)
+			PG_RETURN_TEXT_P(result);
+		else
+			PG_RETURN_JSONB(DirectFunctionCall1(jsonb_in, CStringGetDatum(text_to_cstring(result))));
+	}
 	else
+	{
+		/* null is null regardless */
 		PG_RETURN_NULL();
+	}
 }
 
 /*
@@ -654,7 +973,7 @@ get_object_field_end(void *state, char *fname, bool isnull)
 		/*
 		 * make a text object from the string from the prevously noted json
 		 * start up to the end of the previous token (the lexer is by now
-		 * ahead of us on whatevere came after what we're interested in).
+		 * ahead of us on whatever came after what we're interested in).
 		 */
 		int			len = _state->lex->prev_token_terminator - _state->result_start;
 
@@ -808,18 +1127,134 @@ get_scalar(void *state, char *token, JsonTokenType tokentype)
 
 }
 
+Datum
+jsonb_extract_path(PG_FUNCTION_ARGS)
+{
+	return get_jsonb_path_all(fcinfo, false);
+}
+
+Datum
+jsonb_extract_path_text(PG_FUNCTION_ARGS)
+{
+	return get_jsonb_path_all(fcinfo, true);
+}
+
+static inline Datum
+get_jsonb_path_all(PG_FUNCTION_ARGS, bool as_text)
+{
+	Jsonb	   *jb = PG_GETARG_JSONB(0);
+	ArrayType  *path = PG_GETARG_ARRAYTYPE_P(1);
+	Datum	   *pathtext;
+	bool	   *pathnulls;
+	int			npath;
+	int			i;
+	Jsonb	   *res;
+	bool		have_object = false,
+				have_array = false;
+	JsonbValue *jbvp;
+	JsonbValue	tv;
+
+
+	if (array_contains_nulls(path))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot call function with null path elements")));
+
+	deconstruct_array(path, TEXTOID, -1, false, 'i',
+					  &pathtext, &pathnulls, &npath);
+
+	if (JB_ROOT_IS_OBJECT(jb))
+		have_object = true;
+	else if (JB_ROOT_IS_ARRAY(jb) && !JB_ROOT_IS_SCALAR(jb))
+		have_array = true;
+
+	jbvp = (JsonbValue *) VARDATA(jb);
+
+	for (i = 0; i < npath; i++)
+	{
+		if (have_object)
+		{
+			jbvp = findUncompressedJsonbValue((char *) jbvp, JB_FLAG_OBJECT, NULL,
+											  VARDATA_ANY(pathtext[i]),
+											  VARSIZE_ANY_EXHDR(pathtext[i]));
+		}
+		else if (have_array)
+		{
+			long		lindex;
+			uint32		index;
+			char	   *indextext = TextDatumGetCString(pathtext[i]);
+			char	   *endptr;
+
+			lindex = strtol(indextext, &endptr, 10);
+			if (*endptr != '\0' || lindex > INT_MAX || lindex < 0)
+				PG_RETURN_NULL();
+			index = (uint32) lindex;
+			jbvp = getJsonbValue((char *) jbvp, JB_FLAG_ARRAY, index);
+		}
+		else
+		{
+			if (i == 0)
+				ereport(ERROR,
+						(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+						 errmsg("cannot extract path from a scalar")));
+			PG_RETURN_NULL();
+		}
+		if (jbvp == NULL)
+			PG_RETURN_NULL();
+		if (i == npath - 1)
+			break;
+		if (jbvp->type == jbvBinary)
+		{
+			JsonbIterator *it = JsonbIteratorInit(jbvp->binary.data);
+			int			r;
+
+			r = JsonbIteratorGet(&it, &tv, true);
+			jbvp = (JsonbValue *) jbvp->binary.data;
+			have_object = r == WJB_BEGIN_OBJECT;
+			have_array = r == WJB_BEGIN_ARRAY;
+		}
+		else
+		{
+			have_object = jbvp->type == jbvHash;
+			have_array = jbvp->type == jbvArray;
+		}
+	}
+
+	if (as_text)
+	{
+		if (jbvp->type == jbvString)
+			PG_RETURN_TEXT_P(cstring_to_text_with_len(jbvp->string.val, jbvp->string.len));
+		else if (jbvp->type == jbvNull)
+			PG_RETURN_NULL();
+	}
+
+	res = JsonbValueToJsonb(jbvp);
+
+	if (as_text)
+	{
+		PG_RETURN_TEXT_P(cstring_to_text(JsonbToCString(NULL, (JB_ISEMPTY(res)) ? NULL : VARDATA(res), VARSIZE(res))));
+	}
+	else
+	{
+		/* not text mode - just hand back the jsonb */
+		PG_RETURN_JSONB(res);
+	}
+}
+
 /*
  * SQL function json_array_length(json) -> int
  */
 Datum
 json_array_length(PG_FUNCTION_ARGS)
 {
-	text	   *json = PG_GETARG_TEXT_P(0);
+	text	   *json;
 
 	AlenState  *state;
-	JsonLexContext *lex = makeJsonLexContext(json, false);
+	JsonLexContext *lex;
 	JsonSemAction *sem;
 
+	json = PG_GETARG_TEXT_P(0);
+	lex = makeJsonLexContext(json, false);
 	state = palloc0(sizeof(AlenState));
 	sem = palloc0(sizeof(JsonSemAction));
 
@@ -839,6 +1274,23 @@ json_array_length(PG_FUNCTION_ARGS)
 	PG_RETURN_INT32(state->count);
 }
 
+Datum
+jsonb_array_length(PG_FUNCTION_ARGS)
+{
+	Jsonb	   *jb = PG_GETARG_JSONB(0);
+
+	if (JB_ROOT_IS_SCALAR(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot get array length of a scalar")));
+	else if (!JB_ROOT_IS_ARRAY(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot get array length of a non-array")));
+
+	PG_RETURN_INT32(JB_ROOT_COUNT(jb));
+}
+
 /*
  * These next two check ensure that the json is an array (since it can't be
  * a scalar or an object).
@@ -895,24 +1347,43 @@ json_each(PG_FUNCTION_ARGS)
 }
 
 Datum
+jsonb_each(PG_FUNCTION_ARGS)
+{
+	return each_worker_jsonb(fcinfo, false);
+}
+
+Datum
 json_each_text(PG_FUNCTION_ARGS)
 {
 	return each_worker(fcinfo, true);
 }
 
+Datum
+jsonb_each_text(PG_FUNCTION_ARGS)
+{
+	return each_worker_jsonb(fcinfo, true);
+}
+
 static inline Datum
-each_worker(PG_FUNCTION_ARGS, bool as_text)
+each_worker_jsonb(PG_FUNCTION_ARGS, bool as_text)
 {
-	text	   *json = PG_GETARG_TEXT_P(0);
-	JsonLexContext *lex = makeJsonLexContext(json, true);
-	JsonSemAction *sem;
+	Jsonb	   *jb = PG_GETARG_JSONB(0);
 	ReturnSetInfo *rsi;
-	MemoryContext old_cxt;
+	Tuplestorestate *tuple_store;
 	TupleDesc	tupdesc;
-	EachState  *state;
-
-	state = palloc0(sizeof(EachState));
-	sem = palloc0(sizeof(JsonSemAction));
+	TupleDesc	ret_tdesc;
+	MemoryContext old_cxt,
+				tmp_cxt;
+	bool		skipNested = false;
+	JsonbIterator *it;
+	JsonbValue	v;
+	int			r = 0;
+
+	if (!JB_ROOT_IS_OBJECT(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot call jsonb_each%s on a non-object",
+						as_text ? "_text" : "")));
 
 	rsi = (ReturnSetInfo *) fcinfo->resultinfo;
 
@@ -929,20 +1400,150 @@ each_worker(PG_FUNCTION_ARGS, bool as_text)
 
 	(void) get_call_result_type(fcinfo, NULL, &tupdesc);
 
-	/* make these in a sufficiently long-lived memory context */
 	old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
 
-	state->ret_tdesc = CreateTupleDescCopy(tupdesc);
-	BlessTupleDesc(state->ret_tdesc);
-	state->tuple_store =
+	ret_tdesc = CreateTupleDescCopy(tupdesc);
+	BlessTupleDesc(ret_tdesc);
+	tuple_store =
 		tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random,
 							  false, work_mem);
 
 	MemoryContextSwitchTo(old_cxt);
 
-	sem->semstate = (void *) state;
-	sem->array_start = each_array_start;
-	sem->scalar = each_scalar;
+	tmp_cxt = AllocSetContextCreate(CurrentMemoryContext,
+									"jsonb_each temporary cxt",
+									ALLOCSET_DEFAULT_MINSIZE,
+									ALLOCSET_DEFAULT_INITSIZE,
+									ALLOCSET_DEFAULT_MAXSIZE);
+
+
+	it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+	while ((r = JsonbIteratorGet(&it, &v, skipNested)) != 0)
+	{
+		skipNested = true;
+
+		if (r == WJB_KEY)
+		{
+			text	   *key;
+			HeapTuple	tuple;
+			Datum		values[2];
+			bool		nulls[2] = {false, false};
+
+			/* use the tmp context so we can clean up after each tuple is done */
+			old_cxt = MemoryContextSwitchTo(tmp_cxt);
+
+			key = cstring_to_text_with_len(v.string.val, v.string.len);
+
+			/*
+			 * The next thing the iterator fetches should be the value, no
+			 * matter what shape it is.
+			 */
+			r = JsonbIteratorGet(&it, &v, skipNested);
+
+			values[0] = PointerGetDatum(key);
+
+			if (as_text)
+			{
+				if (v.type == jbvNull)
+				{
+					/* a json null is an sql null in text mode */
+					nulls[1] = true;
+					values[1] = (Datum) NULL;
+				}
+				else
+				{
+					text	   *sv;
+
+					if (v.type == jbvString)
+					{
+						/* in text mode scalar strings should be dequoted */
+						sv = cstring_to_text_with_len(v.string.val, v.string.len);
+					}
+					else
+					{
+						/* turn anything else into a json string */
+						StringInfo	jtext = makeStringInfo();
+						Jsonb	   *jb = JsonbValueToJsonb(&v);
+
+						(void) JsonbToCString(jtext, VARDATA(jb), 2 * v.size);
+						sv = cstring_to_text_with_len(jtext->data, jtext->len);
+					}
+
+					values[1] = PointerGetDatum(sv);
+				}
+			}
+			else
+			{
+				/* not in text mode, just return the Jsonb */
+				Jsonb	   *val = JsonbValueToJsonb(&v);
+
+				values[1] = PointerGetDatum(val);
+			}
+
+			tuple = heap_form_tuple(ret_tdesc, values, nulls);
+
+			tuplestore_puttuple(tuple_store, tuple);
+
+			/* clean up and switch back */
+			MemoryContextSwitchTo(old_cxt);
+			MemoryContextReset(tmp_cxt);
+		}
+	}
+
+	rsi->setResult = tuple_store;
+	rsi->setDesc = ret_tdesc;
+
+	PG_RETURN_NULL();
+}
+
+
+static inline Datum
+each_worker(PG_FUNCTION_ARGS, bool as_text)
+{
+	text	   *json;
+	JsonLexContext *lex;
+	JsonSemAction *sem;
+	ReturnSetInfo *rsi;
+	MemoryContext old_cxt;
+	TupleDesc	tupdesc;
+	EachState  *state;
+
+	json = PG_GETARG_TEXT_P(0);
+
+	lex = makeJsonLexContext(json, true);
+	state = palloc0(sizeof(EachState));
+	sem = palloc0(sizeof(JsonSemAction));
+
+	rsi = (ReturnSetInfo *) fcinfo->resultinfo;
+
+	if (!rsi || !IsA(rsi, ReturnSetInfo) ||
+		(rsi->allowedModes & SFRM_Materialize) == 0 ||
+		rsi->expectedDesc == NULL)
+		ereport(ERROR,
+				(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+				 errmsg("set-valued function called in context that "
+						"cannot accept a set")));
+
+
+	rsi->returnMode = SFRM_Materialize;
+
+	(void) get_call_result_type(fcinfo, NULL, &tupdesc);
+
+	/* make these in a sufficiently long-lived memory context */
+	old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
+
+	state->ret_tdesc = CreateTupleDescCopy(tupdesc);
+	BlessTupleDesc(state->ret_tdesc);
+	state->tuple_store =
+		tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random,
+							  false, work_mem);
+
+	MemoryContextSwitchTo(old_cxt);
+
+	sem->semstate = (void *) state;
+	sem->array_start = each_array_start;
+	sem->scalar = each_scalar;
 	sem->object_field_start = each_object_field_start;
 	sem->object_field_end = each_object_field_end;
 
@@ -1067,19 +1668,114 @@ each_scalar(void *state, char *token, JsonTokenType tokentype)
  *
  * a lot of this processing is similar to the json_each* functions
  */
+
 Datum
-json_array_elements(PG_FUNCTION_ARGS)
+jsonb_array_elements(PG_FUNCTION_ARGS)
 {
-	text	   *json = PG_GETARG_TEXT_P(0);
+	Jsonb	   *jb = PG_GETARG_JSONB(0);
+	ReturnSetInfo *rsi;
+	Tuplestorestate *tuple_store;
+	TupleDesc	tupdesc;
+	TupleDesc	ret_tdesc;
+	MemoryContext old_cxt,
+				tmp_cxt;
+	bool		skipNested = false;
+	JsonbIterator *it;
+	JsonbValue	v;
+	int			r = 0;
+
+	if (JB_ROOT_IS_SCALAR(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot extract elements from a scalar")));
+	else if (!JB_ROOT_IS_ARRAY(jb))
+		ereport(ERROR,
+				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+				 errmsg("cannot extract elements from an object")));
 
-	/* elements doesn't need any escaped strings, so use false here */
-	JsonLexContext *lex = makeJsonLexContext(json, false);
+	rsi = (ReturnSetInfo *) fcinfo->resultinfo;
+
+	if (!rsi || !IsA(rsi, ReturnSetInfo) ||
+		(rsi->allowedModes & SFRM_Materialize) == 0 ||
+		rsi->expectedDesc == NULL)
+		ereport(ERROR,
+				(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
+				 errmsg("set-valued function called in context that "
+						"cannot accept a set")));
+
+
+	rsi->returnMode = SFRM_Materialize;
+
+	/* it's a simple type, so don't use get_call_result_type() */
+	tupdesc = rsi->expectedDesc;
+
+	old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
+
+	ret_tdesc = CreateTupleDescCopy(tupdesc);
+	BlessTupleDesc(ret_tdesc);
+	tuple_store =
+		tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random,
+							  false, work_mem);
+
+	MemoryContextSwitchTo(old_cxt);
+
+	tmp_cxt = AllocSetContextCreate(CurrentMemoryContext,
+									"jsonb_array_elements temporary cxt",
+									ALLOCSET_DEFAULT_MINSIZE,
+									ALLOCSET_DEFAULT_INITSIZE,
+									ALLOCSET_DEFAULT_MAXSIZE);
+
+
+	it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+	while ((r = JsonbIteratorGet(&it, &v, skipNested)) != 0)
+	{
+		skipNested = true;
+
+		if (r == WJB_ELEM)
+		{
+			HeapTuple	tuple;
+			Datum		values[1];
+			bool		nulls[1] = {false};
+			Jsonb	   *val;
+
+			/* use the tmp context so we can clean up after each tuple is done */
+			old_cxt = MemoryContextSwitchTo(tmp_cxt);
+
+			val = JsonbValueToJsonb(&v);
+			values[0] = PointerGetDatum(val);
+
+			tuple = heap_form_tuple(ret_tdesc, values, nulls);
+
+			tuplestore_puttuple(tuple_store, tuple);
+
+			/* clean up and switch back */
+			MemoryContextSwitchTo(old_cxt);
+			MemoryContextReset(tmp_cxt);
+		}
+	}
+
+	rsi->setResult = tuple_store;
+	rsi->setDesc = ret_tdesc;
+
+	PG_RETURN_NULL();
+}
+
+Datum
+json_array_elements(PG_FUNCTION_ARGS)
+{
+	text	   *json;
+	JsonLexContext *lex;
 	JsonSemAction *sem;
 	ReturnSetInfo *rsi;
 	MemoryContext old_cxt;
 	TupleDesc	tupdesc;
 	ElementsState *state;
 
+	json = PG_GETARG_TEXT_P(0);
+
+	/* elements doesn't need any escaped strings, so use false here */
+	lex = makeJsonLexContext(json, false);
 	state = palloc0(sizeof(ElementsState));
 	sem = palloc0(sizeof(JsonSemAction));
 
@@ -1211,15 +1907,24 @@ elements_scalar(void *state, char *token, JsonTokenType tokentype)
  * which is in turn partly adapted from record_out.
  *
  * The json is decomposed into a hash table, in which each
- * field in the record is then looked up by name.
+ * field in the record is then looked up by name. For jsonb
+ * we fetch the values direct from the object.
  */
 Datum
+jsonb_populate_record(PG_FUNCTION_ARGS)
+{
+	return json_populate_record(fcinfo);
+}
+
+Datum
 json_populate_record(PG_FUNCTION_ARGS)
 {
 	Oid			argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
+	Oid			jtype = get_fn_expr_argtype(fcinfo->flinfo, 1);
 	text	   *json;
+	Jsonb	   *jb = NULL;
 	bool		use_json_as_text;
-	HTAB	   *json_hash;
+	HTAB	   *json_hash = NULL;
 	HeapTupleHeader rec;
 	Oid			tupType;
 	int32		tupTypmod;
@@ -1231,15 +1936,15 @@ json_populate_record(PG_FUNCTION_ARGS)
 	int			i;
 	Datum	   *values;
 	bool	   *nulls;
-	char		fname[NAMEDATALEN];
-	JsonHashEntry *hashentry;
+
+	Assert(jtype == JSONOID || jtype == JSONBOID);
 
 	use_json_as_text = PG_ARGISNULL(2) ? false : PG_GETARG_BOOL(2);
 
 	if (!type_is_rowtype(argtype))
 		ereport(ERROR,
 				(errcode(ERRCODE_DATATYPE_MISMATCH),
-		errmsg("first argument of json_populate_record must be a row type")));
+				 errmsg("first argument of json%s_populate_record must be a row type", jtype == JSONBOID ? "b" : "")));
 
 	if (PG_ARGISNULL(0))
 	{
@@ -1268,18 +1973,31 @@ json_populate_record(PG_FUNCTION_ARGS)
 		tupTypmod = HeapTupleHeaderGetTypMod(rec);
 	}
 
-	json = PG_GETARG_TEXT_P(1);
+	if (jtype == JSONOID)
+	{
+		/* just get the text */
+		json = PG_GETARG_TEXT_P(1);
 
-	json_hash = get_json_object_as_hash(json, "json_populate_record", use_json_as_text);
+		json_hash = get_json_object_as_hash(json, "json_populate_record", use_json_as_text);
 
-	/*
-	 * if the input json is empty, we can only skip the rest if we were passed
-	 * in a non-null record, since otherwise there may be issues with domain
-	 * nulls.
-	 */
-	if (hash_get_num_entries(json_hash) == 0 && rec)
-		PG_RETURN_POINTER(rec);
+		/*
+		 * if the input json is empty, we can only skip the rest if we were
+		 * passed in a non-null record, since otherwise there may be issues
+		 * with domain nulls.
+		 */
+		if (hash_get_num_entries(json_hash) == 0 && rec)
+			PG_RETURN_POINTER(rec);
+
+	}
+	else
+	{
+		jb = PG_GETARG_JSONB(1);
 
+		/* same logic as for json */
+		if (JB_ISEMPTY(jb) && rec)
+			PG_RETURN_POINTER(rec);
+
+	}
 
 	tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
 	ncolumns = tupdesc->natts;
@@ -1342,7 +2060,9 @@ json_populate_record(PG_FUNCTION_ARGS)
 	{
 		ColumnIOData *column_info = &my_extra->columns[i];
 		Oid			column_type = tupdesc->attrs[i]->atttypid;
-		char	   *value;
+		JsonbValue *v = NULL;
+		char		fname[NAMEDATALEN];
+		JsonHashEntry *hashentry = NULL;
 
 		/* Ignore dropped columns in datatype */
 		if (tupdesc->attrs[i]->attisdropped)
@@ -1351,9 +2071,22 @@ json_populate_record(PG_FUNCTION_ARGS)
 			continue;
 		}
 
-		memset(fname, 0, NAMEDATALEN);
-		strncpy(fname, NameStr(tupdesc->attrs[i]->attname), NAMEDATALEN);
-		hashentry = hash_search(json_hash, fname, HASH_FIND, NULL);
+		if (jtype == JSONOID)
+		{
+
+			memset(fname, 0, NAMEDATALEN);
+			strncpy(fname, NameStr(tupdesc->attrs[i]->attname), NAMEDATALEN);
+			hashentry = hash_search(json_hash, fname, HASH_FIND, NULL);
+		}
+		else
+		{
+			if (!JB_ISEMPTY(jb))
+			{
+				char	   *key = NameStr(tupdesc->attrs[i]->attname);
+
+				v = findUncompressedJsonbValue(VARDATA(jb), JB_FLAG_OBJECT, NULL, key, strlen(key));
+			}
+		}
 
 		/*
 		 * we can't just skip here if the key wasn't found since we might have
@@ -1363,7 +2096,8 @@ json_populate_record(PG_FUNCTION_ARGS)
 		 * then every field which we don't populate needs to be run through
 		 * the input function just in case it's a domain type.
 		 */
-		if (hashentry == NULL && rec)
+		if (((jtype == JSONOID && hashentry == NULL) ||
+			 (jtype == JSONBOID && v == NULL)) && rec)
 			continue;
 
 		/*
@@ -1378,7 +2112,8 @@ json_populate_record(PG_FUNCTION_ARGS)
 						  fcinfo->flinfo->fn_mcxt);
 			column_info->column_type = column_type;
 		}
-		if (hashentry == NULL || hashentry->isnull)
+		if ((jtype == JSONOID && (hashentry == NULL || hashentry->isnull)) ||
+			(jtype == JSONBOID && (v == NULL || v->type == jbvNull)))
 		{
 			/*
 			 * need InputFunctionCall to happen even for nulls, so that domain
@@ -1388,12 +2123,39 @@ json_populate_record(PG_FUNCTION_ARGS)
 										  column_info->typioparam,
 										  tupdesc->attrs[i]->atttypmod);
 			nulls[i] = true;
+
 		}
 		else
 		{
-			value = hashentry->val;
 
-			values[i] = InputFunctionCall(&column_info->proc, value,
+			char	   *s = NULL;
+
+			if (jtype == JSONOID)
+			{
+				/* already done the hard work in the json case */
+				s = hashentry->val;
+			}
+			else
+			{
+				if (v->type == jbvString)
+					s = pnstrdup(v->string.val, v->string.len);
+				else if (v->type == jbvBool)
+					s = pnstrdup((v->boolean) ? "t" : "f", 1);
+				else if (v->type == jbvNumeric)
+					s = DatumGetCString(DirectFunctionCall1(numeric_out,
+											   PointerGetDatum(v->numeric)));
+				else if (!use_json_as_text)
+					ereport(ERROR,
+							(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+							 errmsg("cannot populate with a nested object unless use_json_as_text is true")));
+				else if (v->type == jbvBinary)
+					s = JsonbToCString(NULL, v->binary.data, v->binary.len);
+				else
+					/* not expected to happen */
+					elog(ERROR, "unexpected jsonb type");
+			}
+
+			values[i] = InputFunctionCall(&column_info->proc, s,
 										  column_info->typioparam,
 										  tupdesc->attrs[i]->atttypmod);
 			nulls[i] = false;
@@ -1559,10 +2321,140 @@ hash_scalar(void *state, char *token, JsonTokenType tokentype)
  * per object in the array.
  */
 Datum
+jsonb_populate_recordset(PG_FUNCTION_ARGS)
+{
+	return json_populate_recordset(fcinfo);
+}
+
+static inline void
+make_row_from_rec_and_jsonb(Jsonb *element, PopulateRecordsetState *state)
+{
+	Datum	   *values;
+	bool	   *nulls;
+	int			i;
+	RecordIOData *my_extra = state->my_extra;
+	int			ncolumns = my_extra->ncolumns;
+	TupleDesc	tupdesc = state->ret_tdesc;
+	HeapTupleHeader rec = state->rec;
+	HeapTuple	rettuple;
+
+	values = (Datum *) palloc(ncolumns * sizeof(Datum));
+	nulls = (bool *) palloc(ncolumns * sizeof(bool));
+
+	if (state->rec)
+	{
+		HeapTupleData tuple;
+
+		/* Build a temporary HeapTuple control structure */
+		tuple.t_len = HeapTupleHeaderGetDatumLength(state->rec);
+		ItemPointerSetInvalid(&(tuple.t_self));
+		tuple.t_tableOid = InvalidOid;
+		tuple.t_data = state->rec;
+
+		/* Break down the tuple into fields */
+		heap_deform_tuple(&tuple, tupdesc, values, nulls);
+	}
+	else
+	{
+		for (i = 0; i < ncolumns; ++i)
+		{
+			values[i] = (Datum) 0;
+			nulls[i] = true;
+		}
+	}
+
+	for (i = 0; i < ncolumns; ++i)
+	{
+		ColumnIOData *column_info = &my_extra->columns[i];
+		Oid			column_type = tupdesc->attrs[i]->atttypid;
+		JsonbValue *v = NULL;
+
+		/* Ignore dropped columns in datatype */
+		if (tupdesc->attrs[i]->attisdropped)
+		{
+			nulls[i] = true;
+			continue;
+		}
+
+		if (!JB_ISEMPTY(element))
+		{
+			char	   *key = NameStr(tupdesc->attrs[i]->attname);
+
+			v = findUncompressedJsonbValue(VARDATA(element), JB_FLAG_OBJECT, NULL, key, strlen(key));
+		}
+
+		/*
+		 * we can't just skip here if the key wasn't found since we might have
+		 * a domain to deal with. If we were passed in a non-null record
+		 * datum, we assume that the existing values are valid (if they're
+		 * not, then it's not our fault), but if we were passed in a null,
+		 * then every field which we don't populate needs to be run through
+		 * the input function just in case it's a domain type.
+		 */
+		if (v == NULL && rec)
+			continue;
+
+		/*
+		 * Prepare to convert the column value from text
+		 */
+		if (column_info->column_type != column_type)
+		{
+			getTypeInputInfo(column_type,
+							 &column_info->typiofunc,
+							 &column_info->typioparam);
+			fmgr_info_cxt(column_info->typiofunc, &column_info->proc,
+						  state->fn_mcxt);
+			column_info->column_type = column_type;
+		}
+		if (v == NULL || v->type == jbvNull)
+		{
+			/*
+			 * need InputFunctionCall to happen even for nulls, so that domain
+			 * checks are done
+			 */
+			values[i] = InputFunctionCall(&column_info->proc, NULL,
+										  column_info->typioparam,
+										  tupdesc->attrs[i]->atttypmod);
+			nulls[i] = true;
+		}
+		else
+		{
+			char	   *s = NULL;
+
+			if (v->type == jbvString)
+				s = pnstrdup(v->string.val, v->string.len);
+			else if (v->type == jbvBool)
+				s = pnstrdup((v->boolean) ? "t" : "f", 1);
+			else if (v->type == jbvNumeric)
+				s = DatumGetCString(DirectFunctionCall1(numeric_out,
+											   PointerGetDatum(v->numeric)));
+			else if (!state->use_json_as_text)
+				ereport(ERROR,
+						(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+						 errmsg("cannot populate with a nested object unless use_json_as_text is true")));
+			else if (v->type == jbvBinary)
+				s = JsonbToCString(NULL, v->binary.data, v->binary.len);
+			else
+				/* not expected to happen */
+				elog(ERROR, "unexpected jsonb type");
+
+			values[i] = InputFunctionCall(&column_info->proc, s,
+										  column_info->typioparam,
+										  tupdesc->attrs[i]->atttypmod);
+			nulls[i] = false;
+		}
+	}
+
+	rettuple = heap_form_tuple(tupdesc, values, nulls);
+
+	tuplestore_puttuple(state->tuple_store, rettuple);
+}
+
+Datum
 json_populate_recordset(PG_FUNCTION_ARGS)
 {
 	Oid			argtype = get_fn_expr_argtype(fcinfo->flinfo, 0);
-	text	   *json;
+	Oid			jtype = get_fn_expr_argtype(fcinfo->flinfo, 1);
 	bool		use_json_as_text;
 	ReturnSetInfo *rsi;
 	MemoryContext old_cxt;
@@ -1572,8 +2464,6 @@ json_populate_recordset(PG_FUNCTION_ARGS)
 	TupleDesc	tupdesc;
 	RecordIOData *my_extra;
 	int			ncolumns;
-	JsonLexContext *lex;
-	JsonSemAction *sem;
 	PopulateRecordsetState *state;
 
 	use_json_as_text = PG_ARGISNULL(2) ? false : PG_GETARG_BOOL(2);
@@ -1602,27 +2492,10 @@ json_populate_recordset(PG_FUNCTION_ARGS)
 	 */
 	(void) get_call_result_type(fcinfo, NULL, &tupdesc);
 
-	state = palloc0(sizeof(PopulateRecordsetState));
-	sem = palloc0(sizeof(JsonSemAction));
-
-
-	/* make these in a sufficiently long-lived memory context */
-	old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
-
-	state->ret_tdesc = CreateTupleDescCopy(tupdesc);
-	BlessTupleDesc(state->ret_tdesc);
-	state->tuple_store =
-		tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random,
-							  false, work_mem);
-
-	MemoryContextSwitchTo(old_cxt);
-
 	/* if the json is null send back an empty set */
 	if (PG_ARGISNULL(1))
 		PG_RETURN_NULL();
 
-	json = PG_GETARG_TEXT_P(1);
-
 	if (PG_ARGISNULL(0))
 		rec = NULL;
 	else
@@ -1632,8 +2505,6 @@ json_populate_recordset(PG_FUNCTION_ARGS)
 	tupTypmod = tupdesc->tdtypmod;
 	ncolumns = tupdesc->natts;
 
-	lex = makeJsonLexContext(json, true);
-
 	/*
 	 * We arrange to look up the needed I/O info just once per series of
 	 * calls, assuming the record type doesn't change underneath us.
@@ -1662,23 +2533,81 @@ json_populate_recordset(PG_FUNCTION_ARGS)
 		my_extra->ncolumns = ncolumns;
 	}
 
-	sem->semstate = (void *) state;
-	sem->array_start = populate_recordset_array_start;
-	sem->array_element_start = populate_recordset_array_element_start;
-	sem->scalar = populate_recordset_scalar;
-	sem->object_field_start = populate_recordset_object_field_start;
-	sem->object_field_end = populate_recordset_object_field_end;
-	sem->object_start = populate_recordset_object_start;
-	sem->object_end = populate_recordset_object_end;
+	state = palloc0(sizeof(PopulateRecordsetState));
 
-	state->lex = lex;
+	/* make these in a sufficiently long-lived memory context */
+	old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);
+	state->ret_tdesc = CreateTupleDescCopy(tupdesc);
+	BlessTupleDesc(state->ret_tdesc);
+	state->tuple_store = tuplestore_begin_heap(rsi->allowedModes &
+											   SFRM_Materialize_Random,
+											   false, work_mem);
+	MemoryContextSwitchTo(old_cxt);
 
 	state->my_extra = my_extra;
 	state->rec = rec;
 	state->use_json_as_text = use_json_as_text;
 	state->fn_mcxt = fcinfo->flinfo->fn_mcxt;
 
-	pg_parse_json(lex, sem);
+
+	if (jtype == JSONOID)
+	{
+		text	   *json = PG_GETARG_TEXT_P(1);
+		JsonLexContext *lex;
+		JsonSemAction *sem;
+
+		sem = palloc0(sizeof(JsonSemAction));
+
+		lex = makeJsonLexContext(json, true);
+
+		sem->semstate = (void *) state;
+		sem->array_start = populate_recordset_array_start;
+		sem->array_element_start = populate_recordset_array_element_start;
+		sem->scalar = populate_recordset_scalar;
+		sem->object_field_start = populate_recordset_object_field_start;
+		sem->object_field_end = populate_recordset_object_field_end;
+		sem->object_start = populate_recordset_object_start;
+		sem->object_end = populate_recordset_object_end;
+
+		state->lex = lex;
+
+		pg_parse_json(lex, sem);
+
+	}
+	else
+	{
+		Jsonb	   *jb;
+		JsonbIterator *it;
+		JsonbValue	v;
+		bool		skipNested = false;
+		int			r;
+
+		Assert(jtype == JSONBOID);
+		jb = PG_GETARG_JSONB(1);
+
+		if (JB_ROOT_IS_SCALAR(jb) || !JB_ROOT_IS_ARRAY(jb))
+			ereport(ERROR,
+					(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+			   errmsg("cannot call jsonb_populate_recordset on non-array")));
+
+		it = JsonbIteratorInit(VARDATA_ANY(jb));
+
+		while ((r = JsonbIteratorGet(&it, &v, skipNested)) != 0)
+		{
+			skipNested = true;
+
+			if (r == WJB_ELEM)
+			{
+				Jsonb	   *element = JsonbValueToJsonb(&v);
+
+				if (!JB_ROOT_IS_OBJECT(element))
+					ereport(ERROR,
+							(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
+							 errmsg("jsonb_populate_recordset argument must be an array of objects")));
+				make_row_from_rec_and_jsonb(element, state);
+			}
+		}
+	}
 
 	rsi->setResult = state->tuple_store;
 	rsi->setDesc = state->ret_tdesc;
diff --git a/src/include/catalog/pg_operator.h b/src/include/catalog/pg_operator.h
index 6aa4890..143a451 100644
--- a/src/include/catalog/pg_operator.h
+++ b/src/include/catalog/pg_operator.h
@@ -1753,6 +1753,18 @@ DATA(insert OID = 3966 (  "#>"	   PGNSP PGUID b f f 114 1009 114 0 0 json_extrac
 DESCR("get value from json with path elements");
 DATA(insert OID = 3967 (  "#>>"    PGNSP PGUID b f f 114 1009 25 0 0 json_extract_path_text_op - - ));
 DESCR("get value from json as text with path elements");
+DATA(insert OID = 3211 (  "->"	   PGNSP PGUID b f f 3802 25 3802 0 0 jsonb_object_field - - ));
+DESCR("get jsonb object field");
+DATA(insert OID = 3204 (  "->>"    PGNSP PGUID b f f 3802 25 25 0 0 jsonb_object_field_text - - ));
+DESCR("get jsonb object field as text");
+DATA(insert OID = 3212 (  "->"	   PGNSP PGUID b f f 3802 23 3802 0 0 jsonb_array_element - - ));
+DESCR("get jsonb array element");
+DATA(insert OID = 3205 (  "->>"    PGNSP PGUID b f f 3802 23 25 0 0 jsonb_array_element_text - - ));
+DESCR("get jsonb array element as text");
+DATA(insert OID = 3213 (  "#>"	   PGNSP PGUID b f f 3802 1009 3802 0 0 jsonb_extract_path_op - - ));
+DESCR("get value from jsonb with path elements");
+DATA(insert OID = 3206 (  "#>>"    PGNSP PGUID b f f 3802 1009 25 0 0 jsonb_extract_path_text_op - - ));
+DESCR("get value from jsonb as text with path elements");
 
 
 
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index ad9774c..1e12d8e 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4459,6 +4459,44 @@ DESCR("I/O");
 DATA(insert OID = 3774 (  regdictionarysend PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "3769" _null_ _null_ _null_ _null_ regdictionarysend _null_ _null_ _null_ ));
 DESCR("I/O");
 
+/* jsonb */
+DATA(insert OID =  3806 (  jsonb_in			PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 3802 "2275" _null_ _null_ _null_ _null_ jsonb_in _null_ _null_ _null_ ));
+DESCR("I/O");
+DATA(insert OID =  3805 (  jsonb_recv		PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 3802 "2281" _null_ _null_ _null_ _null_ jsonb_recv _null_ _null_ _null_ ));
+DESCR("I/O");
+DATA(insert OID =  3804 (  jsonb_out		PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 2275 "3802" _null_ _null_ _null_ _null_ jsonb_out _null_ _null_ _null_ ));
+DESCR("I/O");
+DATA(insert OID =  3803 (  jsonb_send		PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "3802" _null_ _null_ _null_ _null_	jsonb_send _null_ _null_ _null_ ));
+DESCR("I/O");
+
+DATA(insert OID = 3969 (  jsonb_object_field			PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field _null_ _null_ _null_ ));
+DATA(insert OID = 3214 (  jsonb_object_field_text	PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 25  "3802 25" _null_ _null_ "{from_json, field_name}" _null_ jsonb_object_field_text _null_ _null_ _null_ ));
+DATA(insert OID = 3180 (  jsonb_array_element		PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 3802 "3802 23" _null_ _null_ "{from_json, element_index}" _null_ jsonb_array_element _null_ _null_ _null_ ));
+DATA(insert OID = 3195 (  jsonb_array_element_text	PGNSP PGUID 12 1 0 0 0 f f f f t f i 2 0 25  "3802 23" _null_ _null_ "{from_json, element_index}" _null_ jsonb_array_element_text _null_ _null_ _null_ ));
+DATA(insert OID = 3196 (  jsonb_extract_path			PGNSP PGUID 12 1 0 25 0 f f f f t f i 2 0 3802 "3802 1009" "{3802,1009}" "{i,v}" "{from_json,path_elems}" _null_ jsonb_extract_path _null_ _null_ _null_ ));
+DESCR("get value from jsonb with path elements");
+DATA(insert OID = 3199 (  jsonb_extract_path_op		PGNSP PGUID 12 1 0 0 0	f f f f t f i 2 0 3802 "3802 1009" _null_ _null_ "{from_json,path_elems}" _null_ jsonb_extract_path _null_ _null_ _null_ ));
+DATA(insert OID = 3200 (  jsonb_extract_path_text	PGNSP PGUID 12 1 0 25 0 f f f f t f i 2 0 25 "3802 1009" "{3802,1009}" "{i,v}" "{from_json,path_elems}" _null_ jsonb_extract_path_text _null_ _null_ _null_ ));
+DESCR("get value from jsonb as text with path elements");
+DATA(insert OID = 3197 (  jsonb_extract_path_text_op PGNSP PGUID 12 1 0 0 0	f f f f t f i 2 0 25 "3802 1009" _null_ _null_ "{from_json,path_elems}" _null_ jsonb_extract_path_text _null_ _null_ _null_ ));
+DATA(insert OID = 3198 (  jsonb_array_elements		PGNSP PGUID 12 1 100 0 0 f f f f t t i 1 0 3802 "3802" "{3802,3802}" "{i,o}" "{from_json,value}" _null_ jsonb_array_elements _null_ _null_ _null_ ));
+DESCR("elements of a jsonb array");
+DATA(insert OID = 3207 (  jsonb_array_length			PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 23 "3802" _null_ _null_ _null_ _null_ jsonb_array_length _null_ _null_ _null_ ));
+DESCR("length of jsonb array");
+DATA(insert OID = 3201 (  jsonb_object_keys			PGNSP PGUID 12 1 100 0 0 f f f f t t i 1 0 25 "3802" _null_ _null_ _null_ _null_ jsonb_object_keys _null_ _null_ _null_ ));
+DESCR("get jsonb object keys");
+DATA(insert OID = 3208 (  jsonb_each				   PGNSP PGUID 12 1 100 0 0 f f f f t t i 1 0 2249 "3802" "{3802,25,3802}" "{i,o,o}" "{from_json,key,value}" _null_ jsonb_each _null_ _null_ _null_ ));
+DESCR("key value pairs of a jsonb object");
+DATA(insert OID = 3202 (  jsonb_each_text		   PGNSP PGUID 12 1 100 0 0 f f f f t t i 1 0 2249 "3802" "{3802,25,25}" "{i,o,o}" "{from_json,key,value}" _null_ jsonb_each_text _null_ _null_ _null_ ));
+DESCR("key value pairs of a jsonb object");
+DATA(insert OID = 3209 (  jsonb_populate_record	   PGNSP PGUID 12 1 0 0 0 f f f f f f s 3 0 2283 "2283 3802 16" _null_ _null_ _null_ _null_ jsonb_populate_record _null_ _null_ _null_ ));
+DESCR("get record fields from a jsonb object");
+DATA(insert OID = 3203 (  jsonb_populate_recordset  PGNSP PGUID 12 1 100 0 0 f f f f f t s 3 0 2283 "2283 3802 16" _null_ _null_ _null_ _null_ jsonb_populate_recordset _null_ _null_ _null_ ));
+DESCR("get set of records with fields from a jsonb array of objects");
+DATA(insert OID = 3210 (  jsonb_typeof              PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 25 "3802" _null_ _null_ _null_ _null_ jsonb_typeof _null_ _null_ _null_ ));
+DESCR("get the type of a jsonb value");
+
+
 /* txid */
 DATA(insert OID = 2939 (  txid_snapshot_in			PGNSP PGUID 12 1  0 0 0 f f f f t f i 1 0 2970 "2275" _null_ _null_ _null_ _null_ txid_snapshot_in _null_ _null_ _null_ ));
 DESCR("I/O");
diff --git a/src/include/catalog/pg_type.h b/src/include/catalog/pg_type.h
index 3fc20c6..7fb8999 100644
--- a/src/include/catalog/pg_type.h
+++ b/src/include/catalog/pg_type.h
@@ -600,6 +600,12 @@ DATA(insert OID = 3645 ( _tsquery		PGNSP PGUID -1 f b A f t \054 0 3615 0 array_
 DATA(insert OID = 3735 ( _regconfig		PGNSP PGUID -1 f b A f t \054 0 3734 0 array_in array_out array_recv array_send - - array_typanalyze i x f 0 -1 0 0 _null_ _null_ _null_ ));
 DATA(insert OID = 3770 ( _regdictionary PGNSP PGUID -1 f b A f t \054 0 3769 0 array_in array_out array_recv array_send - - array_typanalyze i x f 0 -1 0 0 _null_ _null_ _null_ ));
 
+/* jsonb */
+DATA(insert OID = 3802 ( jsonb			PGNSP PGUID -1 f b U f t \054 0 0 3807 jsonb_in jsonb_out jsonb_recv jsonb_send - - - i x f 0 -1 0 0 _null_ _null_ _null_ ));
+DESCR("Binary JSON");
+#define JSONBOID 3802
+DATA(insert OID = 3807 ( _jsonb			PGNSP PGUID -1 f b A f t \054 0 3802 0 array_in array_out array_recv array_send - - array_typanalyze i x f 0 -1 0 0 _null_ _null_ _null_ ));
+
 DATA(insert OID = 2970 ( txid_snapshot	PGNSP PGUID -1 f b U f t \054 0 0 2949 txid_snapshot_in txid_snapshot_out txid_snapshot_recv txid_snapshot_send - - - d x f 0 -1 0 0 _null_ _null_ _null_ ));
 DESCR("txid snapshot");
 DATA(insert OID = 2949 ( _txid_snapshot PGNSP PGUID -1 f b A f t \054 0 2970 0 array_in array_out array_recv array_send - - array_typanalyze d x f 0 -1 0 0 _null_ _null_ _null_ ));
diff --git a/src/include/funcapi.h b/src/include/funcapi.h
index 9982e59..3610fc8 100644
--- a/src/include/funcapi.h
+++ b/src/include/funcapi.h
@@ -293,6 +293,15 @@ extern void end_MultiFuncCall(PG_FUNCTION_ARGS, FuncCallContext *funcctx);
 		PG_RETURN_DATUM(_result); \
 	} while (0)
 
+#define SRF_RETURN_NEXT_NULL(_funcctx) \
+	do { \
+		ReturnSetInfo	   *rsi; \
+		(_funcctx)->call_cntr++; \
+		rsi = (ReturnSetInfo *) fcinfo->resultinfo; \
+		rsi->isDone = ExprMultipleResult; \
+		PG_RETURN_NULL(); \
+	} while (0)
+
 #define  SRF_RETURN_DONE(_funcctx) \
 	do { \
 		ReturnSetInfo	   *rsi; \
diff --git a/src/include/utils/json.h b/src/include/utils/json.h
index 25bfafb..c2fc7ee 100644
--- a/src/include/utils/json.h
+++ b/src/include/utils/json.h
@@ -50,4 +50,18 @@ extern Datum json_array_elements(PG_FUNCTION_ARGS);
 extern Datum json_populate_record(PG_FUNCTION_ARGS);
 extern Datum json_populate_recordset(PG_FUNCTION_ARGS);
 
+extern Datum jsonb_object_field(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_field_text(PG_FUNCTION_ARGS);
+extern Datum jsonb_array_element(PG_FUNCTION_ARGS);
+extern Datum jsonb_array_element_text(PG_FUNCTION_ARGS);
+extern Datum jsonb_extract_path(PG_FUNCTION_ARGS);
+extern Datum jsonb_extract_path_text(PG_FUNCTION_ARGS);
+extern Datum jsonb_object_keys(PG_FUNCTION_ARGS);
+extern Datum jsonb_array_length(PG_FUNCTION_ARGS);
+extern Datum jsonb_each(PG_FUNCTION_ARGS);
+extern Datum jsonb_each_text(PG_FUNCTION_ARGS);
+extern Datum jsonb_array_elements(PG_FUNCTION_ARGS);
+extern Datum jsonb_populate_record(PG_FUNCTION_ARGS);
+extern Datum jsonb_populate_recordset(PG_FUNCTION_ARGS);
+
 #endif   /* JSON_H */
diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h
new file mode 100644
index 0000000..50509e2
--- /dev/null
+++ b/src/include/utils/jsonb.h
@@ -0,0 +1,241 @@
+/*-------------------------------------------------------------------------
+ *
+ * jsonb.h
+ *	  Declarations for JSONB data type support.
+ *
+ * Portions Copyright (c) 1996-2013, PostgreSQL Global Development Group
+ *
+ * NOTE. JSONB type is designed to be binary compatible with hstore.
+ *
+ * src/include/utils/jsonb.h
+ *
+ *-------------------------------------------------------------------------
+ */
+#ifndef __JSONB_H__
+#define __JSONB_H__
+
+#include "fmgr.h"
+#include "lib/stringinfo.h"
+#include "utils/array.h"
+#include "utils/numeric.h"
+
+/*
+ * JEntry: there is one of these for each key _and_ value in a jsonb
+ *
+ * the position offset points to the _end_ so that we can get the length
+ * by subtraction from the previous entry.	the ISFIRST flag lets us tell
+ * whether there is a previous entry.
+ */
+typedef struct
+{
+	uint32		entry;
+}	JEntry;
+
+#define JENTRY_ISFIRST		0x80000000
+#define JENTRY_ISSTRING		(0x00000000)		/* keep binary compatibility */
+#define JENTRY_ISNUMERIC	(0x10000000)
+#define JENTRY_ISNEST		(0x20000000)
+#define JENTRY_ISNULL		(0x40000000)		/* keep binary compatibility */
+#define JENTRY_ISBOOL		(0x10000000 | 0x20000000)
+#define JENTRY_ISFALSE		JENTRY_ISBOOL
+#define JENTRY_ISTRUE		(0x10000000 | 0x20000000 | 0x40000000)
+
+/* JENTRY_ISOBJECT, JENTRY_ISARRAY and JENTRY_ISCALAR are only used in send/recv */
+#define JENTRY_ISOBJECT		(0x20000000)
+#define JENTRY_ISARRAY		(0x20000000 | 0x40000000)
+#define JENTRY_ISCALAR		(0x10000000 | 0x40000000)
+
+#define JENTRY_POSMASK	0x0FFFFFFF
+#define JENTRY_TYPEMASK (~(JENTRY_POSMASK | JENTRY_ISFIRST))
+
+/* note possible multiple evaluations, also access to prior array element */
+#define JBE_ISFIRST(he_)		(((he_).entry & JENTRY_ISFIRST) != 0)
+#define JBE_ISSTRING(he_)		(((he_).entry & JENTRY_TYPEMASK) == JENTRY_ISSTRING)
+#define JBE_ISNUMERIC(he_)		(((he_).entry & JENTRY_TYPEMASK) == JENTRY_ISNUMERIC)
+#define JBE_ISNEST(he_)			(((he_).entry & JENTRY_TYPEMASK) == JENTRY_ISNEST)
+#define JBE_ISNULL(he_)			(((he_).entry & JENTRY_TYPEMASK) == JENTRY_ISNULL)
+#define JBE_ISBOOL(he_)			(((he_).entry & JENTRY_TYPEMASK & JENTRY_ISBOOL) == JENTRY_ISBOOL)
+#define JBE_ISBOOL_TRUE(he_)	(((he_).entry & JENTRY_TYPEMASK) == JENTRY_ISTRUE)
+#define JBE_ISBOOL_FALSE(he_)	(JBE_ISBOOL(he_) && !JBE_ISBOOL_TRUE(he_))
+
+#define JBE_ENDPOS(he_) ((he_).entry & JENTRY_POSMASK)
+#define JBE_OFF(he_) (JBE_ISFIRST(he_) ? 0 : JBE_ENDPOS((&(he_))[-1]))
+#define JBE_LEN(he_) (JBE_ISFIRST(he_)	\
+					  ? JBE_ENDPOS(he_) \
+					  : JBE_ENDPOS(he_) - JBE_ENDPOS((&(he_))[-1]))
+
+/*
+ * determined by the size of "endpos" (ie JENTRY_POSMASK)
+ */
+#define JSONB_MAX_STRING_LEN		JENTRY_POSMASK
+
+typedef struct
+{
+	int32		vl_len_;		/* varlena header (do not touch directly!) */
+	/* header of hash or array jsonb type */
+	/* array of JEntry follows */
+} Jsonb;
+
+/*
+ * it's not possible to get more than 2^28 items into an jsonb.
+ */
+#define JB_FLAG_UNUSED			0x80000000
+#define JB_FLAG_ARRAY			0x40000000
+#define JB_FLAG_OBJECT			0x20000000
+#define JB_FLAG_SCALAR			0x10000000
+
+#define JB_COUNT_MASK			0x0FFFFFFF
+
+#define JB_ISEMPTY(jbp_)		(VARSIZE(jbp_) <= VARHDRSZ)
+#define JB_ROOT_COUNT(jbp_)		(JB_ISEMPTY(jbp_) ? 0 : ( *(uint32*)VARDATA(jbp_) & JB_COUNT_MASK))
+#define JB_ROOT_IS_OBJECT(jbp_) (JB_ISEMPTY(jbp_) ? 0 : ( *(uint32*)VARDATA(jbp_) & JB_FLAG_OBJECT))
+#define JB_ROOT_IS_ARRAY(jbp_)	(JB_ISEMPTY(jbp_) ? 0 : ( *(uint32*)VARDATA(jbp_) & JB_FLAG_ARRAY))
+#define JB_ROOT_IS_SCALAR(jbp_) (JB_ISEMPTY(jbp_) ? 0 : ( *(uint32*)VARDATA(jbp_) & JB_FLAG_SCALAR))
+
+#define DatumGetJsonb(d)	((Jsonb*) PG_DETOAST_DATUM(d))
+#define JsonbGetDatum(p)	PointerGetDatum(p)
+
+#define PG_GETARG_JSONB(x) DatumGetJsonb(PG_GETARG_DATUM(x))
+#define PG_RETURN_JSONB(x) PG_RETURN_POINTER(x)
+
+typedef struct JsonbPair JsonbPair;
+typedef struct JsonbValue JsonbValue;
+
+struct JsonbValue
+{
+	enum
+	{
+		jbvNull,
+		jbvString,
+		jbvNumeric,
+		jbvBool,
+		jbvArray,
+		jbvHash,
+		jbvBinary				/* binary form of jbvArray/jbvHash */
+	}			type;
+
+	uint32		size;			/* estimated size of node (including
+								 * subnodes) */
+
+	union
+	{
+		Numeric numeric;
+		bool		boolean;
+		struct
+		{
+			uint32		len;
+			char	   *val;	/* could be not null-terminated */
+		}			string;
+
+		struct
+		{
+			int			nelems;
+			JsonbValue *elems;
+			bool		scalar; /* scalar actually shares representation with
+								 * array */
+		}			array;
+
+		struct
+		{
+			int			npairs;
+			JsonbPair  *pairs;
+		}			hash;
+
+		struct
+		{
+			uint32		len;
+			char	   *data;
+		}			binary;
+	};
+
+};
+
+struct JsonbPair
+{
+	JsonbValue	key;
+	JsonbValue	value;
+	uint32		order;			/* to keep order of pairs with equal key */
+};
+
+/*
+ * jsonb support functions
+ */
+
+#define WJB_KEY				(0x001)
+#define WJB_VALUE			(0x002)
+#define WJB_ELEM			(0x004)
+#define WJB_BEGIN_ARRAY		(0x008)
+#define WJB_END_ARRAY		(0x010)
+#define WJB_BEGIN_OBJECT	(0x020)
+#define WJB_END_OBJECT		(0x040)
+
+typedef void (*walk_jsonb_cb) (void * /* arg */ , JsonbValue * /* value */ ,
+								   uint32 /* flags */ , uint32 /* level */ );
+extern void walkUncompressedJsonb(JsonbValue *v, walk_jsonb_cb cb, void *cb_arg);
+
+extern int	compareJsonbStringValue(const void *a, const void *b, void *arg);
+extern int	compareJsonbPair(const void *a, const void *b, void *arg);
+
+extern int	compareJsonbBinaryValue(char *a, char *b);
+extern int	compareJsonbValue(JsonbValue *a, JsonbValue *b);
+
+extern JsonbValue *findUncompressedJsonbValueByValue(char *buffer, uint32 flags,
+								  uint32 *lowbound, JsonbValue *key);
+extern JsonbValue *findUncompressedJsonbValue(char *buffer, uint32 flags,
+						   uint32 *lowbound, char *key, uint32 keylen);
+
+extern JsonbValue *getJsonbValue(char *buffer, uint32 flags, int32 i);
+
+typedef struct ToJsonbState
+{
+	JsonbValue	v;
+	uint32		size;
+	struct ToJsonbState *next;
+}	ToJsonbState;
+
+extern JsonbValue *pushJsonbValue(ToJsonbState ** state, int r /* WJB_* */ , JsonbValue *v);
+
+extern void uniqueJsonbValue(JsonbValue *v);
+
+extern uint32 compressJsonb(JsonbValue *v, char *buffer);
+
+typedef struct JsonbIterator
+{
+	uint32		type;
+	uint32		nelems;
+	JEntry	   *array;
+	bool		isScalar;
+	char	   *data;
+	char	   *buffer;			/* unparsed buffer */
+
+	int			i;
+
+	/*
+	 * enum members should be freely OR'ed with JB_FLAG_ARRAY/JB_FLAG_OBJECT
+	 * with possibility of decoding. See optimization in JsonbIteratorGet()
+	 */
+	enum
+	{
+		jbi_start = 0x00,
+		jbi_key = 0x01,
+		jbi_value = 0x02,
+		jbi_elem = 0x04
+	} state;
+
+	struct JsonbIterator *next;
+} JsonbIterator;
+
+extern JsonbIterator *JsonbIteratorInit(char *buffer);
+extern int /* WJB_* */ JsonbIteratorGet(JsonbIterator **it, JsonbValue *v, bool skipNested);
+
+extern Datum jsonb_in(PG_FUNCTION_ARGS);
+extern Datum jsonb_out(PG_FUNCTION_ARGS);
+extern Datum jsonb_recv(PG_FUNCTION_ARGS);
+extern Datum jsonb_send(PG_FUNCTION_ARGS);
+
+extern Datum jsonb_typeof(PG_FUNCTION_ARGS);
+
+extern char *JsonbToCString(StringInfo out, char *in, int estimated_len);
+extern Jsonb *JsonbValueToJsonb(JsonbValue *v);
+
+#endif   /* __JSONB_H__ */
diff --git a/src/test/regress/expected/jsonb.out b/src/test/regress/expected/jsonb.out
new file mode 100644
index 0000000..cb6b4a3
--- /dev/null
+++ b/src/test/regress/expected/jsonb.out
@@ -0,0 +1,845 @@
+-- Strings.
+SELECT '""'::jsonb;				-- OK.
+ jsonb 
+-------
+ ""
+(1 row)
+
+SELECT $$''$$::jsonb;			-- ERROR, single quotes are not allowed
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT $$''$$::jsonb;
+               ^
+DETAIL:  Token "'" is invalid.
+CONTEXT:  JSON data, line 1: '...
+SELECT '"abc"'::jsonb;			-- OK
+ jsonb 
+-------
+ "abc"
+(1 row)
+
+SELECT '"abc'::jsonb;			-- ERROR, quotes not closed
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"abc'::jsonb;
+               ^
+DETAIL:  Token ""abc" is invalid.
+CONTEXT:  JSON data, line 1: "abc
+SELECT '"abc
+def"'::jsonb;					-- ERROR, unescaped newline in string constant
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"abc
+               ^
+DETAIL:  Character with value 0x0a must be escaped.
+CONTEXT:  JSON data, line 1: "abc
+SELECT '"\n\"\\"'::jsonb;		-- OK, legal escapes
+  jsonb   
+----------
+ "\n\"\\"
+(1 row)
+
+SELECT '"\v"'::jsonb;			-- ERROR, not a valid JSON escape
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"\v"'::jsonb;
+               ^
+DETAIL:  Escape sequence "\v" is invalid.
+CONTEXT:  JSON data, line 1: "\v...
+SELECT '"\u"'::jsonb;			-- ERROR, incomplete escape
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"\u"'::jsonb;
+               ^
+DETAIL:  "\u" must be followed by four hexadecimal digits.
+CONTEXT:  JSON data, line 1: "\u"
+SELECT '"\u00"'::jsonb;			-- ERROR, incomplete escape
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"\u00"'::jsonb;
+               ^
+DETAIL:  "\u" must be followed by four hexadecimal digits.
+CONTEXT:  JSON data, line 1: "\u00"
+SELECT '"\u000g"'::jsonb;		-- ERROR, g is not a hex digit
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"\u000g"'::jsonb;
+               ^
+DETAIL:  "\u" must be followed by four hexadecimal digits.
+CONTEXT:  JSON data, line 1: "\u000g...
+SELECT '"\u0000"'::jsonb;		-- OK, legal escape
+   jsonb   
+-----------
+ "\\u0000"
+(1 row)
+
+-- use octet_length here so we don't get an odd unicode char in the
+-- output
+SELECT octet_length('"\uaBcD"'::jsonb::text); -- OK, uppercase and lower case both OK
+ octet_length 
+--------------
+            5
+(1 row)
+
+-- Numbers.
+SELECT '1'::jsonb;				-- OK
+ jsonb 
+-------
+ 1
+(1 row)
+
+SELECT '0'::jsonb;				-- OK
+ jsonb 
+-------
+ 0
+(1 row)
+
+SELECT '01'::jsonb;				-- ERROR, not valid according to JSON spec
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '01'::jsonb;
+               ^
+DETAIL:  Token "01" is invalid.
+CONTEXT:  JSON data, line 1: 01
+SELECT '0.1'::jsonb;				-- OK
+ jsonb 
+-------
+ 0.1
+(1 row)
+
+SELECT '9223372036854775808'::jsonb;	-- OK, even though it's too large for int8
+        jsonb        
+---------------------
+ 9223372036854775808
+(1 row)
+
+SELECT '1e100'::jsonb;			-- OK
+                                                 jsonb                                                 
+-------------------------------------------------------------------------------------------------------
+ 10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
+(1 row)
+
+SELECT '1.3e100'::jsonb;			-- OK
+                                                 jsonb                                                 
+-------------------------------------------------------------------------------------------------------
+ 13000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
+(1 row)
+
+SELECT '1f2'::jsonb;				-- ERROR
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '1f2'::jsonb;
+               ^
+DETAIL:  Token "1f2" is invalid.
+CONTEXT:  JSON data, line 1: 1f2
+SELECT '0.x1'::jsonb;			-- ERROR
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '0.x1'::jsonb;
+               ^
+DETAIL:  Token "0.x1" is invalid.
+CONTEXT:  JSON data, line 1: 0.x1
+SELECT '1.3ex100'::jsonb;		-- ERROR
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '1.3ex100'::jsonb;
+               ^
+DETAIL:  Token "1.3ex100" is invalid.
+CONTEXT:  JSON data, line 1: 1.3ex100
+-- Arrays.
+SELECT '[]'::jsonb;				-- OK
+ jsonb 
+-------
+ []
+(1 row)
+
+SELECT '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'::jsonb;  -- OK
+                                                                                                  jsonb                                                                                                   
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
+(1 row)
+
+SELECT '[1,2]'::jsonb;			-- OK
+ jsonb  
+--------
+ [1, 2]
+(1 row)
+
+SELECT '[1,2,]'::jsonb;			-- ERROR, trailing comma
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '[1,2,]'::jsonb;
+               ^
+DETAIL:  Expected JSON value, but found "]".
+CONTEXT:  JSON data, line 1: [1,2,]
+SELECT '[1,2'::jsonb;			-- ERROR, no closing bracket
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '[1,2'::jsonb;
+               ^
+DETAIL:  The input string ended unexpectedly.
+CONTEXT:  JSON data, line 1: [1,2
+SELECT '[1,[2]'::jsonb;			-- ERROR, no closing bracket
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '[1,[2]'::jsonb;
+               ^
+DETAIL:  The input string ended unexpectedly.
+CONTEXT:  JSON data, line 1: [1,[2]
+-- Objects.
+SELECT '{}'::jsonb;				-- OK
+ jsonb 
+-------
+ {}
+(1 row)
+
+SELECT '{"abc"}'::jsonb;			-- ERROR, no value
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc"}'::jsonb;
+               ^
+DETAIL:  Expected ":", but found "}".
+CONTEXT:  JSON data, line 1: {"abc"}
+SELECT '{"abc":1}'::jsonb;		-- OK
+   jsonb    
+------------
+ {"abc": 1}
+(1 row)
+
+SELECT '{1:"abc"}'::jsonb;		-- ERROR, keys must be strings
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{1:"abc"}'::jsonb;
+               ^
+DETAIL:  Expected string or "}", but found "1".
+CONTEXT:  JSON data, line 1: {1...
+SELECT '{"abc",1}'::jsonb;		-- ERROR, wrong separator
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc",1}'::jsonb;
+               ^
+DETAIL:  Expected ":", but found ",".
+CONTEXT:  JSON data, line 1: {"abc",...
+SELECT '{"abc"=1}'::jsonb;		-- ERROR, totally wrong separator
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc"=1}'::jsonb;
+               ^
+DETAIL:  Token "=" is invalid.
+CONTEXT:  JSON data, line 1: {"abc"=...
+SELECT '{"abc"::1}'::jsonb;		-- ERROR, another wrong separator
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc"::1}'::jsonb;
+               ^
+DETAIL:  Expected JSON value, but found ":".
+CONTEXT:  JSON data, line 1: {"abc"::...
+SELECT '{"abc":1,"def":2,"ghi":[3,4],"hij":{"klm":5,"nop":[6]}}'::jsonb; -- OK
+                               jsonb                                
+--------------------------------------------------------------------
+ {"abc": 1, "def": 2, "ghi": [3, 4], "hij": {"klm": 5, "nop": [6]}}
+(1 row)
+
+SELECT '{"abc":1:2}'::jsonb;		-- ERROR, colon in wrong spot
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc":1:2}'::jsonb;
+               ^
+DETAIL:  Expected "," or "}", but found ":".
+CONTEXT:  JSON data, line 1: {"abc":1:...
+SELECT '{"abc":1,3}'::jsonb;		-- ERROR, no value
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc":1,3}'::jsonb;
+               ^
+DETAIL:  Expected string, but found "3".
+CONTEXT:  JSON data, line 1: {"abc":1,3...
+-- Miscellaneous stuff.
+SELECT 'true'::jsonb;			-- OK
+ jsonb 
+-------
+ true
+(1 row)
+
+SELECT 'false'::jsonb;			-- OK
+ jsonb 
+-------
+ false
+(1 row)
+
+SELECT 'null'::jsonb;			-- OK
+ jsonb 
+-------
+ null
+(1 row)
+
+SELECT ' true '::jsonb;			-- OK, even with extra whitespace
+ jsonb 
+-------
+ true
+(1 row)
+
+SELECT 'true false'::jsonb;		-- ERROR, too many values
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT 'true false'::jsonb;
+               ^
+DETAIL:  Expected end of input, but found "false".
+CONTEXT:  JSON data, line 1: true false
+SELECT 'true, false'::jsonb;		-- ERROR, too many values
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT 'true, false'::jsonb;
+               ^
+DETAIL:  Expected end of input, but found ",".
+CONTEXT:  JSON data, line 1: true,...
+SELECT 'truf'::jsonb;			-- ERROR, not a keyword
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT 'truf'::jsonb;
+               ^
+DETAIL:  Token "truf" is invalid.
+CONTEXT:  JSON data, line 1: truf
+SELECT 'trues'::jsonb;			-- ERROR, not a keyword
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT 'trues'::jsonb;
+               ^
+DETAIL:  Token "trues" is invalid.
+CONTEXT:  JSON data, line 1: trues
+SELECT ''::jsonb;				-- ERROR, no value
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT ''::jsonb;
+               ^
+DETAIL:  The input string ended unexpectedly.
+CONTEXT:  JSON data, line 1: 
+SELECT '    '::jsonb;			-- ERROR, no value
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '    '::jsonb;
+               ^
+DETAIL:  The input string ended unexpectedly.
+CONTEXT:  JSON data, line 1:     
+-- make sure jsonb is passed throught json generators without being escaped
+select array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
+      array_to_json       
+--------------------------
+ [{"a": 1},{"b": [2, 3]}]
+(1 row)
+
+-- jsonb extraction functions
+CREATE TEMP TABLE test_jsonb (
+       json_type text,
+       test_json jsonb
+);
+INSERT INTO test_jsonb VALUES
+('scalar','"a scalar"'),
+('array','["zero", "one","two",null,"four","five"]'),
+('object','{"field1":"val1","field2":"val2","field3":null}');
+SELECT test_json -> 'x'
+FROM test_jsonb
+WHERE json_type = 'scalar';
+ERROR:  cannot call jsonb_object_field on a scalar
+SELECT test_json -> 'x'
+FROM test_jsonb
+WHERE json_type = 'array';
+ERROR:  cannot call jsonb_object_field on an array
+SELECT test_json -> 'x'
+FROM test_jsonb
+WHERE json_type = 'object';
+ ?column? 
+----------
+ 
+(1 row)
+
+SELECT test_json->'field2'
+FROM test_jsonb
+WHERE json_type = 'object';
+ ?column? 
+----------
+ "val2"
+(1 row)
+
+SELECT test_json->>'field2'
+FROM test_jsonb
+WHERE json_type = 'object';
+ ?column? 
+----------
+ val2
+(1 row)
+
+SELECT test_json -> 2
+FROM test_jsonb
+WHERE json_type = 'scalar';
+ERROR:  cannot call jsonb_array_element on a scalar
+SELECT test_json -> 2
+FROM test_jsonb
+WHERE json_type = 'array';
+ ?column? 
+----------
+ "two"
+(1 row)
+
+SELECT test_json -> 2
+FROM test_jsonb
+WHERE json_type = 'object';
+ERROR:  cannot call jsonb_array_element on an object
+SELECT test_json->>2
+FROM test_jsonb
+WHERE json_type = 'array';
+ ?column? 
+----------
+ two
+(1 row)
+
+SELECT jsonb_object_keys(test_json)
+FROM test_jsonb
+WHERE json_type = 'scalar';
+ERROR:  cannot call jsonb_object_keys on a scalar
+SELECT jsonb_object_keys(test_json)
+FROM test_jsonb
+WHERE json_type = 'array';
+ERROR:  cannot call jsonb_object_keys on an array
+SELECT jsonb_object_keys(test_json)
+FROM test_jsonb
+WHERE json_type = 'object';
+ jsonb_object_keys 
+-------------------
+ field1
+ field2
+ field3
+(3 rows)
+
+-- nulls
+select (test_json->'field3') is null as expect_false
+from test_jsonb
+where json_type = 'object';
+ expect_false 
+--------------
+ f
+(1 row)
+
+select (test_json->>'field3') is null as expect_true
+from test_jsonb
+where json_type = 'object';
+ expect_true 
+-------------
+ t
+(1 row)
+
+select (test_json->3) is null as expect_false
+from test_jsonb
+where json_type = 'array';
+ expect_false 
+--------------
+ f
+(1 row)
+
+select (test_json->>3) is null as expect_true
+from test_jsonb
+where json_type = 'array';
+ expect_true 
+-------------
+ t
+(1 row)
+
+-- array length
+SELECT jsonb_array_length('[1,2,3,{"f1":1,"f2":[5,6]},4]');
+ jsonb_array_length 
+--------------------
+                  5
+(1 row)
+
+SELECT jsonb_array_length('[]');
+ jsonb_array_length 
+--------------------
+                  0
+(1 row)
+
+SELECT jsonb_array_length('{"f1":1,"f2":[5,6]}');
+ERROR:  cannot get array length of a non-array
+SELECT jsonb_array_length('4');
+ERROR:  cannot get array length of a scalar
+-- each
+select jsonb_each('{"f1":[1,2,3],"f2":{"f3":1},"f4":null}');
+     jsonb_each     
+--------------------
+ (f1,"[1, 2, 3]")
+ (f2,"{""f3"": 1}")
+ (f4,null)
+(3 rows)
+
+select * from jsonb_each('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":99,"f6":"stringy"}') q;
+ key |   value   
+-----+-----------
+ f1  | [1, 2, 3]
+ f2  | {"f3": 1}
+ f4  | null
+ f5  | 99
+ f6  | "stringy"
+(5 rows)
+
+select jsonb_each_text('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":"null"}');
+  jsonb_each_text   
+--------------------
+ (f1,"[1, 2, 3]")
+ (f2,"{""f3"": 1}")
+ (f4,)
+ (f5,null)
+(4 rows)
+
+select * from jsonb_each_text('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":99,"f6":"stringy"}') q;
+ key |   value   
+-----+-----------
+ f1  | [1, 2, 3]
+ f2  | {"f3": 1}
+ f4  | 
+ f5  | 99
+ f6  | stringy
+(5 rows)
+
+-- extract_path, extract_path_as_text
+select jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
+ jsonb_extract_path 
+--------------------
+ "stringy"
+(1 row)
+
+select jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
+ jsonb_extract_path 
+--------------------
+ {"f3": 1}
+(1 row)
+
+select jsonb_extract_path('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',0::text);
+ jsonb_extract_path 
+--------------------
+ "f3"
+(1 row)
+
+select jsonb_extract_path('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',1::text);
+ jsonb_extract_path 
+--------------------
+ 1
+(1 row)
+
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
+ jsonb_extract_path_text 
+-------------------------
+ stringy
+(1 row)
+
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
+ jsonb_extract_path_text 
+-------------------------
+ {"f3": 1}
+(1 row)
+
+select jsonb_extract_path_text('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',0::text);
+ jsonb_extract_path_text 
+-------------------------
+ f3
+(1 row)
+
+select jsonb_extract_path_text('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',1::text);
+ jsonb_extract_path_text 
+-------------------------
+ 1
+(1 row)
+
+-- extract_path nulls
+select jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":null,"f6":"stringy"}}','f4','f5') is null as expect_false;
+ expect_false 
+--------------
+ f
+(1 row)
+
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":{"f5":null,"f6":"stringy"}}','f4','f5') is null as expect_true;
+ expect_true 
+-------------
+ t
+(1 row)
+
+select jsonb_extract_path('{"f2":{"f3":1},"f4":[0,1,2,null]}','f4','3') is null as expect_false;
+ expect_false 
+--------------
+ f
+(1 row)
+
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":[0,1,2,null]}','f4','3') is null as expect_true;
+ expect_true 
+-------------
+ t
+(1 row)
+
+-- extract_path operators
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f4','f6'];
+ ?column?  
+-----------
+ "stringy"
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f2'];
+ ?column?  
+-----------
+ {"f3": 1}
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f2','0'];
+ ?column? 
+----------
+ "f3"
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f2','1'];
+ ?column? 
+----------
+ 1
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f4','f6'];
+ ?column? 
+----------
+ stringy
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f2'];
+ ?column?  
+-----------
+ {"f3": 1}
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f2','0'];
+ ?column? 
+----------
+ f3
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f2','1'];
+ ?column? 
+----------
+ 1
+(1 row)
+
+-- same using array literals
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f4,f6}';
+ ?column?  
+-----------
+ "stringy"
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f2}';
+ ?column?  
+-----------
+ {"f3": 1}
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f2,0}';
+ ?column? 
+----------
+ "f3"
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f2,1}';
+ ?column? 
+----------
+ 1
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f4,f6}';
+ ?column? 
+----------
+ stringy
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f2}';
+ ?column?  
+-----------
+ {"f3": 1}
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f2,0}';
+ ?column? 
+----------
+ f3
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f2,1}';
+ ?column? 
+----------
+ 1
+(1 row)
+
+-- array_elements
+select jsonb_array_elements('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false]');
+    jsonb_array_elements    
+----------------------------
+ 1
+ true
+ [1, [2, 3]]
+ null
+ {"f1": 1, "f2": [7, 8, 9]}
+ false
+(6 rows)
+
+select * from jsonb_array_elements('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false]') q;
+           value            
+----------------------------
+ 1
+ true
+ [1, [2, 3]]
+ null
+ {"f1": 1, "f2": [7, 8, 9]}
+ false
+(6 rows)
+
+-- populate_record
+create type jbpop as (a text, b int, c timestamp);
+select * from jsonb_populate_record(null::jbpop,'{"a":"blurfl","x":43.2}') q;
+   a    | b | c 
+--------+---+---
+ blurfl |   | 
+(1 row)
+
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}') q;
+   a    | b |            c             
+--------+---+--------------------------
+ blurfl | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+select * from jsonb_populate_record(null::jbpop,'{"a":"blurfl","x":43.2}', true) q;
+   a    | b | c 
+--------+---+---
+ blurfl |   | 
+(1 row)
+
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}', true) q;
+   a    | b |            c             
+--------+---+--------------------------
+ blurfl | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+select * from jsonb_populate_record(null::jbpop,'{"a":[100,200,false],"x":43.2}', true) q;
+         a         | b | c 
+-------------------+---+---
+ [100, 200, false] |   | 
+(1 row)
+
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":[100,200,false],"x":43.2}', true) q;
+         a         | b |            c             
+-------------------+---+--------------------------
+ [100, 200, false] | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}', true) q;
+ERROR:  invalid input syntax for type timestamp: "[100, 200, false]"
+-- populate_recordset
+select * from jsonb_populate_recordset(null::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',false) q;
+   a    | b |            c             
+--------+---+--------------------------
+ blurfl |   | 
+        | 3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',false) q;
+   a    | b  |            c             
+--------+----+--------------------------
+ blurfl | 99 | 
+ def    |  3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(null::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+   a    | b |            c             
+--------+---+--------------------------
+ blurfl |   | 
+        | 3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+   a    | b  |            c             
+--------+----+--------------------------
+ blurfl | 99 | 
+ def    |  3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+        a        | b  |            c             
+-----------------+----+--------------------------
+ [100, 200, 300] | 99 | 
+ {"z": true}     |  3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"c":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+ERROR:  invalid input syntax for type timestamp: "[100, 200, 300]"
+-- using the default use_json_as_text argument
+select * from jsonb_populate_recordset(null::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
+   a    | b |            c             
+--------+---+--------------------------
+ blurfl |   | 
+        | 3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
+   a    | b  |            c             
+--------+----+--------------------------
+ blurfl | 99 | 
+ def    |  3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+ERROR:  cannot populate with a nested object unless use_json_as_text is true
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"c":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+ERROR:  cannot populate with a nested object unless use_json_as_text is true
+-- handling of unicode surrogate pairs
+select octet_length((jsonb '{ "a":  "\ud83d\ude04\ud83d\udc36" }' -> 'a')::text)  as correct_in_utf8;
+ correct_in_utf8 
+-----------------
+              10
+(1 row)
+
+select jsonb '{ "a":  "\ud83d\ud83d" }' -> 'a'; -- 2 high surrogates in a row
+ERROR:  invalid input syntax for type json
+LINE 1: select jsonb '{ "a":  "\ud83d\ud83d" }' -> 'a';
+                     ^
+DETAIL:  Unicode high surrogate must not follow a high surrogate.
+CONTEXT:  JSON data, line 1: { "a":...
+select jsonb '{ "a":  "\ude04\ud83d" }' -> 'a'; -- surrogates in wrong order
+ERROR:  invalid input syntax for type json
+LINE 1: select jsonb '{ "a":  "\ude04\ud83d" }' -> 'a';
+                     ^
+DETAIL:  Unicode low surrogate must follow a high surrogate.
+CONTEXT:  JSON data, line 1: { "a":...
+select jsonb '{ "a":  "\ud83dX" }' -> 'a'; -- orphan high surrogate
+ERROR:  invalid input syntax for type json
+LINE 1: select jsonb '{ "a":  "\ud83dX" }' -> 'a';
+                     ^
+DETAIL:  Unicode low surrogate must follow a high surrogate.
+CONTEXT:  JSON data, line 1: { "a":...
+select jsonb '{ "a":  "\ude04X" }' -> 'a'; -- orphan low surrogate
+ERROR:  invalid input syntax for type json
+LINE 1: select jsonb '{ "a":  "\ude04X" }' -> 'a';
+                     ^
+DETAIL:  Unicode low surrogate must follow a high surrogate.
+CONTEXT:  JSON data, line 1: { "a":...
+--handling of simple unicode escapes
+select jsonb '{ "a":  "the Copyright \u00a9 sign" }' ->> 'a' as correct_in_utf8;
+   correct_in_utf8    
+----------------------
+ the Copyright © sign
+(1 row)
+
+select jsonb '{ "a":  "dollar \u0024 character" }' ->> 'a' as correct_everywhere;
+ correct_everywhere 
+--------------------
+ dollar $ character
+(1 row)
+
+select jsonb '{ "a":  "null \u0000 escape" }' ->> 'a' as not_unescaped;
+   not_unescaped    
+--------------------
+ null \u0000 escape
+(1 row)
+
+--jsonb_typeof() function
+select value, jsonb_typeof(value)
+  from (values (jsonb '123.4'),
+               (jsonb '-1'),
+               (jsonb '"foo"'),
+               (jsonb 'true'),
+               (jsonb 'false'),
+               (jsonb 'null'),
+               (jsonb '[1, 2, 3]'),
+               (jsonb '[]'),
+               (jsonb '{"x":"foo", "y":123}'),
+               (jsonb '{}'),
+               (NULL::jsonb))
+      as data(value);
+         value          | jsonb_typeof 
+------------------------+--------------
+ 123.4                  | number
+ -1                     | number
+ "foo"                  | string
+ true                   | boolean
+ false                  | boolean
+ null                   | null
+ [1, 2, 3]              | array
+ []                     | array
+ {"x": "foo", "y": 123} | object
+ {}                     | object
+                        | 
+(11 rows)
+
diff --git a/src/test/regress/expected/jsonb_1.out b/src/test/regress/expected/jsonb_1.out
new file mode 100644
index 0000000..8fae7c2
--- /dev/null
+++ b/src/test/regress/expected/jsonb_1.out
@@ -0,0 +1,845 @@
+-- Strings.
+SELECT '""'::jsonb;				-- OK.
+ jsonb 
+-------
+ ""
+(1 row)
+
+SELECT $$''$$::jsonb;			-- ERROR, single quotes are not allowed
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT $$''$$::jsonb;
+               ^
+DETAIL:  Token "'" is invalid.
+CONTEXT:  JSON data, line 1: '...
+SELECT '"abc"'::jsonb;			-- OK
+ jsonb 
+-------
+ "abc"
+(1 row)
+
+SELECT '"abc'::jsonb;			-- ERROR, quotes not closed
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"abc'::jsonb;
+               ^
+DETAIL:  Token ""abc" is invalid.
+CONTEXT:  JSON data, line 1: "abc
+SELECT '"abc
+def"'::jsonb;					-- ERROR, unescaped newline in string constant
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"abc
+               ^
+DETAIL:  Character with value 0x0a must be escaped.
+CONTEXT:  JSON data, line 1: "abc
+SELECT '"\n\"\\"'::jsonb;		-- OK, legal escapes
+  jsonb   
+----------
+ "\n\"\\"
+(1 row)
+
+SELECT '"\v"'::jsonb;			-- ERROR, not a valid JSON escape
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"\v"'::jsonb;
+               ^
+DETAIL:  Escape sequence "\v" is invalid.
+CONTEXT:  JSON data, line 1: "\v...
+SELECT '"\u"'::jsonb;			-- ERROR, incomplete escape
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"\u"'::jsonb;
+               ^
+DETAIL:  "\u" must be followed by four hexadecimal digits.
+CONTEXT:  JSON data, line 1: "\u"
+SELECT '"\u00"'::jsonb;			-- ERROR, incomplete escape
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"\u00"'::jsonb;
+               ^
+DETAIL:  "\u" must be followed by four hexadecimal digits.
+CONTEXT:  JSON data, line 1: "\u00"
+SELECT '"\u000g"'::jsonb;		-- ERROR, g is not a hex digit
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '"\u000g"'::jsonb;
+               ^
+DETAIL:  "\u" must be followed by four hexadecimal digits.
+CONTEXT:  JSON data, line 1: "\u000g...
+SELECT '"\u0000"'::jsonb;		-- OK, legal escape
+   jsonb   
+-----------
+ "\\u0000"
+(1 row)
+
+-- use octet_length here so we don't get an odd unicode char in the
+-- output
+SELECT octet_length('"\uaBcD"'::jsonb::text); -- OK, uppercase and lower case both OK
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT octet_length('"\uaBcD"'::jsonb::text);
+                            ^
+DETAIL:  Unicode escape values cannot be used for code point values above 007F when the server encoding is not UTF8.
+CONTEXT:  JSON data, line 1: ...
+-- Numbers.
+SELECT '1'::jsonb;				-- OK
+ jsonb 
+-------
+ 1
+(1 row)
+
+SELECT '0'::jsonb;				-- OK
+ jsonb 
+-------
+ 0
+(1 row)
+
+SELECT '01'::jsonb;				-- ERROR, not valid according to JSON spec
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '01'::jsonb;
+               ^
+DETAIL:  Token "01" is invalid.
+CONTEXT:  JSON data, line 1: 01
+SELECT '0.1'::jsonb;				-- OK
+ jsonb 
+-------
+ 0.1
+(1 row)
+
+SELECT '9223372036854775808'::jsonb;	-- OK, even though it's too large for int8
+        jsonb        
+---------------------
+ 9223372036854775808
+(1 row)
+
+SELECT '1e100'::jsonb;			-- OK
+                                                 jsonb                                                 
+-------------------------------------------------------------------------------------------------------
+ 10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
+(1 row)
+
+SELECT '1.3e100'::jsonb;			-- OK
+                                                 jsonb                                                 
+-------------------------------------------------------------------------------------------------------
+ 13000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
+(1 row)
+
+SELECT '1f2'::jsonb;				-- ERROR
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '1f2'::jsonb;
+               ^
+DETAIL:  Token "1f2" is invalid.
+CONTEXT:  JSON data, line 1: 1f2
+SELECT '0.x1'::jsonb;			-- ERROR
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '0.x1'::jsonb;
+               ^
+DETAIL:  Token "0.x1" is invalid.
+CONTEXT:  JSON data, line 1: 0.x1
+SELECT '1.3ex100'::jsonb;		-- ERROR
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '1.3ex100'::jsonb;
+               ^
+DETAIL:  Token "1.3ex100" is invalid.
+CONTEXT:  JSON data, line 1: 1.3ex100
+-- Arrays.
+SELECT '[]'::jsonb;				-- OK
+ jsonb 
+-------
+ []
+(1 row)
+
+SELECT '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'::jsonb;  -- OK
+                                                                                                  jsonb                                                                                                   
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
+(1 row)
+
+SELECT '[1,2]'::jsonb;			-- OK
+ jsonb  
+--------
+ [1, 2]
+(1 row)
+
+SELECT '[1,2,]'::jsonb;			-- ERROR, trailing comma
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '[1,2,]'::jsonb;
+               ^
+DETAIL:  Expected JSON value, but found "]".
+CONTEXT:  JSON data, line 1: [1,2,]
+SELECT '[1,2'::jsonb;			-- ERROR, no closing bracket
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '[1,2'::jsonb;
+               ^
+DETAIL:  The input string ended unexpectedly.
+CONTEXT:  JSON data, line 1: [1,2
+SELECT '[1,[2]'::jsonb;			-- ERROR, no closing bracket
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '[1,[2]'::jsonb;
+               ^
+DETAIL:  The input string ended unexpectedly.
+CONTEXT:  JSON data, line 1: [1,[2]
+-- Objects.
+SELECT '{}'::jsonb;				-- OK
+ jsonb 
+-------
+ {}
+(1 row)
+
+SELECT '{"abc"}'::jsonb;			-- ERROR, no value
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc"}'::jsonb;
+               ^
+DETAIL:  Expected ":", but found "}".
+CONTEXT:  JSON data, line 1: {"abc"}
+SELECT '{"abc":1}'::jsonb;		-- OK
+   jsonb    
+------------
+ {"abc": 1}
+(1 row)
+
+SELECT '{1:"abc"}'::jsonb;		-- ERROR, keys must be strings
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{1:"abc"}'::jsonb;
+               ^
+DETAIL:  Expected string or "}", but found "1".
+CONTEXT:  JSON data, line 1: {1...
+SELECT '{"abc",1}'::jsonb;		-- ERROR, wrong separator
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc",1}'::jsonb;
+               ^
+DETAIL:  Expected ":", but found ",".
+CONTEXT:  JSON data, line 1: {"abc",...
+SELECT '{"abc"=1}'::jsonb;		-- ERROR, totally wrong separator
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc"=1}'::jsonb;
+               ^
+DETAIL:  Token "=" is invalid.
+CONTEXT:  JSON data, line 1: {"abc"=...
+SELECT '{"abc"::1}'::jsonb;		-- ERROR, another wrong separator
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc"::1}'::jsonb;
+               ^
+DETAIL:  Expected JSON value, but found ":".
+CONTEXT:  JSON data, line 1: {"abc"::...
+SELECT '{"abc":1,"def":2,"ghi":[3,4],"hij":{"klm":5,"nop":[6]}}'::jsonb; -- OK
+                               jsonb                                
+--------------------------------------------------------------------
+ {"abc": 1, "def": 2, "ghi": [3, 4], "hij": {"klm": 5, "nop": [6]}}
+(1 row)
+
+SELECT '{"abc":1:2}'::jsonb;		-- ERROR, colon in wrong spot
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc":1:2}'::jsonb;
+               ^
+DETAIL:  Expected "," or "}", but found ":".
+CONTEXT:  JSON data, line 1: {"abc":1:...
+SELECT '{"abc":1,3}'::jsonb;		-- ERROR, no value
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '{"abc":1,3}'::jsonb;
+               ^
+DETAIL:  Expected string, but found "3".
+CONTEXT:  JSON data, line 1: {"abc":1,3...
+-- Miscellaneous stuff.
+SELECT 'true'::jsonb;			-- OK
+ jsonb 
+-------
+ true
+(1 row)
+
+SELECT 'false'::jsonb;			-- OK
+ jsonb 
+-------
+ false
+(1 row)
+
+SELECT 'null'::jsonb;			-- OK
+ jsonb 
+-------
+ null
+(1 row)
+
+SELECT ' true '::jsonb;			-- OK, even with extra whitespace
+ jsonb 
+-------
+ true
+(1 row)
+
+SELECT 'true false'::jsonb;		-- ERROR, too many values
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT 'true false'::jsonb;
+               ^
+DETAIL:  Expected end of input, but found "false".
+CONTEXT:  JSON data, line 1: true false
+SELECT 'true, false'::jsonb;		-- ERROR, too many values
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT 'true, false'::jsonb;
+               ^
+DETAIL:  Expected end of input, but found ",".
+CONTEXT:  JSON data, line 1: true,...
+SELECT 'truf'::jsonb;			-- ERROR, not a keyword
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT 'truf'::jsonb;
+               ^
+DETAIL:  Token "truf" is invalid.
+CONTEXT:  JSON data, line 1: truf
+SELECT 'trues'::jsonb;			-- ERROR, not a keyword
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT 'trues'::jsonb;
+               ^
+DETAIL:  Token "trues" is invalid.
+CONTEXT:  JSON data, line 1: trues
+SELECT ''::jsonb;				-- ERROR, no value
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT ''::jsonb;
+               ^
+DETAIL:  The input string ended unexpectedly.
+CONTEXT:  JSON data, line 1: 
+SELECT '    '::jsonb;			-- ERROR, no value
+ERROR:  invalid input syntax for type json
+LINE 1: SELECT '    '::jsonb;
+               ^
+DETAIL:  The input string ended unexpectedly.
+CONTEXT:  JSON data, line 1:     
+-- make sure jsonb is passed throught json generators without being escaped
+select array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
+      array_to_json       
+--------------------------
+ [{"a": 1},{"b": [2, 3]}]
+(1 row)
+
+-- jsonb extraction functions
+CREATE TEMP TABLE test_jsonb (
+       json_type text,
+       test_json jsonb
+);
+INSERT INTO test_jsonb VALUES
+('scalar','"a scalar"'),
+('array','["zero", "one","two",null,"four","five"]'),
+('object','{"field1":"val1","field2":"val2","field3":null}');
+SELECT test_json -> 'x'
+FROM test_jsonb
+WHERE json_type = 'scalar';
+ERROR:  cannot call jsonb_object_field on a scalar
+SELECT test_json -> 'x'
+FROM test_jsonb
+WHERE json_type = 'array';
+ERROR:  cannot call jsonb_object_field on an array
+SELECT test_json -> 'x'
+FROM test_jsonb
+WHERE json_type = 'object';
+ ?column? 
+----------
+ 
+(1 row)
+
+SELECT test_json->'field2'
+FROM test_jsonb
+WHERE json_type = 'object';
+ ?column? 
+----------
+ "val2"
+(1 row)
+
+SELECT test_json->>'field2'
+FROM test_jsonb
+WHERE json_type = 'object';
+ ?column? 
+----------
+ val2
+(1 row)
+
+SELECT test_json -> 2
+FROM test_jsonb
+WHERE json_type = 'scalar';
+ERROR:  cannot call jsonb_array_element on a scalar
+SELECT test_json -> 2
+FROM test_jsonb
+WHERE json_type = 'array';
+ ?column? 
+----------
+ "two"
+(1 row)
+
+SELECT test_json -> 2
+FROM test_jsonb
+WHERE json_type = 'object';
+ERROR:  cannot call jsonb_array_element on an object
+SELECT test_json->>2
+FROM test_jsonb
+WHERE json_type = 'array';
+ ?column? 
+----------
+ two
+(1 row)
+
+SELECT jsonb_object_keys(test_json)
+FROM test_jsonb
+WHERE json_type = 'scalar';
+ERROR:  cannot call jsonb_object_keys on a scalar
+SELECT jsonb_object_keys(test_json)
+FROM test_jsonb
+WHERE json_type = 'array';
+ERROR:  cannot call jsonb_object_keys on an array
+SELECT jsonb_object_keys(test_json)
+FROM test_jsonb
+WHERE json_type = 'object';
+ jsonb_object_keys 
+-------------------
+ field1
+ field2
+ field3
+(3 rows)
+
+-- nulls
+select (test_json->'field3') is null as expect_false
+from test_jsonb
+where json_type = 'object';
+ expect_false 
+--------------
+ f
+(1 row)
+
+select (test_json->>'field3') is null as expect_true
+from test_jsonb
+where json_type = 'object';
+ expect_true 
+-------------
+ t
+(1 row)
+
+select (test_json->3) is null as expect_false
+from test_jsonb
+where json_type = 'array';
+ expect_false 
+--------------
+ f
+(1 row)
+
+select (test_json->>3) is null as expect_true
+from test_jsonb
+where json_type = 'array';
+ expect_true 
+-------------
+ t
+(1 row)
+
+-- array length
+SELECT jsonb_array_length('[1,2,3,{"f1":1,"f2":[5,6]},4]');
+ jsonb_array_length 
+--------------------
+                  5
+(1 row)
+
+SELECT jsonb_array_length('[]');
+ jsonb_array_length 
+--------------------
+                  0
+(1 row)
+
+SELECT jsonb_array_length('{"f1":1,"f2":[5,6]}');
+ERROR:  cannot get array length of a non-array
+SELECT jsonb_array_length('4');
+ERROR:  cannot get array length of a scalar
+-- each
+select jsonb_each('{"f1":[1,2,3],"f2":{"f3":1},"f4":null}');
+     jsonb_each     
+--------------------
+ (f1,"[1, 2, 3]")
+ (f2,"{""f3"": 1}")
+ (f4,null)
+(3 rows)
+
+select * from jsonb_each('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":99,"f6":"stringy"}') q;
+ key |   value   
+-----+-----------
+ f1  | [1, 2, 3]
+ f2  | {"f3": 1}
+ f4  | null
+ f5  | 99
+ f6  | "stringy"
+(5 rows)
+
+select jsonb_each_text('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":"null"}');
+  jsonb_each_text   
+--------------------
+ (f1,"[1, 2, 3]")
+ (f2,"{""f3"": 1}")
+ (f4,)
+ (f5,null)
+(4 rows)
+
+select * from jsonb_each_text('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":99,"f6":"stringy"}') q;
+ key |   value   
+-----+-----------
+ f1  | [1, 2, 3]
+ f2  | {"f3": 1}
+ f4  | 
+ f5  | 99
+ f6  | stringy
+(5 rows)
+
+-- extract_path, extract_path_as_text
+select jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
+ jsonb_extract_path 
+--------------------
+ "stringy"
+(1 row)
+
+select jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
+ jsonb_extract_path 
+--------------------
+ {"f3": 1}
+(1 row)
+
+select jsonb_extract_path('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',0::text);
+ jsonb_extract_path 
+--------------------
+ "f3"
+(1 row)
+
+select jsonb_extract_path('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',1::text);
+ jsonb_extract_path 
+--------------------
+ 1
+(1 row)
+
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
+ jsonb_extract_path_text 
+-------------------------
+ stringy
+(1 row)
+
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
+ jsonb_extract_path_text 
+-------------------------
+ {"f3": 1}
+(1 row)
+
+select jsonb_extract_path_text('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',0::text);
+ jsonb_extract_path_text 
+-------------------------
+ f3
+(1 row)
+
+select jsonb_extract_path_text('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',1::text);
+ jsonb_extract_path_text 
+-------------------------
+ 1
+(1 row)
+
+-- extract_path nulls
+select jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":null,"f6":"stringy"}}','f4','f5') is null as expect_false;
+ expect_false 
+--------------
+ f
+(1 row)
+
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":{"f5":null,"f6":"stringy"}}','f4','f5') is null as expect_true;
+ expect_true 
+-------------
+ t
+(1 row)
+
+select jsonb_extract_path('{"f2":{"f3":1},"f4":[0,1,2,null]}','f4','3') is null as expect_false;
+ expect_false 
+--------------
+ f
+(1 row)
+
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":[0,1,2,null]}','f4','3') is null as expect_true;
+ expect_true 
+-------------
+ t
+(1 row)
+
+-- extract_path operators
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f4','f6'];
+ ?column?  
+-----------
+ "stringy"
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f2'];
+ ?column?  
+-----------
+ {"f3": 1}
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f2','0'];
+ ?column? 
+----------
+ "f3"
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f2','1'];
+ ?column? 
+----------
+ 1
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f4','f6'];
+ ?column? 
+----------
+ stringy
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f2'];
+ ?column?  
+-----------
+ {"f3": 1}
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f2','0'];
+ ?column? 
+----------
+ f3
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f2','1'];
+ ?column? 
+----------
+ 1
+(1 row)
+
+-- same using array literals
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f4,f6}';
+ ?column?  
+-----------
+ "stringy"
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f2}';
+ ?column?  
+-----------
+ {"f3": 1}
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f2,0}';
+ ?column? 
+----------
+ "f3"
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f2,1}';
+ ?column? 
+----------
+ 1
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f4,f6}';
+ ?column? 
+----------
+ stringy
+(1 row)
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f2}';
+ ?column?  
+-----------
+ {"f3": 1}
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f2,0}';
+ ?column? 
+----------
+ f3
+(1 row)
+
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f2,1}';
+ ?column? 
+----------
+ 1
+(1 row)
+
+-- array_elements
+select jsonb_array_elements('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false]');
+    jsonb_array_elements    
+----------------------------
+ 1
+ true
+ [1, [2, 3]]
+ null
+ {"f1": 1, "f2": [7, 8, 9]}
+ false
+(6 rows)
+
+select * from jsonb_array_elements('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false]') q;
+           value            
+----------------------------
+ 1
+ true
+ [1, [2, 3]]
+ null
+ {"f1": 1, "f2": [7, 8, 9]}
+ false
+(6 rows)
+
+-- populate_record
+create type jbpop as (a text, b int, c timestamp);
+select * from jsonb_populate_record(null::jbpop,'{"a":"blurfl","x":43.2}') q;
+   a    | b | c 
+--------+---+---
+ blurfl |   | 
+(1 row)
+
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}') q;
+   a    | b |            c             
+--------+---+--------------------------
+ blurfl | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+select * from jsonb_populate_record(null::jbpop,'{"a":"blurfl","x":43.2}', true) q;
+   a    | b | c 
+--------+---+---
+ blurfl |   | 
+(1 row)
+
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}', true) q;
+   a    | b |            c             
+--------+---+--------------------------
+ blurfl | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+select * from jsonb_populate_record(null::jbpop,'{"a":[100,200,false],"x":43.2}', true) q;
+         a         | b | c 
+-------------------+---+---
+ [100, 200, false] |   | 
+(1 row)
+
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":[100,200,false],"x":43.2}', true) q;
+         a         | b |            c             
+-------------------+---+--------------------------
+ [100, 200, false] | 3 | Mon Dec 31 15:30:56 2012
+(1 row)
+
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}', true) q;
+ERROR:  invalid input syntax for type timestamp: "[100, 200, false]"
+-- populate_recordset
+select * from jsonb_populate_recordset(null::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',false) q;
+   a    | b |            c             
+--------+---+--------------------------
+ blurfl |   | 
+        | 3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',false) q;
+   a    | b  |            c             
+--------+----+--------------------------
+ blurfl | 99 | 
+ def    |  3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(null::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+   a    | b |            c             
+--------+---+--------------------------
+ blurfl |   | 
+        | 3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+   a    | b  |            c             
+--------+----+--------------------------
+ blurfl | 99 | 
+ def    |  3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+        a        | b  |            c             
+-----------------+----+--------------------------
+ [100, 200, 300] | 99 | 
+ {"z": true}     |  3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"c":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+ERROR:  invalid input syntax for type timestamp: "[100, 200, 300]"
+-- using the default use_json_as_text argument
+select * from jsonb_populate_recordset(null::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
+   a    | b |            c             
+--------+---+--------------------------
+ blurfl |   | 
+        | 3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
+   a    | b  |            c             
+--------+----+--------------------------
+ blurfl | 99 | 
+ def    |  3 | Fri Jan 20 10:42:53 2012
+(2 rows)
+
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+ERROR:  cannot populate with a nested object unless use_json_as_text is true
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"c":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+ERROR:  cannot populate with a nested object unless use_json_as_text is true
+-- handling of unicode surrogate pairs
+select octet_length((jsonb '{ "a":  "\ud83d\ude04\ud83d\udc36" }' -> 'a')::text)  as correct_in_utf8;
+ERROR:  invalid input syntax for type json
+LINE 1: select octet_length((jsonb '{ "a":  "\ud83d\ude04\ud83d\udc3...
+                                   ^
+DETAIL:  Unicode escape values cannot be used for code point values above 007F when the server encoding is not UTF8.
+CONTEXT:  JSON data, line 1: { "a":...
+select jsonb '{ "a":  "\ud83d\ud83d" }' -> 'a'; -- 2 high surrogates in a row
+ERROR:  invalid input syntax for type json
+LINE 1: select jsonb '{ "a":  "\ud83d\ud83d" }' -> 'a';
+                     ^
+DETAIL:  Unicode high surrogate must not follow a high surrogate.
+CONTEXT:  JSON data, line 1: { "a":...
+select jsonb '{ "a":  "\ude04\ud83d" }' -> 'a'; -- surrogates in wrong order
+ERROR:  invalid input syntax for type json
+LINE 1: select jsonb '{ "a":  "\ude04\ud83d" }' -> 'a';
+                     ^
+DETAIL:  Unicode low surrogate must follow a high surrogate.
+CONTEXT:  JSON data, line 1: { "a":...
+select jsonb '{ "a":  "\ud83dX" }' -> 'a'; -- orphan high surrogate
+ERROR:  invalid input syntax for type json
+LINE 1: select jsonb '{ "a":  "\ud83dX" }' -> 'a';
+                     ^
+DETAIL:  Unicode low surrogate must follow a high surrogate.
+CONTEXT:  JSON data, line 1: { "a":...
+select jsonb '{ "a":  "\ude04X" }' -> 'a'; -- orphan low surrogate
+ERROR:  invalid input syntax for type json
+LINE 1: select jsonb '{ "a":  "\ude04X" }' -> 'a';
+                     ^
+DETAIL:  Unicode low surrogate must follow a high surrogate.
+CONTEXT:  JSON data, line 1: { "a":...
+--handling of simple unicode escapes
+select jsonb '{ "a":  "the Copyright \u00a9 sign" }' ->> 'a' as correct_in_utf8;
+ERROR:  invalid input syntax for type json
+LINE 1: select jsonb '{ "a":  "the Copyright \u00a9 sign" }' ->> 'a'...
+                     ^
+DETAIL:  Unicode escape values cannot be used for code point values above 007F when the server encoding is not UTF8.
+CONTEXT:  JSON data, line 1: { "a":...
+select jsonb '{ "a":  "dollar \u0024 character" }' ->> 'a' as correct_everywhere;
+ correct_everywhere 
+--------------------
+ dollar $ character
+(1 row)
+
+select jsonb '{ "a":  "null \u0000 escape" }' ->> 'a' as not_unescaped;
+   not_unescaped    
+--------------------
+ null \u0000 escape
+(1 row)
+
+--jsonb_typeof() function
+select value, jsonb_typeof(value)
+  from (values (jsonb '123.4'),
+               (jsonb '-1'),
+               (jsonb '"foo"'),
+               (jsonb 'true'),
+               (jsonb 'false'),
+               (jsonb 'null'),
+               (jsonb '[1, 2, 3]'),
+               (jsonb '[]'),
+               (jsonb '{"x":"foo", "y":123}'),
+               (jsonb '{}'),
+               (NULL::jsonb))
+      as data(value);
+         value          | jsonb_typeof 
+------------------------+--------------
+ 123.4                  | number
+ -1                     | number
+ "foo"                  | string
+ true                   | boolean
+ false                  | boolean
+ null                   | null
+ [1, 2, 3]              | array
+ []                     | array
+ {"x": "foo", "y": 123} | object
+ {}                     | object
+                        | 
+(11 rows)
+
diff --git a/src/test/regress/parallel_schedule b/src/test/regress/parallel_schedule
index 5758b07..51238be 100644
--- a/src/test/regress/parallel_schedule
+++ b/src/test/regress/parallel_schedule
@@ -98,8 +98,7 @@ test: event_trigger
 # ----------
 # Another group of parallel tests
 # ----------
-test: select_views portals_p2 foreign_key cluster dependency guc bitmapops combocid tsearch tsdicts foreign_data window xmlmap functional_deps advisory_lock json indirect_toast
-
+test: select_views portals_p2 foreign_key cluster dependency guc bitmapops combocid tsearch tsdicts foreign_data window xmlmap functional_deps advisory_lock json jsonb indirect_toast
 # ----------
 # Another group of parallel tests
 # NB: temp.sql does a reconnect which transiently uses 2 connections,
diff --git a/src/test/regress/serial_schedule b/src/test/regress/serial_schedule
index 78348f5..e414ec1 100644
--- a/src/test/regress/serial_schedule
+++ b/src/test/regress/serial_schedule
@@ -121,6 +121,7 @@ test: xmlmap
 test: functional_deps
 test: advisory_lock
 test: json
+test: jsonb
 test: indirect_toast
 test: plancache
 test: limit
diff --git a/src/test/regress/sql/jsonb.sql b/src/test/regress/sql/jsonb.sql
new file mode 100644
index 0000000..38959a8
--- /dev/null
+++ b/src/test/regress/sql/jsonb.sql
@@ -0,0 +1,265 @@
+-- Strings.
+SELECT '""'::jsonb;				-- OK.
+SELECT $$''$$::jsonb;			-- ERROR, single quotes are not allowed
+SELECT '"abc"'::jsonb;			-- OK
+SELECT '"abc'::jsonb;			-- ERROR, quotes not closed
+SELECT '"abc
+def"'::jsonb;					-- ERROR, unescaped newline in string constant
+SELECT '"\n\"\\"'::jsonb;		-- OK, legal escapes
+SELECT '"\v"'::jsonb;			-- ERROR, not a valid JSON escape
+SELECT '"\u"'::jsonb;			-- ERROR, incomplete escape
+SELECT '"\u00"'::jsonb;			-- ERROR, incomplete escape
+SELECT '"\u000g"'::jsonb;		-- ERROR, g is not a hex digit
+SELECT '"\u0000"'::jsonb;		-- OK, legal escape
+-- use octet_length here so we don't get an odd unicode char in the
+-- output
+SELECT octet_length('"\uaBcD"'::jsonb::text); -- OK, uppercase and lower case both OK
+
+-- Numbers.
+SELECT '1'::jsonb;				-- OK
+SELECT '0'::jsonb;				-- OK
+SELECT '01'::jsonb;				-- ERROR, not valid according to JSON spec
+SELECT '0.1'::jsonb;				-- OK
+SELECT '9223372036854775808'::jsonb;	-- OK, even though it's too large for int8
+SELECT '1e100'::jsonb;			-- OK
+SELECT '1.3e100'::jsonb;			-- OK
+SELECT '1f2'::jsonb;				-- ERROR
+SELECT '0.x1'::jsonb;			-- ERROR
+SELECT '1.3ex100'::jsonb;		-- ERROR
+
+-- Arrays.
+SELECT '[]'::jsonb;				-- OK
+SELECT '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'::jsonb;  -- OK
+SELECT '[1,2]'::jsonb;			-- OK
+SELECT '[1,2,]'::jsonb;			-- ERROR, trailing comma
+SELECT '[1,2'::jsonb;			-- ERROR, no closing bracket
+SELECT '[1,[2]'::jsonb;			-- ERROR, no closing bracket
+
+-- Objects.
+SELECT '{}'::jsonb;				-- OK
+SELECT '{"abc"}'::jsonb;			-- ERROR, no value
+SELECT '{"abc":1}'::jsonb;		-- OK
+SELECT '{1:"abc"}'::jsonb;		-- ERROR, keys must be strings
+SELECT '{"abc",1}'::jsonb;		-- ERROR, wrong separator
+SELECT '{"abc"=1}'::jsonb;		-- ERROR, totally wrong separator
+SELECT '{"abc"::1}'::jsonb;		-- ERROR, another wrong separator
+SELECT '{"abc":1,"def":2,"ghi":[3,4],"hij":{"klm":5,"nop":[6]}}'::jsonb; -- OK
+SELECT '{"abc":1:2}'::jsonb;		-- ERROR, colon in wrong spot
+SELECT '{"abc":1,3}'::jsonb;		-- ERROR, no value
+
+-- Miscellaneous stuff.
+SELECT 'true'::jsonb;			-- OK
+SELECT 'false'::jsonb;			-- OK
+SELECT 'null'::jsonb;			-- OK
+SELECT ' true '::jsonb;			-- OK, even with extra whitespace
+SELECT 'true false'::jsonb;		-- ERROR, too many values
+SELECT 'true, false'::jsonb;		-- ERROR, too many values
+SELECT 'truf'::jsonb;			-- ERROR, not a keyword
+SELECT 'trues'::jsonb;			-- ERROR, not a keyword
+SELECT ''::jsonb;				-- ERROR, no value
+SELECT '    '::jsonb;			-- ERROR, no value
+
+-- make sure jsonb is passed through json generators without being escaped
+-- NB: the echoed comment line in expected/jsonb.out must be kept in sync
+select array_to_json(ARRAY [jsonb '{"a":1}', jsonb '{"b":[2,3]}']);
+
+
+-- jsonb extraction functions
+
+CREATE TEMP TABLE test_jsonb (
+       json_type text,
+       test_json jsonb
+);
+
+INSERT INTO test_jsonb VALUES
+('scalar','"a scalar"'),
+('array','["zero", "one","two",null,"four","five"]'),
+('object','{"field1":"val1","field2":"val2","field3":null}');
+
+SELECT test_json -> 'x'
+FROM test_jsonb
+WHERE json_type = 'scalar';
+
+SELECT test_json -> 'x'
+FROM test_jsonb
+WHERE json_type = 'array';
+
+SELECT test_json -> 'x'
+FROM test_jsonb
+WHERE json_type = 'object';
+
+SELECT test_json->'field2'
+FROM test_jsonb
+WHERE json_type = 'object';
+
+SELECT test_json->>'field2'
+FROM test_jsonb
+WHERE json_type = 'object';
+
+SELECT test_json -> 2
+FROM test_jsonb
+WHERE json_type = 'scalar';
+
+SELECT test_json -> 2
+FROM test_jsonb
+WHERE json_type = 'array';
+
+SELECT test_json -> 2
+FROM test_jsonb
+WHERE json_type = 'object';
+
+SELECT test_json->>2
+FROM test_jsonb
+WHERE json_type = 'array';
+
+SELECT jsonb_object_keys(test_json)
+FROM test_jsonb
+WHERE json_type = 'scalar';
+
+SELECT jsonb_object_keys(test_json)
+FROM test_jsonb
+WHERE json_type = 'array';
+
+SELECT jsonb_object_keys(test_json)
+FROM test_jsonb
+WHERE json_type = 'object';
+
+-- nulls
+
+select (test_json->'field3') is null as expect_false
+from test_jsonb
+where json_type = 'object';
+
+select (test_json->>'field3') is null as expect_true
+from test_jsonb
+where json_type = 'object';
+
+select (test_json->3) is null as expect_false
+from test_jsonb
+where json_type = 'array';
+
+select (test_json->>3) is null as expect_true
+from test_jsonb
+where json_type = 'array';
+
+
+-- array length
+
+SELECT jsonb_array_length('[1,2,3,{"f1":1,"f2":[5,6]},4]');
+
+SELECT jsonb_array_length('[]');
+
+SELECT jsonb_array_length('{"f1":1,"f2":[5,6]}');
+
+SELECT jsonb_array_length('4');
+
+-- each
+
+select jsonb_each('{"f1":[1,2,3],"f2":{"f3":1},"f4":null}');
+select * from jsonb_each('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":99,"f6":"stringy"}') q;
+
+select jsonb_each_text('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":"null"}');
+select * from jsonb_each_text('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":99,"f6":"stringy"}') q;
+
+-- extract_path, extract_path_as_text
+
+select jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
+select jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
+select jsonb_extract_path('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',0::text);
+select jsonb_extract_path('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',1::text);
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
+select jsonb_extract_path_text('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',0::text);
+select jsonb_extract_path_text('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',1::text);
+
+-- extract_path nulls
+
+select jsonb_extract_path('{"f2":{"f3":1},"f4":{"f5":null,"f6":"stringy"}}','f4','f5') is null as expect_false;
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":{"f5":null,"f6":"stringy"}}','f4','f5') is null as expect_true;
+select jsonb_extract_path('{"f2":{"f3":1},"f4":[0,1,2,null]}','f4','3') is null as expect_false;
+select jsonb_extract_path_text('{"f2":{"f3":1},"f4":[0,1,2,null]}','f4','3') is null as expect_true;
+
+-- extract_path operators
+
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f4','f6'];
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f2'];
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f2','0'];
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>array['f2','1'];
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f4','f6'];
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f2'];
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f2','0'];
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>array['f2','1'];
+
+-- same using array literals
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f4,f6}';
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f2}';
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f2,0}';
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>'{f2,1}';
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f4,f6}';
+select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f2}';
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f2,0}';
+select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::jsonb#>>'{f2,1}';
+
+-- array_elements
+
+select jsonb_array_elements('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false]');
+select * from jsonb_array_elements('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false]') q;
+
+
+-- populate_record
+create type jbpop as (a text, b int, c timestamp);
+
+select * from jsonb_populate_record(null::jbpop,'{"a":"blurfl","x":43.2}') q;
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}') q;
+
+select * from jsonb_populate_record(null::jbpop,'{"a":"blurfl","x":43.2}', true) q;
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":"blurfl","x":43.2}', true) q;
+
+select * from jsonb_populate_record(null::jbpop,'{"a":[100,200,false],"x":43.2}', true) q;
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"a":[100,200,false],"x":43.2}', true) q;
+select * from jsonb_populate_record(row('x',3,'2012-12-31 15:30:56')::jbpop,'{"c":[100,200,false],"x":43.2}', true) q;
+
+-- populate_recordset
+
+select * from jsonb_populate_recordset(null::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',false) q;
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',false) q;
+select * from jsonb_populate_recordset(null::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"c":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]',true) q;
+
+-- using the default use_json_as_text argument
+
+select * from jsonb_populate_recordset(null::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+select * from jsonb_populate_recordset(row('def',99,null)::jbpop,'[{"c":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
+
+
+-- handling of unicode surrogate pairs
+
+select octet_length((jsonb '{ "a":  "\ud83d\ude04\ud83d\udc36" }' -> 'a')::text)  as correct_in_utf8;
+select jsonb '{ "a":  "\ud83d\ud83d" }' -> 'a'; -- 2 high surrogates in a row
+select jsonb '{ "a":  "\ude04\ud83d" }' -> 'a'; -- surrogates in wrong order
+select jsonb '{ "a":  "\ud83dX" }' -> 'a'; -- orphan high surrogate
+select jsonb '{ "a":  "\ude04X" }' -> 'a'; -- orphan low surrogate
+
+--handling of simple unicode escapes
+
+select jsonb '{ "a":  "the Copyright \u00a9 sign" }' ->> 'a' as correct_in_utf8;
+select jsonb '{ "a":  "dollar \u0024 character" }' ->> 'a' as correct_everywhere;
+select jsonb '{ "a":  "null \u0000 escape" }' ->> 'a' as not_unescaped;
+
+--jsonb_typeof() function
+select value, jsonb_typeof(value)
+  from (values (jsonb '123.4'),
+               (jsonb '-1'),
+               (jsonb '"foo"'),
+               (jsonb 'true'),
+               (jsonb 'false'),
+               (jsonb 'null'),
+               (jsonb '[1, 2, 3]'),
+               (jsonb '[]'),
+               (jsonb '{"x":"foo", "y":123}'),
+               (jsonb '{}'),
+               (NULL::jsonb))
+      as data(value);
