proposal: ignore null fields in constructors based on composite types that are not relation types
Hello
On the Czech Postgres mailing list a user asked for serialization to JSON
without NULL values.
He needs behaviour similar to what XMLFOREST has - it ignores NULLs.
In some situations and conversions, when your table is more or less a sparse matrix,
this request is valid:
postgres=# select hstore(omega) from omega;
hstore
─────────────────────────────────
"a"=>"10", "b"=>"20", "c"=>NULL
"a"=>NULL, "b"=>"20", "c"=>"30"
(2 rows)
Proposed function
postgres=# select hstore(omega, ignore_nulls := true) from omega;
hstore
─────────────────────────────────
"a"=>"10", "b"=>"20"
"b"=>"20", "c"=>"30"
(2 rows)
What do you think about this proposal?
Regards
Pavel
Hello
I am sending a small patch that allows ignoring NULLs in the row_to_json function.
It allows a significant size reduction of the generated JSON documents.
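For illustration, a sketch of the intended behaviour with the three-argument form added by the attached patch (the second argument is the existing pretty flag, the third is the new ignore_nulls flag; exact output per the regression tests in the patch):
postgres=# select row_to_json(t, false, true) from (values (10, null, 30)) as t(a,b,c);
   row_to_json
-----------------
 {"a":10,"c":30}
(1 row)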
Regards
Pavel
2014-05-25 7:53 GMT+02:00 Pavel Stehule <pavel.stehule@gmail.com>:
Hello
On the Czech Postgres mailing list a user asked for serialization to JSON
without NULL values. He needs behaviour similar to what XMLFOREST has - it ignores NULLs.
In some situations and conversions, when your table is more or less a sparse matrix,
this request is valid:
postgres=# select hstore(omega) from omega;
hstore
─────────────────────────────────
"a"=>"10", "b"=>"20", "c"=>NULL
"a"=>NULL, "b"=>"20", "c"=>"30"
(2 rows)
Proposed function
postgres=# select hstore(omega, ignore_nulls := true) from omega;
hstore
─────────────────────────────────
"a"=>"10", "b"=>"20"
"b"=>"20", "c"=>"30"
(2 rows)
What do you think about this proposal?
Regards
Pavel
Attachments:
row_to_json_choosy.patch (text/x-patch)
commit c97bb5fd8b6c68a1141430d47964ebc3488f4429
Author: Pavel Stehule <pavel.stehule@gooddata.com>
Date: Sat Jun 28 17:40:47 2014 +0200
initial
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index f475458..d46b90d 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10290,11 +10290,12 @@ table2-mapping
</row>
<row>
<entry>
- <literal>row_to_json(record [, pretty_bool])</literal>
+ <literal>row_to_json(record [, pretty_bool [, ignore_nulls] ])</literal>
</entry>
<entry>
Returns the row as a JSON object. Line feeds will be added between
- level-1 elements if <parameter>pretty_bool</parameter> is true.
+ level-1 elements if <parameter>pretty_bool</parameter> is true. Ignore
+ NULL when <parameter>ignore_nulls</parameter> is true.
</entry>
<entry><literal>row_to_json(row(1,'foo'))</literal></entry>
<entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 841dd1a..22e1dd6 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -77,7 +77,8 @@ static void report_invalid_token(JsonLexContext *lex);
static int report_json_context(JsonLexContext *lex);
static char *extract_mb_char(char *s);
static void composite_to_json(Datum composite, StringInfo result,
- bool use_line_feeds);
+ bool use_line_feeds,
+ bool ignore_nulls);
static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
Datum *vals, bool *nulls, int *valcount,
JsonTypeCategory tcategory, Oid outfuncoid,
@@ -1355,7 +1356,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
array_to_json_internal(val, result, false);
break;
case JSONTYPE_COMPOSITE:
- composite_to_json(val, result, false);
+ composite_to_json(val, result, false, false);
break;
case JSONTYPE_BOOL:
outputstr = DatumGetBool(val) ? "true" : "false";
@@ -1560,7 +1561,8 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
* Turn a composite / record into JSON.
*/
static void
-composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
+composite_to_json(Datum composite, StringInfo result, bool use_line_feeds,
+ bool ignore_nulls)
{
HeapTupleHeader td;
Oid tupType;
@@ -1599,6 +1601,12 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
if (tupdesc->attrs[i]->attisdropped)
continue;
+ val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+ /* Don't serialize NULL field when we don't want it */
+ if (isnull && ignore_nulls)
+ continue;
+
if (needsep)
appendStringInfoString(result, sep);
needsep = true;
@@ -1607,8 +1615,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
escape_json(result, attname);
appendStringInfoChar(result, ':');
- val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
-
if (isnull)
{
tcategory = JSONTYPE_NULL;
@@ -1700,7 +1706,7 @@ row_to_json(PG_FUNCTION_ARGS)
result = makeStringInfo();
- composite_to_json(array, result, false);
+ composite_to_json(array, result, false, false);
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
@@ -1717,7 +1723,25 @@ row_to_json_pretty(PG_FUNCTION_ARGS)
result = makeStringInfo();
- composite_to_json(array, result, use_line_feeds);
+ composite_to_json(array, result, use_line_feeds, false);
+
+ PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
+}
+
+/*
+ * SQL function row_to_json(row, prettybool, ignore_nulls)
+ */
+extern Datum
+row_to_json_pretty_choosy(PG_FUNCTION_ARGS)
+{
+ Datum array = PG_GETARG_DATUM(0);
+ bool use_line_feeds = PG_GETARG_BOOL(1);
+ bool ignore_nulls = PG_GETARG_BOOL(2);
+ StringInfo result;
+
+ result = makeStringInfo();
+
+ composite_to_json(array, result, use_line_feeds, ignore_nulls);
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index 762ce6c..ccd3db5 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4197,6 +4197,8 @@ DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1
DESCR("map row to json");
DATA(insert OID = 3156 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2249 16" _null_ _null_ _null_ _null_ row_to_json_pretty _null_ _null_ _null_ ));
DESCR("map row to json with optional pretty printing");
+DATA(insert OID = 3255 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 3 0 114 "2249 16 16" _null_ _null_ _null_ _null_ row_to_json_pretty_choosy _null_ _null_ _null_ ));
+DESCR("map row to json with optional pretty printing and skipping NULLs");
DATA(insert OID = 3173 ( json_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ json_agg_transfn _null_ _null_ _null_ ));
DESCR("json aggregate transition function");
DATA(insert OID = 3174 ( json_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 114 "2281" _null_ _null_ _null_ _null_ json_agg_finalfn _null_ _null_ _null_ ));
diff --git a/src/include/utils/json.h b/src/include/utils/json.h
index 82cc48b..6e43dec 100644
--- a/src/include/utils/json.h
+++ b/src/include/utils/json.h
@@ -26,6 +26,7 @@ extern Datum array_to_json(PG_FUNCTION_ARGS);
extern Datum array_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum row_to_json(PG_FUNCTION_ARGS);
extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
+extern Datum row_to_json_pretty_choosy(PG_FUNCTION_ARGS);
extern Datum to_json(PG_FUNCTION_ARGS);
extern Datum json_agg_transfn(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index d1e32a1..cd4d8fc 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -403,6 +403,28 @@ SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),
{"f1":[5,6,7,8,9,10]}
(1 row)
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+ row_to_json
+------------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10,"b":null,"c":null}
+ {"a":null,"b":null,"c":null}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+ row_to_json
+------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10}
+ {}
+(3 rows)
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
to_json
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index bc8bb62..b687ec5 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -100,6 +100,16 @@ FROM rows q;
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
Hi Pavel,
You have said that XMLFOREST has something which ignores NULLs; what is that?
Will you please provide an example?
I am NOT sure, but here you are trying to omit the entire field from the output
when its value is NULL. That will add extra effort at the other end, which
consumes this output: that application needs to know all the fields and then
substitute values for the missing ones. However, since we have an optional
argument for ignoring NULLs we are safe; the application can use it as it
chooses.
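For what it's worth, a consumer can fill in a missing field on its own, since json's ->> operator returns NULL for an absent key; a small sketch:
select coalesce(('{"a":10}'::json) ->> 'b', '<missing>') as b;   -- yields '<missing>'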
Well, apart from that, I tried reviewing the patch. The patch applied, but make
failed with the following error.
make[3]: Entering directory `/home/jeevan/pg_master/src/backend/catalog'
cd ../../../src/include/catalog && '/usr/bin/perl' ./duplicate_oids
3255
make[3]: *** [postgres.bki] Error 1
Please run the unused_oids script to find an unused OID.
However, I had a quick look over the code changes. At first glance it looks
good, but I still need to check it at the SQL level and do a code walk-through.
Waiting for an updated patch.
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Hello
2014-08-22 12:21 GMT+02:00 Jeevan Chalke <jeevan.chalke@enterprisedb.com>:
Hi Pavel,
You have said that XMLFOREST has something which ignores NULLs; what is
that?
Will you please provide an example?
I was partially wrong - XMLFOREST always ignores NULLs
postgres=# select xmlforest(10 as a,20 as b,null as c);
xmlforest
--------------------
<a>10</a><b>20</b>
(1 row)
so if you want empty elements, you should use xmlelement and
xmlconcat
postgres=# select xmlconcat(xmlforest(10 as a,20 as b), xmlelement(name c,
null));
xmlconcat
------------------------
<a>10</a><b>20</b><c/>
(1 row)
I am NOT sure, but here you are trying to omit the entire field from the output
when its value is NULL. That will add extra effort at the other end, which
consumes this output: that application needs to know all the fields and then
substitute values for the missing ones. However, since we have an optional
argument for ignoring NULLs we are safe; the application can use it as it
chooses.
with my patch, you can make that decision - a lot of REST services don't
distinguish between an empty and a missing tag - and some developers prefer to
remove empty tags to reduce message size.
Well, apart from that, I tried reviewing the patch. The patch applied, but make
failed with the following error.
make[3]: Entering directory `/home/jeevan/pg_master/src/backend/catalog'
cd ../../../src/include/catalog && '/usr/bin/perl' ./duplicate_oids
3255
make[3]: *** [postgres.bki] Error 1
Please run the unused_oids script to find an unused OID.
it needs remastering
an updated patch is in the attachment
However, I had a quick look over the code changes. At first glance it looks
good, but I still need to check it at the SQL level and do a code walk-through.
Waiting for an updated patch.
thank you for the review
Regards
Pavel
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Attachments:
row_to_json_choosy-2.patch (text/x-patch)
commit e1dd47ca1f881a2ce41117526a536555c23c2d81
Author: Pavel Stehule <pavel.stehule@gooddata.com>
Date: Sat Aug 23 07:19:46 2014 +0200
remastering, change used oid in pg_proc
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index c715ca2..a27aff4 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10294,11 +10294,12 @@ table2-mapping
</row>
<row>
<entry>
- <literal>row_to_json(record [, pretty_bool])</literal>
+ <literal>row_to_json(record [, pretty_bool [, ignore_nulls] ])</literal>
</entry>
<entry>
Returns the row as a JSON object. Line feeds will be added between
- level-1 elements if <parameter>pretty_bool</parameter> is true.
+ level-1 elements if <parameter>pretty_bool</parameter> is true. Ignore
+ NULL when <parameter>ignore_nulls</parameter> is true.
</entry>
<entry><literal>row_to_json(row(1,'foo'))</literal></entry>
<entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 494a028..f35db04 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -79,7 +79,8 @@ static void report_invalid_token(JsonLexContext *lex);
static int report_json_context(JsonLexContext *lex);
static char *extract_mb_char(char *s);
static void composite_to_json(Datum composite, StringInfo result,
- bool use_line_feeds);
+ bool use_line_feeds,
+ bool ignore_nulls);
static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
Datum *vals, bool *nulls, int *valcount,
JsonTypeCategory tcategory, Oid outfuncoid,
@@ -1362,7 +1363,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
array_to_json_internal(val, result, false);
break;
case JSONTYPE_COMPOSITE:
- composite_to_json(val, result, false);
+ composite_to_json(val, result, false, false);
break;
case JSONTYPE_BOOL:
outputstr = DatumGetBool(val) ? "true" : "false";
@@ -1591,7 +1592,8 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
* Turn a composite / record into JSON.
*/
static void
-composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
+composite_to_json(Datum composite, StringInfo result, bool use_line_feeds,
+ bool ignore_nulls)
{
HeapTupleHeader td;
Oid tupType;
@@ -1630,6 +1632,12 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
if (tupdesc->attrs[i]->attisdropped)
continue;
+ val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+ /* Don't serialize NULL field when we don't want it */
+ if (isnull && ignore_nulls)
+ continue;
+
if (needsep)
appendStringInfoString(result, sep);
needsep = true;
@@ -1638,8 +1646,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
escape_json(result, attname);
appendStringInfoChar(result, ':');
- val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
-
if (isnull)
{
tcategory = JSONTYPE_NULL;
@@ -1731,7 +1737,7 @@ row_to_json(PG_FUNCTION_ARGS)
result = makeStringInfo();
- composite_to_json(array, result, false);
+ composite_to_json(array, result, false, false);
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
@@ -1748,7 +1754,25 @@ row_to_json_pretty(PG_FUNCTION_ARGS)
result = makeStringInfo();
- composite_to_json(array, result, use_line_feeds);
+ composite_to_json(array, result, use_line_feeds, false);
+
+ PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
+}
+
+/*
+ * SQL function row_to_json(row, prettybool, ignore_nulls)
+ */
+extern Datum
+row_to_json_pretty_choosy(PG_FUNCTION_ARGS)
+{
+ Datum array = PG_GETARG_DATUM(0);
+ bool use_line_feeds = PG_GETARG_BOOL(1);
+ bool ignore_nulls = PG_GETARG_BOOL(2);
+ StringInfo result;
+
+ result = makeStringInfo();
+
+ composite_to_json(array, result, use_line_feeds, ignore_nulls);
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index a84595e..0c7bd0d 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4199,6 +4199,8 @@ DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1
DESCR("map row to json");
DATA(insert OID = 3156 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2249 16" _null_ _null_ _null_ _null_ row_to_json_pretty _null_ _null_ _null_ ));
DESCR("map row to json with optional pretty printing");
+DATA(insert OID = 3256 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 3 0 114 "2249 16 16" _null_ _null_ _null_ _null_ row_to_json_pretty_choosy _null_ _null_ _null_ ));
+DESCR("map row to json with optional pretty printing and skipping NULLs");
DATA(insert OID = 3173 ( json_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ json_agg_transfn _null_ _null_ _null_ ));
DESCR("json aggregate transition function");
DATA(insert OID = 3174 ( json_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 114 "2281" _null_ _null_ _null_ _null_ json_agg_finalfn _null_ _null_ _null_ ));
diff --git a/src/include/utils/json.h b/src/include/utils/json.h
index 82cc48b..6e43dec 100644
--- a/src/include/utils/json.h
+++ b/src/include/utils/json.h
@@ -26,6 +26,7 @@ extern Datum array_to_json(PG_FUNCTION_ARGS);
extern Datum array_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum row_to_json(PG_FUNCTION_ARGS);
extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
+extern Datum row_to_json_pretty_choosy(PG_FUNCTION_ARGS);
extern Datum to_json(PG_FUNCTION_ARGS);
extern Datum json_agg_transfn(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index bb4d9ed..fd40117 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -403,6 +403,28 @@ SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),
{"f1":[5,6,7,8,9,10]}
(1 row)
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+ row_to_json
+------------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10,"b":null,"c":null}
+ {"a":null,"b":null,"c":null}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+ row_to_json
+------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10}
+ {}
+(3 rows)
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
to_json
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index c980132..48c3e37 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -100,6 +100,16 @@ FROM rows q;
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
Hi Pavel,
The patch looks good to me, and I found no issues as such.
However, here are my optional suggestions:
1. Frankly, I did not like the name of the function "row_to_json_pretty_choosy".
Something like "row_to_json_pretty_ignore_nulls" seems better to me.
2. To use the ignore-nulls feature, I always have to pass the pretty flag,
which seems weird.
Since we do support named arguments, can we avoid that?
No idea how difficult it is. If we have default arguments on this
function then we do not need the one- and two-argument variations of this
function either, and we can use named arguments to omit the ones we do not
need. Just a thought.
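For instance, just to sketch the calling convention I have in mind (assuming a default for the pretty flag, which nothing in the current patch provides yet):
SELECT row_to_json(x, ignore_nulls := true) FROM x;   -- pretty falls back to its default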
The rest looks good to me.
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
2014-09-01 12:33 GMT+02:00 Jeevan Chalke <jeevan.chalke@enterprisedb.com>:
Hi Pavel,
The patch looks good to me, and I found no issues as such.
However, here are my optional suggestions:
1. Frankly, I did not like the name of the function
"row_to_json_pretty_choosy".
Something like "row_to_json_pretty_ignore_nulls" seems better to me.
should be - I have no better name
2. To use the ignore-nulls feature, I always have to pass the pretty flag,
which seems weird. Since we do support named arguments, can we avoid that?
No idea how difficult it is. If we have default arguments on this
function then we do not need the one- and two-argument variations of this
function either, and we can use named arguments to omit the ones we do not
need. Just a thought.
it needs a redesign of the original implementation; we should change the API to
use default values with named parameters
but it doesn't help too much (although it can be a little bit more readable)
instead of row_to_json(x, false, true)
it would be
row_to_json(x, ignore_null := true)
it is not too much work, but I need names for the parameters
Regards
Pavel
The rest looks good to me.
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Hi Pavel,
it needs a redesign of the original implementation; we should change the API
to use default values with named parameters
but it doesn't help too much (although it can be a little bit more readable)
instead of row_to_json(x, false, true)
it would be
row_to_json(x, ignore_null := true)
it is not too much work, but I need names for the parameters
I have tried adding dummy names (a, b, c) in the pg_proc entry you added,
but that is not sufficient. We need default values on these arguments for a
call like row_to_json(x, ignore_null := true) to work.
It was not trivial, so I have not put much thought into it.
For the names, I would choose (row, pretty, ignore_nulls) or similar.
However, it was just my thought.
If it is too complex or not so useful, then we can ignore it.
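Roughly, what I have in mind is a default-argument declaration at the SQL level; a minimal sketch only, with illustrative parameter names:
CREATE OR REPLACE FUNCTION
  row_to_json(rowval record, pretty boolean DEFAULT false, ignore_nulls boolean DEFAULT false)
RETURNS json
LANGUAGE INTERNAL
STRICT STABLE
AS 'row_to_json';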
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Hello
2014-09-02 13:54 GMT+02:00 Jeevan Chalke <jeevan.chalke@enterprisedb.com>:
Hi Pavel,
it needs a redesign of the original implementation; we should change the API
to use default values with named parameters
but it doesn't help too much (although it can be a little bit more readable)
instead of row_to_json(x, false, true)
it would be
row_to_json(x, ignore_null := true)
it is not too much work, but I need names for the parameters
I have tried adding dummy names (a, b, c) in the pg_proc entry you added,
but that is not sufficient. We need default values on these arguments for a
call like row_to_json(x, ignore_null := true) to work.
It was not trivial, so I have not put much thought into it.
For the names, I would choose (row, pretty, ignore_nulls) or similar.
However, it was just my thought.
If it is too complex or not so useful, then we can ignore it.
here is the patch
Regards
Pavel
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Attachments:
row_to_json_ignorenull-2.patch (text/x-patch)
commit 5e9a1a2401d2abae3c100dbe3b4c8fdde4a73e48
Author: Pavel Stehule <pavel.stehule@gooddata.com>
Date: Sat Jun 28 17:40:47 2014 +0200
initial
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index 722640b..0d915c1 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10294,11 +10294,12 @@ table2-mapping
</row>
<row>
<entry>
- <literal>row_to_json(record [, pretty_bool])</literal>
+ <literal>row_to_json(rowval record [, pretty bool [, ignore_nulls bool] ])</literal>
</entry>
<entry>
Returns the row as a JSON object. Line feeds will be added between
- level-1 elements if <parameter>pretty_bool</parameter> is true.
+ level-1 elements if <parameter>pretty_bool</parameter> is true. Ignore
+ NULL when <parameter>ignore_nulls</parameter> is true.
</entry>
<entry><literal>row_to_json(row(1,'foo'))</literal></entry>
<entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
diff --git a/src/backend/catalog/system_views.sql b/src/backend/catalog/system_views.sql
index 1bde175..02cf965 100644
--- a/src/backend/catalog/system_views.sql
+++ b/src/backend/catalog/system_views.sql
@@ -867,3 +867,10 @@ RETURNS interval
LANGUAGE INTERNAL
STRICT IMMUTABLE
AS 'make_interval';
+
+CREATE OR REPLACE FUNCTION
+ row_to_json(rowval record, pretty boolean DEFAULT false, ignore_nulls boolean DEFAULT false)
+RETURNS json
+LANGUAGE INTERNAL
+STRICT STABLE
+AS 'row_to_json';
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 494a028..8f5f8a8 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -79,7 +79,8 @@ static void report_invalid_token(JsonLexContext *lex);
static int report_json_context(JsonLexContext *lex);
static char *extract_mb_char(char *s);
static void composite_to_json(Datum composite, StringInfo result,
- bool use_line_feeds);
+ bool use_line_feeds,
+ bool ignore_nulls);
static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
Datum *vals, bool *nulls, int *valcount,
JsonTypeCategory tcategory, Oid outfuncoid,
@@ -1362,7 +1363,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
array_to_json_internal(val, result, false);
break;
case JSONTYPE_COMPOSITE:
- composite_to_json(val, result, false);
+ composite_to_json(val, result, false, false);
break;
case JSONTYPE_BOOL:
outputstr = DatumGetBool(val) ? "true" : "false";
@@ -1591,7 +1592,8 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
* Turn a composite / record into JSON.
*/
static void
-composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
+composite_to_json(Datum composite, StringInfo result, bool use_line_feeds,
+ bool ignore_nulls)
{
HeapTupleHeader td;
Oid tupType;
@@ -1630,6 +1632,12 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
if (tupdesc->attrs[i]->attisdropped)
continue;
+ val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+ /* Don't serialize NULL field when we don't want it */
+ if (isnull && ignore_nulls)
+ continue;
+
if (needsep)
appendStringInfoString(result, sep);
needsep = true;
@@ -1638,8 +1646,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
escape_json(result, attname);
appendStringInfoChar(result, ':');
- val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
-
if (isnull)
{
tcategory = JSONTYPE_NULL;
@@ -1721,34 +1727,19 @@ array_to_json_pretty(PG_FUNCTION_ARGS)
}
/*
- * SQL function row_to_json(row)
+ * SQL function row_to_json(row record, pretty bool, ignore_nulls bool)
*/
extern Datum
row_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
- StringInfo result;
-
- result = makeStringInfo();
-
- composite_to_json(array, result, false);
-
- PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
-}
-
-/*
- * SQL function row_to_json(row, prettybool)
- */
-extern Datum
-row_to_json_pretty(PG_FUNCTION_ARGS)
-{
- Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
+ bool ignore_nulls = PG_GETARG_BOOL(2);
StringInfo result;
result = makeStringInfo();
- composite_to_json(array, result, use_line_feeds);
+ composite_to_json(array, result, use_line_feeds, ignore_nulls);
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index 5176ed0..c0d749a 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4203,10 +4203,8 @@ DATA(insert OID = 3153 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s
DESCR("map array to json");
DATA(insert OID = 3154 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2277 16" _null_ _null_ _null_ _null_ array_to_json_pretty _null_ _null_ _null_ ));
DESCR("map array to json with optional pretty printing");
-DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2249" _null_ _null_ _null_ _null_ row_to_json _null_ _null_ _null_ ));
+DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2249 16 16" _null_ _null_ "{rowval,pretty,ignore_nulls}" _null_ row_to_json _null_ _null_ _null_ ));
DESCR("map row to json");
-DATA(insert OID = 3156 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2249 16" _null_ _null_ _null_ _null_ row_to_json_pretty _null_ _null_ _null_ ));
-DESCR("map row to json with optional pretty printing");
DATA(insert OID = 3173 ( json_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ json_agg_transfn _null_ _null_ _null_ ));
DESCR("json aggregate transition function");
DATA(insert OID = 3174 ( json_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 114 "2281" _null_ _null_ _null_ _null_ json_agg_finalfn _null_ _null_ _null_ ));
diff --git a/src/include/utils/json.h b/src/include/utils/json.h
index 82cc48b..6e43dec 100644
--- a/src/include/utils/json.h
+++ b/src/include/utils/json.h
@@ -26,6 +26,7 @@ extern Datum array_to_json(PG_FUNCTION_ARGS);
extern Datum array_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum row_to_json(PG_FUNCTION_ARGS);
extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
+extern Datum row_to_json_pretty_choosy(PG_FUNCTION_ARGS);
extern Datum to_json(PG_FUNCTION_ARGS);
extern Datum json_agg_transfn(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index bb4d9ed..fd40117 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -403,6 +403,28 @@ SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),
{"f1":[5,6,7,8,9,10]}
(1 row)
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+ row_to_json
+------------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10,"b":null,"c":null}
+ {"a":null,"b":null,"c":null}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+ row_to_json
+------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10}
+ {}
+(3 rows)
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
to_json
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index c980132..48c3e37 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -100,6 +100,16 @@ FROM rows q;
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
2014-09-03 7:05 GMT+02:00 Pavel Stehule <pavel.stehule@gmail.com>:
Hello
2014-09-02 13:54 GMT+02:00 Jeevan Chalke <jeevan.chalke@enterprisedb.com>:
Hi Pavel,
it needs a redesign of the original implementation; we should change the API
to use default values with named parameters
but it doesn't help too much (although it can be a little bit more readable)
instead of row_to_json(x, false, true)
it would be
row_to_json(x, ignore_null := true)
it is not too much work, but I need names for the parameters
I have tried adding dummy names (a, b, c) in the pg_proc entry you added,
but that is not sufficient. We need default values on these arguments for a
call like row_to_json(x, ignore_null := true) to work.
It was not trivial, so I have not put much thought into it.
For the names, I would choose (row, pretty, ignore_nulls) or similar.
I cannot use "row" because it is keyword - I used "rowval"
Regards
Pavel
However, it was just my thought.
If it is too complex or not so useful, then we can ignore it.
here is the patch
Regards
Pavel
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Hi Pavel,
Here are a few more comments on the new implementation.
1.
/*
- * SQL function row_to_json(row)
+ * SQL function row_to_json(row record, pretty bool, ignore_nulls bool)
*/
In the above comment, the parameter name "row" should be changed to "rowval".
2.
-DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2249" _null_ _null_ _null_ _null_ row_to_json _null_ _null_ _null_ ));
+DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2249 16 16" _null_ _null_ "{rowval,pretty,ignore_nulls}" _null_ row_to_json _null_ _null_ _null_ ));
The number of arguments (pronargs) should be 3 now. It does get updated when we
create the function again with default values, but we should not have the
inconsistency here in the first place.
3.
extern Datum row_to_json(PG_FUNCTION_ARGS);
extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
+extern Datum row_to_json_pretty_choosy(PG_FUNCTION_ARGS);
extern Datum to_json(PG_FUNCTION_ARGS);
With this new implementation, we have NOT added the row_to_json_pretty_choosy()
function, so that added line needs to be removed. Also, we now have only one
function with default arguments and have removed row_to_json_pretty() as well,
so its extern declaration needs to go too.
4.
Can we have a couple of test cases with named arguments, including one where
the pretty parameter is skipped?
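For example, something along these lines (only a sketch, in the style of the existing tests):
WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
                              (NULL, NULL, NULL)) g(a,b,c))
  SELECT row_to_json(x, ignore_nulls := true) FROM x;
WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
                              (NULL, NULL, NULL)) g(a,b,c))
  SELECT row_to_json(x, pretty := true, ignore_nulls := true) FROM x;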
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Hi
2014-09-03 9:27 GMT+02:00 Jeevan Chalke <jeevan.chalke@enterprisedb.com>:
Hi Pavel,
Here are a few more comments on the new implementation.
1.
/*
- * SQL function row_to_json(row)
+ * SQL function row_to_json(row record, pretty bool, ignore_nulls bool)
*/
In the above comment, the parameter name "row" should be changed to "rowval".
2.
-DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2249" _null_ _null_ _null_ _null_ row_to_json _null_ _null_ _null_ ));
+DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2249 16 16" _null_ _null_ "{rowval,pretty,ignore_nulls}" _null_ row_to_json _null_ _null_ _null_ ));
The number of arguments (pronargs) should be 3 now. It does get updated when we
create the function again with default values, but we should not have the
inconsistency here in the first place.
3.
extern Datum row_to_json(PG_FUNCTION_ARGS);
extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
+extern Datum row_to_json_pretty_choosy(PG_FUNCTION_ARGS);
extern Datum to_json(PG_FUNCTION_ARGS);
With this new implementation, we have NOT added the row_to_json_pretty_choosy()
function, so that added line needs to be removed. Also, we now have only one
function with default arguments and have removed row_to_json_pretty() as well,
so its extern declaration needs to go too.
4.
Can we have a couple of test cases with named arguments, including one where
the pretty parameter is skipped?
done
Regards
Pavel
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Attachments:
help-variables-11.patch (text/x-patch)
commit 17a0708a0466cc8ff9e8debd0a7e9062eebe3a61
Author: Pavel Stehule <pavel.stehule@gooddata.com>
Date: Wed Aug 27 22:47:07 2014 +0200
access to help_variables and usage from psql via psql command
diff --git a/doc/src/sgml/ref/psql-ref.sgml b/doc/src/sgml/ref/psql-ref.sgml
index db314c3..9bb14e9 100644
--- a/doc/src/sgml/ref/psql-ref.sgml
+++ b/doc/src/sgml/ref/psql-ref.sgml
@@ -568,6 +568,15 @@ EOF
</listitem>
</varlistentry>
+ <varlistentry>
+ <term><option>--help-variables</></term>
+ <listitem>
+ <para>
+ Show help about <application>psql</application> variables,
+ and exit.
+ </para>
+ </listitem>
+ </varlistentry>
</variablelist>
</refsect1>
@@ -2572,10 +2581,12 @@ testdb=> <userinput>\setenv LESS -imx4F</userinput>
<varlistentry>
- <term><literal>\?</literal></term>
+ <term><literal>\? [ options | variables ]</literal></term>
<listitem>
<para>
- Shows help information about the backslash commands.
+ Shows help information about the backslash commands. This command can have a
+ option "variables" or "options" to take help for psql configuration variables
+ or psql command line options.
</para>
</listitem>
</varlistentry>
diff --git a/src/bin/psql/command.c b/src/bin/psql/command.c
index e16b4d5..987a79f 100644
--- a/src/bin/psql/command.c
+++ b/src/bin/psql/command.c
@@ -1503,7 +1503,19 @@ exec_command(const char *cmd,
/* \? -- slash command help */
else if (strcmp(cmd, "?") == 0)
- slashUsage(pset.popt.topt.pager);
+ {
+ char *opt0 = psql_scan_slash_option(scan_state,
+ OT_NORMAL, NULL, false);
+
+ if (!opt0)
+ slashUsage(pset.popt.topt.pager);
+ else if (strcmp(opt0, "variables") == 0)
+ help_variables(pset.popt.topt.pager);
+ else if (strcmp(opt0, "options") == 0)
+ usage(pset.popt.topt.pager);
+ else
+ slashUsage(pset.popt.topt.pager);
+ }
#if 0
diff --git a/src/bin/psql/help.c b/src/bin/psql/help.c
index f8f000f..4f29f2a 100644
--- a/src/bin/psql/help.c
+++ b/src/bin/psql/help.c
@@ -46,11 +46,12 @@
#define ON(var) (var ? _("on") : _("off"))
void
-usage(void)
+usage(unsigned short int pager)
{
const char *env;
const char *user;
char *errstr;
+ FILE *output;
/* Find default user, in case we need it. */
user = getenv("PGUSER");
@@ -64,77 +65,82 @@ usage(void)
}
}
- printf(_("psql is the PostgreSQL interactive terminal.\n\n"));
- printf(_("Usage:\n"));
- printf(_(" psql [OPTION]... [DBNAME [USERNAME]]\n\n"));
+ output = PageOutput(59, pager);
+
+ fprintf(output, _("psql is the PostgreSQL interactive terminal.\n\n"));
+ fprintf(output, _("Usage:\n"));
+ fprintf(output, _(" psql [OPTION]... [DBNAME [USERNAME]]\n\n"));
- printf(_("General options:\n"));
+ fprintf(output, _("General options:\n"));
/* Display default database */
env = getenv("PGDATABASE");
if (!env)
env = user;
- printf(_(" -c, --command=COMMAND run only single command (SQL or internal) and exit\n"));
- printf(_(" -d, --dbname=DBNAME database name to connect to (default: \"%s\")\n"), env);
- printf(_(" -f, --file=FILENAME execute commands from file, then exit\n"));
- printf(_(" -l, --list list available databases, then exit\n"));
- printf(_(" -v, --set=, --variable=NAME=VALUE\n"
- " set psql variable NAME to VALUE\n"));
- printf(_(" -V, --version output version information, then exit\n"));
- printf(_(" -X, --no-psqlrc do not read startup file (~/.psqlrc)\n"));
- printf(_(" -1 (\"one\"), --single-transaction\n"
+ fprintf(output, _(" -c, --command=COMMAND run only single command (SQL or internal) and exit\n"));
+ fprintf(output, _(" -d, --dbname=DBNAME database name to connect to (default: \"%s\")\n"), env);
+ fprintf(output, _(" -f, --file=FILENAME execute commands from file, then exit\n"));
+ fprintf(output, _(" -l, --list list available databases, then exit\n"));
+ fprintf(output, _(" -v, --set=, --variable=NAME=VALUE\n"
+ " set psql variable NAME to VALUE e.g.: -v ON_ERROR_STOP=1\n"));
+ fprintf(output, _(" -V, --version output version information, then exit\n"));
+ fprintf(output, _(" -X, --no-psqlrc do not read startup file (~/.psqlrc)\n"));
+ fprintf(output, _(" -1 (\"one\"), --single-transaction\n"
" execute as a single transaction (if non-interactive)\n"));
- printf(_(" -?, --help show this help, then exit\n"));
-
- printf(_("\nInput and output options:\n"));
- printf(_(" -a, --echo-all echo all input from script\n"));
- printf(_(" -b, --echo-errors echo failed commands\n"));
- printf(_(" -e, --echo-queries echo commands sent to server\n"));
- printf(_(" -E, --echo-hidden display queries that internal commands generate\n"));
- printf(_(" -L, --log-file=FILENAME send session log to file\n"));
- printf(_(" -n, --no-readline disable enhanced command line editing (readline)\n"));
- printf(_(" -o, --output=FILENAME send query results to file (or |pipe)\n"));
- printf(_(" -q, --quiet run quietly (no messages, only query output)\n"));
- printf(_(" -s, --single-step single-step mode (confirm each query)\n"));
- printf(_(" -S, --single-line single-line mode (end of line terminates SQL command)\n"));
-
- printf(_("\nOutput format options:\n"));
- printf(_(" -A, --no-align unaligned table output mode\n"));
- printf(_(" -F, --field-separator=STRING\n"
+ fprintf(output, _(" -?, --help show this help, then exit\n"));
+ fprintf(output, _(" --help-variables show a list of all specially treated variables, then exit\n"));
+
+ fprintf(output, _("\nInput and output options:\n"));
+ fprintf(output, _(" -a, --echo-all echo all input from script\n"));
+ fprintf(output, _(" -b, --echo-errors echo failed commands\n"));
+ fprintf(output, _(" -e, --echo-queries echo commands sent to server\n"));
+ fprintf(output, _(" -E, --echo-hidden display queries that internal commands generate\n"));
+ fprintf(output, _(" -L, --log-file=FILENAME send session log to file\n"));
+ fprintf(output, _(" -n, --no-readline disable enhanced command line editing (readline)\n"));
+ fprintf(output, _(" -o, --output=FILENAME send query results to file (or |pipe)\n"));
+ fprintf(output, _(" -q, --quiet run quietly (no messages, only query output)\n"));
+ fprintf(output, _(" -s, --single-step single-step mode (confirm each query)\n"));
+ fprintf(output, _(" -S, --single-line single-line mode (end of line terminates SQL command)\n"));
+
+ fprintf(output, _("\nOutput format options:\n"));
+ fprintf(output, _(" -A, --no-align unaligned table output mode\n"));
+ fprintf(output, _(" -F, --field-separator=STRING\n"
" field separator for unaligned output (default: \"%s\")\n"),
DEFAULT_FIELD_SEP);
- printf(_(" -H, --html HTML table output mode\n"));
- printf(_(" -P, --pset=VAR[=ARG] set printing option VAR to ARG (see \\pset command)\n"));
- printf(_(" -R, --record-separator=STRING\n"
+ fprintf(output, _(" -H, --html HTML table output mode\n"));
+ fprintf(output, _(" -P, --pset=VAR[=ARG] set printing option VAR to ARG (see \\pset command)\n"));
+ fprintf(output, _(" -R, --record-separator=STRING\n"
" record separator for unaligned output (default: newline)\n"));
- printf(_(" -t, --tuples-only print rows only\n"));
- printf(_(" -T, --table-attr=TEXT set HTML table tag attributes (e.g., width, border)\n"));
- printf(_(" -x, --expanded turn on expanded table output\n"));
- printf(_(" -z, --field-separator-zero\n"
+ fprintf(output, _(" -t, --tuples-only print rows only\n"));
+ fprintf(output, _(" -T, --table-attr=TEXT set HTML table tag attributes (e.g., width, border)\n"));
+ fprintf(output, _(" -x, --expanded turn on expanded table output\n"));
+ fprintf(output, _(" -z, --field-separator-zero\n"
" set field separator for unaligned output to zero byte\n"));
- printf(_(" -0, --record-separator-zero\n"
+ fprintf(output, _(" -0, --record-separator-zero\n"
" set record separator for unaligned output to zero byte\n"));
- printf(_("\nConnection options:\n"));
+ fprintf(output, _("\nConnection options:\n"));
/* Display default host */
env = getenv("PGHOST");
- printf(_(" -h, --host=HOSTNAME database server host or socket directory (default: \"%s\")\n"),
+ fprintf(output, _(" -h, --host=HOSTNAME database server host or socket directory (default: \"%s\")\n"),
env ? env : _("local socket"));
/* Display default port */
env = getenv("PGPORT");
- printf(_(" -p, --port=PORT database server port (default: \"%s\")\n"),
+ fprintf(output, _(" -p, --port=PORT database server port (default: \"%s\")\n"),
env ? env : DEF_PGPORT_STR);
/* Display default user */
env = getenv("PGUSER");
if (!env)
env = user;
- printf(_(" -U, --username=USERNAME database user name (default: \"%s\")\n"), env);
- printf(_(" -w, --no-password never prompt for password\n"));
- printf(_(" -W, --password force password prompt (should happen automatically)\n"));
+ fprintf(output, _(" -U, --username=USERNAME database user name (default: \"%s\")\n"), env);
+ fprintf(output, _(" -w, --no-password never prompt for password\n"));
+ fprintf(output, _(" -W, --password force password prompt (should happen automatically)\n"));
- printf(_("\nFor more information, type \"\\?\" (for internal commands) or \"\\help\" (for SQL\n"
+ fprintf(output, _("\nFor more information, type \"\\?\" (for internal commands) or \"\\help\" (for SQL\n"
"commands) from within psql, or consult the psql section in the PostgreSQL\n"
"documentation.\n\n"));
- printf(_("Report bugs to <pgsql-bugs@postgresql.org>.\n"));
+ fprintf(output, _("Report bugs to <pgsql-bugs@postgresql.org>.\n"));
+
+ ClosePager(output);
}
@@ -159,11 +165,18 @@ slashUsage(unsigned short int pager)
fprintf(output, _(" \\copyright show PostgreSQL usage and distribution terms\n"));
fprintf(output, _(" \\g [FILE] or ; execute query (and send results to file or |pipe)\n"));
fprintf(output, _(" \\gset [PREFIX] execute query and store results in psql variables\n"));
- fprintf(output, _(" \\h [NAME] help on syntax of SQL commands, * for all commands\n"));
fprintf(output, _(" \\q quit psql\n"));
fprintf(output, _(" \\watch [SEC] execute query every SEC seconds\n"));
fprintf(output, "\n");
+ fprintf(output, _("Help\n"));
+
+ fprintf(output, _(" \\? description of all psql commands\n"));
+ fprintf(output, _(" \\? options description of psql options\n"));
+ fprintf(output, _(" \\? variables description of all psql configuration variables\n"));
+ fprintf(output, _(" \\h [NAME] help on syntax of SQL commands, * for all commands\n"));
+ fprintf(output, "\n");
+
fprintf(output, _("Query Buffer\n"));
fprintf(output, _(" \\e [FILE] [LINE] edit the query buffer (or file) with external editor\n"));
fprintf(output, _(" \\ef [FUNCNAME [LINE]] edit function definition with external editor\n"));
@@ -280,6 +293,108 @@ slashUsage(unsigned short int pager)
}
+/*
+ * show list of available variables (options) from command line
+ */
+void
+help_variables(unsigned short int pager)
+{
+ FILE *output;
+
+ output = PageOutput(86, pager);
+
+ fprintf(output, _("List of specially treated variables.\n"));
+
+ fprintf(output, _("psql variables:\n"));
+ fprintf(output, _("Usage:\n"));
+ fprintf(output, _(" psql --set=NAME=VALUE\n or \\set NAME VALUE in interactive mode\n\n"));
+
+ fprintf(output, _(" AUTOCOMMIT if set, successful SQL commands are automatically committed\n"));
+ fprintf(output, _(" COMP_KEYWORD_CASE determine the case used to complete SQL keywords\n"
+ " [lower, upper, preserve-lower, preserve-upper]\n"));
+ fprintf(output, _(" DBNAME the currently connected database name\n"));
+ fprintf(output, _(" ECHO control what input is written to standard output\n"
+ " [all, errors, none, queries]\n"));
+ fprintf(output, _(" ECHO_HIDDEN display internal queries executed by backslash commands when it is set\n"
+ " or with [noexec] just show without execution\n"));
+ fprintf(output, _(" ENCODING current client character set encoding\n"));
+ fprintf(output, _(" FETCH_COUNT the number of result rows to fetch and display at a time\n"
+ " (default: 0=unlimited)\n"));
+ fprintf(output, _(" HISTCONTROL control history list [ignorespace, ignoredups, ignoreboth]\n"));
+ fprintf(output, _(" HISTFILE file name used to store the history list\n"));
+ fprintf(output, _(" HISTSIZE the number of commands to store in the command history\n"));
+ fprintf(output, _(" HOST the currently connected database server\n"));
+ fprintf(output, _(" IGNOREEOF if unset, sending an EOF to interactive session terminates application\n"));
+ fprintf(output, _(" LASTOID the value of last affected OID\n"));
+ fprintf(output, _(" ON_ERROR_ROLLBACK if set, an error doesn't stop a transaction (uses implicit SAVEPOINTs)\n"));
+ fprintf(output, _(" ON_ERROR_STOP stop batch execution after error\n"));
+ fprintf(output, _(" PORT server port of the current connection\n"));
+ fprintf(output, _(" PROMPT1 specify standard the psql prompt\n"));
+ fprintf(output, _(" PROMPT2 specify the psql prompt when statement continues from previous line\n"));
+ fprintf(output, _(" PROMPT3 specify the psql prompt used by COPY statement\n"));
+ fprintf(output, _(" QUIET run quietly (same as -q option)\n"));
+ fprintf(output, _(" SINGLELINE end of line terminates SQL command mode (same as -S option)\n"));
+ fprintf(output, _(" SINGLESTEP single-step mode (same as -s option)\n"));
+ fprintf(output, _(" USER the currently connected database user\n"));
+ fprintf(output, _(" VERBOSITY control verbosity of error reports [default, verbose, terse]\n"));
+
+ fprintf(output, _("\nPrinting options:\n"));
+ fprintf(output, _("Usage:\n"));
+ fprintf(output, _(" psql --pset=NAME[=VALUE]\n or \\pset NAME [VALUE] in interactive mode\n\n"));
+
+ fprintf(output, _(" border border style (number)\n"));
+ fprintf(output, _(" columns set the target width for the wrapped format\n"));
+ fprintf(output, _(" expanded (or x) toggle expanded output\n"));
+ fprintf(output, _(" fieldsep field separator for unaligned output (default '|')\n"));
+ fprintf(output, _(" fieldsep_zero set field separator in unaligned mode to zero\n"));
+ fprintf(output, _(" format set output format [unaligned, aligned, wrapped, html, latex, ..]\n"));
+ fprintf(output, _(" footer enable or disable display of the table footer [on, off]\n"));
+ fprintf(output, _(" linestyle set the border line drawing style [ascii, old-ascii, unicode]\n"));
+ fprintf(output, _(" null set the string to be printed in place of a null value\n"));
+ fprintf(output, _(" numericlocale enable or disable display of a locale-specific character to separate\n"
+ " groups of digits [on, off]\n"));
+ fprintf(output, _(" pager control when an external pager is used [yes, no, always]\n"));
+ fprintf(output, _(" recordsep specify the record (line) separator to use in unaligned output format\n"));
+ fprintf(output, _(" recordsep_zero set the record separator to use in unaligned output format to a zero byte.\n"));
+ fprintf(output, _(" tableattr (or T) specify attributes for table tag in html format or proportional\n"
+ " column width of left aligned data type in latex format\n"));
+ fprintf(output, _(" title set the table title for any subsequently printed tables\n"));
+ fprintf(output, _(" tuples_only if set, only actual table data is shown\n"));
+
+ fprintf(output, _("\nEnvironment variables:\n"));
+ fprintf(output, _("Usage:\n"));
+
+#ifndef WIN32
+ fprintf(output, _(" NAME=VALUE [NAME=VALUE] psql ...\n or \\setenv NAME [VALUE] in interactive mode\n\n"));
+#else
+ fprintf(output, _(" set NAME=VALUE\n psql ...\n or \\setenv NAME VALUE in interactive mode\n\n"));
+#endif
+
+ fprintf(output, _(" COLUMNS number of columns for wrapped format\n"));
+ fprintf(output, _(" PAGER name of external pager program\n"));
+ fprintf(output, _(" PGAPPNAME same as the application_name connection parameter\n"));
+ fprintf(output, _(" PGDATABASE same as the dbname connection parameter\n"));
+ fprintf(output, _(" PGHOST same as the host connection parameter\n"));
+ fprintf(output, _(" PGPORT same as the port connection parameter\n"));
+ fprintf(output, _(" PGUSER same as the user connection parameter\n"));
+ fprintf(output, _(" PGPASSWORD connection password (not recommended)\n"));
+ fprintf(output, _(" PGPASSFILE password file name\n"));
+ fprintf(output, _(" PSQL_EDITOR, EDITOR, VISUAL\n"
+ " editor used by the \\e and \\ef commands\n"));
+ fprintf(output, _(" PSQL_EDITOR_LINENUMBER_ARG\n"
+ " how to specify a line number when invoking the editor\n"));
+ fprintf(output, _(" PSQL_HISTORY alternative location for the command history file\n"));
+ fprintf(output, _(" PSQLRC alternative location for the user's .psqlrc file\n"));
+ fprintf(output, _(" SHELL shell used by the \\! command\n"));
+ fprintf(output, _(" TMPDIR directory for temporary files\n"));
+
+ fprintf(output, _("\nFor more information consult the psql section in the PostgreSQL\n"
+ "documentation.\n\n"));
+ fprintf(output, _("Report bugs to <pgsql-bugs@postgresql.org>.\n"));
+
+ ClosePager(output);
+}
+
/*
* helpSQL -- help with SQL commands
diff --git a/src/bin/psql/help.h b/src/bin/psql/help.h
index 054d5a4..bab360d 100644
--- a/src/bin/psql/help.h
+++ b/src/bin/psql/help.h
@@ -8,10 +8,12 @@
#ifndef HELP_H
#define HELP_H
-void usage(void);
+void usage(unsigned short int pager);
void slashUsage(unsigned short int pager);
+void help_variables(unsigned short int pager);
+
void helpSQL(const char *topic, unsigned short int pager);
void print_copyright(void);
diff --git a/src/bin/psql/startup.c b/src/bin/psql/startup.c
index 5a397e8..af68e13 100644
--- a/src/bin/psql/startup.c
+++ b/src/bin/psql/startup.c
@@ -77,6 +77,8 @@ static void process_psqlrc_file(char *filename);
static void showVersion(void);
static void EstablishVariableSpace(void);
+#define NOPAGER 0
+
/*
*
* main
@@ -97,7 +99,7 @@ main(int argc, char *argv[])
{
if (strcmp(argv[1], "--help") == 0 || strcmp(argv[1], "-?") == 0)
{
- usage();
+ usage(NOPAGER);
exit(EXIT_SUCCESS);
}
if (strcmp(argv[1], "--version") == 0 || strcmp(argv[1], "-V") == 0)
@@ -384,6 +386,7 @@ parse_psql_options(int argc, char *argv[], struct adhoc_opts * options)
{"expanded", no_argument, NULL, 'x'},
{"no-psqlrc", no_argument, NULL, 'X'},
{"help", no_argument, NULL, '?'},
+ {"help-variables", no_argument, NULL, 1},
{NULL, 0, NULL, 0}
};
@@ -559,7 +562,7 @@ parse_psql_options(int argc, char *argv[], struct adhoc_opts * options)
/* Actual help option given */
if (strcmp(argv[optind - 1], "--help") == 0 || strcmp(argv[optind - 1], "-?") == 0)
{
- usage();
+ usage(NOPAGER);
exit(EXIT_SUCCESS);
}
/* unknown option reported by getopt */
@@ -570,6 +573,9 @@ parse_psql_options(int argc, char *argv[], struct adhoc_opts * options)
exit(EXIT_FAILURE);
}
break;
+ case 1:
+ help_variables(NOPAGER);
+ exit(EXIT_SUCCESS);
default:
fprintf(stderr, _("Try \"%s --help\" for more information.\n"),
pset.progname);
diff --git a/src/bin/psql/tab-complete.c b/src/bin/psql/tab-complete.c
index 8288b41..ec6182b 100644
--- a/src/bin/psql/tab-complete.c
+++ b/src/bin/psql/tab-complete.c
@@ -3524,6 +3524,13 @@ psql_completion(const char *text, int start, int end)
/* Backslash commands */
/* TODO: \dc \dd \dl */
+ else if (strcmp(prev_wd, "\\?") == 0)
+ {
+ static const char *const my_list[] =
+ {"options", "variables", NULL};
+
+ COMPLETE_WITH_LIST_CS(my_list);
+ }
else if (strcmp(prev_wd, "\\connect") == 0 || strcmp(prev_wd, "\\c") == 0)
COMPLETE_WITH_QUERY(Query_for_list_of_databases);
Hi Pavel,
You have attached the wrong patch.
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
I am sorry
too many patches
Regards
Pavel
2014-09-04 7:35 GMT+02:00 Jeevan Chalke <jeevan.chalke@enterprisedb.com>:
Hi Pavel,
You have attached the wrong patch.
Thanks
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Attachments:
row_to_json_ignorenull-3.patch (text/x-patch)
commit eb67c8d3e5e443d9cad1ef08fe2b4747eac933d9
Author: Pavel Stehule <pavel.stehule@gooddata.com>
Date: Sat Jun 28 17:40:47 2014 +0200
as function with default parameters
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index 722640b..0d915c1 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10294,11 +10294,12 @@ table2-mapping
</row>
<row>
<entry>
- <literal>row_to_json(record [, pretty_bool])</literal>
+ <literal>row_to_json(rowval record [, pretty bool [, ignore_nulls bool] ])</literal>
</entry>
<entry>
Returns the row as a JSON object. Line feeds will be added between
- level-1 elements if <parameter>pretty_bool</parameter> is true.
+ level-1 elements if <parameter>pretty_bool</parameter> is true. Ignore
+ NULL when <parameter>ignore_nulls</parameter> is true.
</entry>
<entry><literal>row_to_json(row(1,'foo'))</literal></entry>
<entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
diff --git a/src/backend/catalog/system_views.sql b/src/backend/catalog/system_views.sql
index 1bde175..02cf965 100644
--- a/src/backend/catalog/system_views.sql
+++ b/src/backend/catalog/system_views.sql
@@ -867,3 +867,10 @@ RETURNS interval
LANGUAGE INTERNAL
STRICT IMMUTABLE
AS 'make_interval';
+
+CREATE OR REPLACE FUNCTION
+ row_to_json(rowval record, pretty boolean DEFAULT false, ignore_nulls boolean DEFAULT false)
+RETURNS json
+LANGUAGE INTERNAL
+STRICT STABLE
+AS 'row_to_json';
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 494a028..9f445ff 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -79,7 +79,8 @@ static void report_invalid_token(JsonLexContext *lex);
static int report_json_context(JsonLexContext *lex);
static char *extract_mb_char(char *s);
static void composite_to_json(Datum composite, StringInfo result,
- bool use_line_feeds);
+ bool use_line_feeds,
+ bool ignore_nulls);
static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
Datum *vals, bool *nulls, int *valcount,
JsonTypeCategory tcategory, Oid outfuncoid,
@@ -1362,7 +1363,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
array_to_json_internal(val, result, false);
break;
case JSONTYPE_COMPOSITE:
- composite_to_json(val, result, false);
+ composite_to_json(val, result, false, false);
break;
case JSONTYPE_BOOL:
outputstr = DatumGetBool(val) ? "true" : "false";
@@ -1591,7 +1592,8 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
* Turn a composite / record into JSON.
*/
static void
-composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
+composite_to_json(Datum composite, StringInfo result, bool use_line_feeds,
+ bool ignore_nulls)
{
HeapTupleHeader td;
Oid tupType;
@@ -1630,6 +1632,12 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
if (tupdesc->attrs[i]->attisdropped)
continue;
+ val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+ /* Don't serialize NULL field when we don't want it */
+ if (isnull && ignore_nulls)
+ continue;
+
if (needsep)
appendStringInfoString(result, sep);
needsep = true;
@@ -1638,8 +1646,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
escape_json(result, attname);
appendStringInfoChar(result, ':');
- val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
-
if (isnull)
{
tcategory = JSONTYPE_NULL;
@@ -1721,34 +1727,19 @@ array_to_json_pretty(PG_FUNCTION_ARGS)
}
/*
- * SQL function row_to_json(row)
+ * SQL function row_to_json(rowval record, pretty bool, ignore_nulls bool)
*/
extern Datum
row_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
- StringInfo result;
-
- result = makeStringInfo();
-
- composite_to_json(array, result, false);
-
- PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
-}
-
-/*
- * SQL function row_to_json(row, prettybool)
- */
-extern Datum
-row_to_json_pretty(PG_FUNCTION_ARGS)
-{
- Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
+ bool ignore_nulls = PG_GETARG_BOOL(2);
StringInfo result;
result = makeStringInfo();
- composite_to_json(array, result, use_line_feeds);
+ composite_to_json(array, result, use_line_feeds, ignore_nulls);
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index 5176ed0..5aeadc3 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4203,10 +4203,8 @@ DATA(insert OID = 3153 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s
DESCR("map array to json");
DATA(insert OID = 3154 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2277 16" _null_ _null_ _null_ _null_ array_to_json_pretty _null_ _null_ _null_ ));
DESCR("map array to json with optional pretty printing");
-DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2249" _null_ _null_ _null_ _null_ row_to_json _null_ _null_ _null_ ));
+DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 3 0 114 "2249 16 16" _null_ _null_ "{rowval,pretty,ignore_nulls}" _null_ row_to_json _null_ _null_ _null_ ));
DESCR("map row to json");
-DATA(insert OID = 3156 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2249 16" _null_ _null_ _null_ _null_ row_to_json_pretty _null_ _null_ _null_ ));
-DESCR("map row to json with optional pretty printing");
DATA(insert OID = 3173 ( json_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ json_agg_transfn _null_ _null_ _null_ ));
DESCR("json aggregate transition function");
DATA(insert OID = 3174 ( json_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 114 "2281" _null_ _null_ _null_ _null_ json_agg_finalfn _null_ _null_ _null_ ));
diff --git a/src/include/utils/json.h b/src/include/utils/json.h
index 82cc48b..d54aafb 100644
--- a/src/include/utils/json.h
+++ b/src/include/utils/json.h
@@ -25,7 +25,6 @@ extern Datum json_send(PG_FUNCTION_ARGS);
extern Datum array_to_json(PG_FUNCTION_ARGS);
extern Datum array_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum row_to_json(PG_FUNCTION_ARGS);
-extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum to_json(PG_FUNCTION_ARGS);
extern Datum json_agg_transfn(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index bb4d9ed..ecc29da 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -397,12 +397,70 @@ FROM rows q;
"y":"txt3"}
(3 rows)
+SELECT row_to_json(q,pretty := true)
+FROM rows q;
+ row_to_json
+--------------
+ {"x":1, +
+ "y":"txt1"}
+ {"x":2, +
+ "y":"txt2"}
+ {"x":3, +
+ "y":"txt3"}
+(3 rows)
+
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
row_to_json
-----------------------
{"f1":[5,6,7,8,9,10]}
(1 row)
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+ row_to_json
+------------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10,"b":null,"c":null}
+ {"a":null,"b":null,"c":null}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+ row_to_json
+------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10}
+ {}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true) FROM x;
+ row_to_json
+------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10}
+ {}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true, pretty := true) FROM x;
+ row_to_json
+-------------
+ {"a":10, +
+ "b":20, +
+ "c":30}
+ {"a":10}
+ {}
+(3 rows)
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
to_json
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index c980132..1cd20d4 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -98,8 +98,32 @@ FROM generate_series(1,3) AS x;
SELECT row_to_json(q,true)
FROM rows q;
+SELECT row_to_json(q,pretty := true)
+FROM rows q;
+
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true, pretty := true) FROM x;
+
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
On Thu, Sep 4, 2014 at 11:41 AM, Pavel Stehule <pavel.stehule@gmail.com>
wrote:
I am sorry
too many patches
:)
Patch looks good to me.
Marking "Ready for Committer".
Thanks
Regards
Pavel
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Thank you
Regards
Pavel
2014-09-05 8:29 GMT+02:00 Jeevan Chalke <jeevan.chalke@enterprisedb.com>:
Show quoted text
On Thu, Sep 4, 2014 at 11:41 AM, Pavel Stehule <pavel.stehule@gmail.com>
wrote:
I am sorry
too many patches
:)
Patch looks good to me.
Marking "Ready for Committer".Thanks
Regards
Pavel
--
Jeevan B Chalke
Principal Software Engineer, Product Development
EnterpriseDB Corporation
The Enterprise PostgreSQL Company
Pavel, All,
* Pavel Stehule (pavel.stehule@gmail.com) wrote:
Thank you
I made a few minor adjustments, but the bigger issue (in my view) is
what to do about array_to_json_pretty- seems like we should do the same
there, no? Perhaps you could propose a new patch which incorporates my
minor changes and also removes array_to_json_pretty and makes
array_to_json take an optional argument?
Thoughts?
Thanks,
Stephen
Attachments:
row_to_json_ignorenull-4.patch (text/x-diff; charset=us-ascii)
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index e50408c..c6c44a9 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10294,11 +10294,13 @@ table2-mapping
</row>
<row>
<entry>
- <literal>row_to_json(record [, pretty_bool])</literal>
+ <literal>row_to_json(rowval record [, pretty bool [, ignore_nulls bool] ])</literal>
</entry>
<entry>
Returns the row as a JSON object. Line feeds will be added between
- level-1 elements if <parameter>pretty_bool</parameter> is true.
+ level-1 elements if <parameter>pretty_bool</parameter> is true. Elements
+ with NULL values will be skipped when <parameter>ignore_nulls</parameter>
+ is true.
</entry>
<entry><literal>row_to_json(row(1,'foo'))</literal></entry>
<entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
diff --git a/src/backend/catalog/system_views.sql b/src/backend/catalog/system_views.sql
index 1bde175..02cf965 100644
--- a/src/backend/catalog/system_views.sql
+++ b/src/backend/catalog/system_views.sql
@@ -867,3 +867,10 @@ RETURNS interval
LANGUAGE INTERNAL
STRICT IMMUTABLE
AS 'make_interval';
+
+CREATE OR REPLACE FUNCTION
+ row_to_json(rowval record, pretty boolean DEFAULT false, ignore_nulls boolean DEFAULT false)
+RETURNS json
+LANGUAGE INTERNAL
+STRICT STABLE
+AS 'row_to_json';
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 494a028..ae6028e 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -79,7 +79,8 @@ static void report_invalid_token(JsonLexContext *lex);
static int report_json_context(JsonLexContext *lex);
static char *extract_mb_char(char *s);
static void composite_to_json(Datum composite, StringInfo result,
- bool use_line_feeds);
+ bool use_line_feeds,
+ bool ignore_nulls);
static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
Datum *vals, bool *nulls, int *valcount,
JsonTypeCategory tcategory, Oid outfuncoid,
@@ -1362,7 +1363,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
array_to_json_internal(val, result, false);
break;
case JSONTYPE_COMPOSITE:
- composite_to_json(val, result, false);
+ composite_to_json(val, result, false, false);
break;
case JSONTYPE_BOOL:
outputstr = DatumGetBool(val) ? "true" : "false";
@@ -1591,7 +1592,8 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
* Turn a composite / record into JSON.
*/
static void
-composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
+composite_to_json(Datum composite, StringInfo result, bool use_line_feeds,
+ bool ignore_nulls)
{
HeapTupleHeader td;
Oid tupType;
@@ -1630,6 +1632,12 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
if (tupdesc->attrs[i]->attisdropped)
continue;
+ val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+ /* Don't serialize NULL field when we don't want it */
+ if (isnull && ignore_nulls)
+ continue;
+
if (needsep)
appendStringInfoString(result, sep);
needsep = true;
@@ -1638,8 +1646,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
escape_json(result, attname);
appendStringInfoChar(result, ':');
- val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
-
if (isnull)
{
tcategory = JSONTYPE_NULL;
@@ -1721,34 +1727,19 @@ array_to_json_pretty(PG_FUNCTION_ARGS)
}
/*
- * SQL function row_to_json(row)
+ * SQL function row_to_json(rowval record, pretty bool, ignore_nulls bool)
*/
extern Datum
row_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
- StringInfo result;
-
- result = makeStringInfo();
-
- composite_to_json(array, result, false);
-
- PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
-}
-
-/*
- * SQL function row_to_json(row, prettybool)
- */
-extern Datum
-row_to_json_pretty(PG_FUNCTION_ARGS)
-{
- Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
+ bool ignore_nulls = PG_GETARG_BOOL(2);
StringInfo result;
result = makeStringInfo();
- composite_to_json(array, result, use_line_feeds);
+ composite_to_json(array, result, use_line_feeds, ignore_nulls);
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index 5176ed0..5aeadc3 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4203,10 +4203,8 @@ DATA(insert OID = 3153 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s
DESCR("map array to json");
DATA(insert OID = 3154 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2277 16" _null_ _null_ _null_ _null_ array_to_json_pretty _null_ _null_ _null_ ));
DESCR("map array to json with optional pretty printing");
-DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2249" _null_ _null_ _null_ _null_ row_to_json _null_ _null_ _null_ ));
+DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 3 0 114 "2249 16 16" _null_ _null_ "{rowval,pretty,ignore_nulls}" _null_ row_to_json _null_ _null_ _null_ ));
DESCR("map row to json");
-DATA(insert OID = 3156 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2249 16" _null_ _null_ _null_ _null_ row_to_json_pretty _null_ _null_ _null_ ));
-DESCR("map row to json with optional pretty printing");
DATA(insert OID = 3173 ( json_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ json_agg_transfn _null_ _null_ _null_ ));
DESCR("json aggregate transition function");
DATA(insert OID = 3174 ( json_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 114 "2281" _null_ _null_ _null_ _null_ json_agg_finalfn _null_ _null_ _null_ ));
diff --git a/src/include/utils/json.h b/src/include/utils/json.h
index 82cc48b..d54aafb 100644
--- a/src/include/utils/json.h
+++ b/src/include/utils/json.h
@@ -25,7 +25,6 @@ extern Datum json_send(PG_FUNCTION_ARGS);
extern Datum array_to_json(PG_FUNCTION_ARGS);
extern Datum array_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum row_to_json(PG_FUNCTION_ARGS);
-extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum to_json(PG_FUNCTION_ARGS);
extern Datum json_agg_transfn(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index bb4d9ed..ecc29da 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -397,12 +397,70 @@ FROM rows q;
"y":"txt3"}
(3 rows)
+SELECT row_to_json(q,pretty := true)
+FROM rows q;
+ row_to_json
+--------------
+ {"x":1, +
+ "y":"txt1"}
+ {"x":2, +
+ "y":"txt2"}
+ {"x":3, +
+ "y":"txt3"}
+(3 rows)
+
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
row_to_json
-----------------------
{"f1":[5,6,7,8,9,10]}
(1 row)
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+ row_to_json
+------------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10,"b":null,"c":null}
+ {"a":null,"b":null,"c":null}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+ row_to_json
+------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10}
+ {}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true) FROM x;
+ row_to_json
+------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10}
+ {}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true, pretty := true) FROM x;
+ row_to_json
+-------------
+ {"a":10, +
+ "b":20, +
+ "c":30}
+ {"a":10}
+ {}
+(3 rows)
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
to_json
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index c980132..3ddf656 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -98,8 +98,32 @@ FROM generate_series(1,3) AS x;
SELECT row_to_json(q,true)
FROM rows q;
+SELECT row_to_json(q,pretty := true)
+FROM rows q;
+
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true, pretty := true) FROM x;
+
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
Hi
2014-09-08 5:48 GMT+02:00 Stephen Frost <sfrost@snowman.net>:
Pavel, All,
* Pavel Stehule (pavel.stehule@gmail.com) wrote:
Thank you
I made a few minor adjustments, but the bigger issue (in my view) is
what to do about array_to_json_pretty- seems like we should do the same
there, no? Perhaps you could propose a new patch which incorporates my
minor changes and also removes array_to_json_pretty and makes
array_to_json take an optional argument?
I thought about it, and I am not sure. If somebody uses an array as a set,
then it can be a good idea; when it is used as a real array, you cannot
just throw the nulls out of it.
I think it is not necessary, because we can remove NULLs from an array
simply by iterating over it (which is impossible for row fields):
CREATE OR REPLACE FUNCTION remove_null(anyarray)
RETURNS anyarray AS $$
  -- rebuild the array, keeping only the non-NULL elements
  SELECT ARRAY(SELECT a FROM unnest($1) g(a) WHERE a IS NOT NULL)
$$ LANGUAGE sql;
Or this function could be added to core for general usage.
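A quick illustrative session (a sketch only, assuming the remove_null definition
above; not taken from a real run):
postgres=# SELECT remove_null(ARRAY[10, NULL, 20, NULL]);
 remove_null
─────────────
 {10,20}
(1 row)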
ignore_nulls in array_to_json_pretty is probably not necessary. On the other
hand, the cost is zero, and we can have it for API consistency.
Regards
Pavel
Show quoted text
Thoughts?
Thanks,
Stephen
* Pavel Stehule (pavel.stehule@gmail.com) wrote:
ignore_nulls in array_to_json_pretty is probably not necessary. On the other
hand, the cost is zero, and we can have it for API consistency.
I'm willing to be persuaded either way on this, really. I do think it
would be nice for both array_to_json and row_to_json to be single
functions which take default values, but as for whether array_to_json should
have an ignore_nulls option, I'm on the fence and would defer to people who use
that function regularly (I don't today).
Beyond that, I'm pretty happy moving forward with this patch.
Thanks,
Stephen
2014-09-08 6:27 GMT+02:00 Stephen Frost <sfrost@snowman.net>:
* Pavel Stehule (pavel.stehule@gmail.com) wrote:
ignore_nulls in array_to_json_pretty is probably not necessary. On the other
hand, the cost is zero, and we can have it for API consistency.
I'm willing to be persuaded either way on this, really. I do think it
would be nice for both array_to_json and row_to_json to be single
functions which take default values, but as for whether array_to_json should
have an ignore_nulls option, I'm on the fence and would defer to people who use
that function regularly (I don't today).
Beyond that, I'm pretty happy moving forward with this patch.
ok
Regards
Pavel
Show quoted text
Thanks,
Stephen
Hi Stephen
Can I help with something? Is there some open question?
Regards
Pavel
2014-09-08 6:27 GMT+02:00 Stephen Frost <sfrost@snowman.net>:
Show quoted text
* Pavel Stehule (pavel.stehule@gmail.com) wrote:
ignore_nulls in array_to_json_pretty is probably not necessary. On the other
hand, the cost is zero, and we can have it for API consistency.
I'm willing to be persuaded either way on this, really. I do think it
would be nice for both array_to_json and row_to_json to be single
functions which take default values, but as for whether array_to_json should
have an ignore_nulls option, I'm on the fence and would defer to people who use
that function regularly (I don't today).
Beyond that, I'm pretty happy moving forward with this patch.
Thanks,
Stephen
* Pavel Stehule (pavel.stehule@gmail.com) wrote:
Can I help with something? Is there some open question?
I had been hoping for a more definitive answer regarding this option for
array_to_json, or even a comment about the change to row_to_json.
Andrew- any thoughts on this? (that's what the ping on IRC is for :).
Thanks,
Stephen
Show quoted text
2014-09-08 6:27 GMT+02:00 Stephen Frost <sfrost@snowman.net>:
* Pavel Stehule (pavel.stehule@gmail.com) wrote:
ignore_nulls in array_to_json_pretty is probably not necessary. On the other
hand, the cost is zero, and we can have it for API consistency.
I'm willing to be persuaded either way on this, really. I do think it
would be nice for both array_to_json and row_to_json to be single
functions which take default values, but as for whether array_to_json should
have an ignore_nulls option, I'm on the fence and would defer to people who use
that function regularly (I don't today).
Beyond that, I'm pretty happy moving forward with this patch.
On 09/11/2014 08:29 AM, Stephen Frost wrote:
* Pavel Stehule (pavel.stehule@gmail.com) wrote:
Can I help with something? Is there some open question?
I had been hoping for a more definitive answer regarding this option for
array_to_json, or even a comment about the change to row_to_json.
Andrew- any thoughts on this? (that's what the ping on IRC is for :).
The change in row_to_json looks OK, I think; we're replacing an
overloading with use of default params, yes? That seems quite
reasonable, and users shouldn't notice the difference.
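To make that concrete (a sketch only, reusing the rows table from the regression
tests in the patch): existing positional calls keep working against the single
catalog entry, and named or defaulted parameters become available on top.
-- both existing call styles are unchanged
SELECT row_to_json(q) FROM rows q;
SELECT row_to_json(q, true) FROM rows q;
-- named / defaulted parameters now also work
SELECT row_to_json(q, pretty := true, ignore_nulls := true) FROM rows q;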
There might be a case for optionally suppressing nulls in array_to_json,
and it might work reasonably since, unlike SQL arrays, JSON arrays don't
have to be regular (if nested they are arrays of arrays, not
multi-dimensional single arrays). OTOH I'm not sure if it's worth doing
just for the sake of orthogonality. If someone wants it, then go for it.
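For example (again just a sketch, not from the original mail), a two-dimensional
SQL array already serializes as nested JSON arrays:
postgres=# SELECT array_to_json('{{1,NULL},{3,4}}'::int[]);
  array_to_json
───────────────────
 [[1,null],[3,4]]
(1 row)
Suppressing the nulls here would yield [[1],[3,4]], which is perfectly legal
JSON but has no regular multi-dimensional SQL array counterpart.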
cheers
andrew
Andrew,
* Andrew Dunstan (andrew@dunslane.net) wrote:
On 09/11/2014 08:29 AM, Stephen Frost wrote:
* Pavel Stehule (pavel.stehule@gmail.com) wrote:
Can I help with something? Is there some open question?
I had been hoping for a more definitive answer regarding this option for
array_to_json, or even a comment about the change to row_to_json.
Andrew- any thoughts on this? (that's what the ping on IRC is for :).
The change in row_to_json looks OK, I think; we're replacing an
overloading with use of default params, yes? That seems quite
reasonable, and users shouldn't notice the difference.
Right. Great, thanks.
There might be a case for optionally suppressing nulls in
array_to_json, and it might work reasonably since unlike SQL arrays
JSON arrays don't have to be regular (if nested they are arrays of
arrays, not multi-dimensional single arrays). OTOH I'm not sure if
it's worth doing just for the sake of orthogonality. If someone
wants it, then go for it.
Ok. I'll handle updating both of these to remove the overloading
and use default params instead, but I'll only add the 'ignore_nulls'
option to row_to_json.
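Concretely, the two declarations this turns into (taken from the
system_views.sql hunk in the patch below) are:
CREATE OR REPLACE FUNCTION
  row_to_json(rowval record, pretty boolean DEFAULT false, ignore_nulls boolean DEFAULT false)
RETURNS json
LANGUAGE INTERNAL
STRICT STABLE
AS 'row_to_json';
CREATE OR REPLACE FUNCTION
  array_to_json(arrayval anyarray, pretty boolean DEFAULT false)
RETURNS json
LANGUAGE INTERNAL
STRICT STABLE
AS 'array_to_json';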
Thanks again!
Stephen
All,
* Stephen Frost (sfrost@snowman.net) wrote:
Ok. I'll handle updating both of these to remove the overloading
and use default params instead, but I'll only add the 'ignore_nulls'
option to row_to_json.
Barring objections or any issues I find as I go back through it, this is
what I'm planning to push a bit later on tonight.
Thanks!
Stephen
Attachments:
row_to_json_ignorenull-5.patch (text/x-diff; charset=us-ascii)
diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index 3a7cfa9..7195df8 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10309,11 +10309,13 @@ table2-mapping
</row>
<row>
<entry>
- <literal>row_to_json(record [, pretty_bool])</literal>
+ <literal>row_to_json(rowval record [, pretty bool [, ignore_nulls bool] ])</literal>
</entry>
<entry>
Returns the row as a JSON object. Line feeds will be added between
- level-1 elements if <parameter>pretty_bool</parameter> is true.
+ level-1 elements if <parameter>pretty_bool</parameter> is true. Elements
+ with NULL values will be skipped when <parameter>ignore_nulls</parameter>
+ is true.
</entry>
<entry><literal>row_to_json(row(1,'foo'))</literal></entry>
<entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
diff --git a/src/backend/catalog/system_views.sql b/src/backend/catalog/system_views.sql
index 1bde175..22663c3 100644
--- a/src/backend/catalog/system_views.sql
+++ b/src/backend/catalog/system_views.sql
@@ -867,3 +867,17 @@ RETURNS interval
LANGUAGE INTERNAL
STRICT IMMUTABLE
AS 'make_interval';
+
+CREATE OR REPLACE FUNCTION
+ row_to_json(rowval record, pretty boolean DEFAULT false, ignore_nulls boolean DEFAULT false)
+RETURNS json
+LANGUAGE INTERNAL
+STRICT STABLE
+AS 'row_to_json';
+
+CREATE OR REPLACE FUNCTION
+ array_to_json(arrayval anyarray, pretty boolean DEFAULT false)
+RETURNS json
+LANGUAGE INTERNAL
+STRICT STABLE
+AS 'array_to_json';
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 494a028..19d7401 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -79,7 +79,8 @@ static void report_invalid_token(JsonLexContext *lex);
static int report_json_context(JsonLexContext *lex);
static char *extract_mb_char(char *s);
static void composite_to_json(Datum composite, StringInfo result,
- bool use_line_feeds);
+ bool use_line_feeds,
+ bool ignore_nulls);
static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
Datum *vals, bool *nulls, int *valcount,
JsonTypeCategory tcategory, Oid outfuncoid,
@@ -1362,7 +1363,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
array_to_json_internal(val, result, false);
break;
case JSONTYPE_COMPOSITE:
- composite_to_json(val, result, false);
+ composite_to_json(val, result, false, false);
break;
case JSONTYPE_BOOL:
outputstr = DatumGetBool(val) ? "true" : "false";
@@ -1591,7 +1592,8 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
* Turn a composite / record into JSON.
*/
static void
-composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
+composite_to_json(Datum composite, StringInfo result, bool use_line_feeds,
+ bool ignore_nulls)
{
HeapTupleHeader td;
Oid tupType;
@@ -1630,6 +1632,12 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
if (tupdesc->attrs[i]->attisdropped)
continue;
+ val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+ /* Don't serialize NULL field when we don't want it */
+ if (isnull && ignore_nulls)
+ continue;
+
if (needsep)
appendStringInfoString(result, sep);
needsep = true;
@@ -1638,8 +1646,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
escape_json(result, attname);
appendStringInfoChar(result, ':');
- val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
-
if (isnull)
{
tcategory = JSONTYPE_NULL;
@@ -1688,26 +1694,10 @@ add_json(Datum val, bool is_null, StringInfo result,
}
/*
- * SQL function array_to_json(row)
- */
-extern Datum
-array_to_json(PG_FUNCTION_ARGS)
-{
- Datum array = PG_GETARG_DATUM(0);
- StringInfo result;
-
- result = makeStringInfo();
-
- array_to_json_internal(array, result, false);
-
- PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
-}
-
-/*
* SQL function array_to_json(row, prettybool)
*/
extern Datum
-array_to_json_pretty(PG_FUNCTION_ARGS)
+array_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
@@ -1721,34 +1711,19 @@ array_to_json_pretty(PG_FUNCTION_ARGS)
}
/*
- * SQL function row_to_json(row)
+ * SQL function row_to_json(rowval record, pretty bool, ignore_nulls bool)
*/
extern Datum
row_to_json(PG_FUNCTION_ARGS)
{
Datum array = PG_GETARG_DATUM(0);
- StringInfo result;
-
- result = makeStringInfo();
-
- composite_to_json(array, result, false);
-
- PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
-}
-
-/*
- * SQL function row_to_json(row, prettybool)
- */
-extern Datum
-row_to_json_pretty(PG_FUNCTION_ARGS)
-{
- Datum array = PG_GETARG_DATUM(0);
bool use_line_feeds = PG_GETARG_BOOL(1);
+ bool ignore_nulls = PG_GETARG_BOOL(2);
StringInfo result;
result = makeStringInfo();
- composite_to_json(array, result, use_line_feeds);
+ composite_to_json(array, result, use_line_feeds, ignore_nulls);
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index a56a635..e66430d 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4203,14 +4203,10 @@ DATA(insert OID = 323 ( json_recv PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0
DESCR("I/O");
DATA(insert OID = 324 ( json_send PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 17 "114" _null_ _null_ _null_ _null_ json_send _null_ _null_ _null_ ));
DESCR("I/O");
-DATA(insert OID = 3153 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2277" _null_ _null_ _null_ _null_ array_to_json _null_ _null_ _null_ ));
+DATA(insert OID = 3153 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2277 16" _null_ _null_ "{arrayval,pretty}" _null_ array_to_json _null_ _null_ _null_ ));
DESCR("map array to json");
-DATA(insert OID = 3154 ( array_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2277 16" _null_ _null_ _null_ _null_ array_to_json_pretty _null_ _null_ _null_ ));
-DESCR("map array to json with optional pretty printing");
-DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2249" _null_ _null_ _null_ _null_ row_to_json _null_ _null_ _null_ ));
+DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 3 0 114 "2249 16 16" _null_ _null_ "{rowval,pretty,ignore_nulls}" _null_ row_to_json _null_ _null_ _null_ ));
DESCR("map row to json");
-DATA(insert OID = 3156 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2249 16" _null_ _null_ _null_ _null_ row_to_json_pretty _null_ _null_ _null_ ));
-DESCR("map row to json with optional pretty printing");
DATA(insert OID = 3173 ( json_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ json_agg_transfn _null_ _null_ _null_ ));
DESCR("json aggregate transition function");
DATA(insert OID = 3174 ( json_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 114 "2281" _null_ _null_ _null_ _null_ json_agg_finalfn _null_ _null_ _null_ ));
diff --git a/src/include/utils/json.h b/src/include/utils/json.h
index 82cc48b..9d8a0a5 100644
--- a/src/include/utils/json.h
+++ b/src/include/utils/json.h
@@ -23,9 +23,7 @@ extern Datum json_out(PG_FUNCTION_ARGS);
extern Datum json_recv(PG_FUNCTION_ARGS);
extern Datum json_send(PG_FUNCTION_ARGS);
extern Datum array_to_json(PG_FUNCTION_ARGS);
-extern Datum array_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum row_to_json(PG_FUNCTION_ARGS);
-extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum to_json(PG_FUNCTION_ARGS);
extern Datum json_agg_transfn(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index bb4d9ed..968e02a 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -397,12 +397,70 @@ FROM rows q;
"y":"txt3"}
(3 rows)
+SELECT row_to_json(q,pretty := true)
+FROM rows q;
+ row_to_json
+--------------
+ {"x":1, +
+ "y":"txt1"}
+ {"x":2, +
+ "y":"txt2"}
+ {"x":3, +
+ "y":"txt3"}
+(3 rows)
+
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
row_to_json
-----------------------
{"f1":[5,6,7,8,9,10]}
(1 row)
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+ row_to_json
+------------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10,"b":null,"c":null}
+ {"a":null,"b":null,"c":null}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+ row_to_json
+------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10}
+ {}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true) FROM x;
+ row_to_json
+------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10}
+ {}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true, pretty := true) FROM x;
+ row_to_json
+-------------
+ {"a":10, +
+ "b":20, +
+ "c":30}
+ {"a":10}
+ {}
+(3 rows)
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
to_json
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index c980132..3ddf656 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -98,8 +98,32 @@ FROM generate_series(1,3) AS x;
SELECT row_to_json(q,true)
FROM rows q;
+SELECT row_to_json(q,pretty := true)
+FROM rows q;
+
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, false) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, false, true) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+ (10,NULL, NULL),
+ (NULL, NULL, NULL)) g(a,b,c))
+ SELECT row_to_json(x, ignore_nulls := true, pretty := true) FROM x;
+
+
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
* Stephen Frost (sfrost@snowman.net) wrote:
* Stephen Frost (sfrost@snowman.net) wrote:
Ok. I'll handle updating both of these to remove the overloading
and use default params instead, but I'll only add the 'ignore_nulls'
option to row_to_json.
Barring objections or any issues I find as I go back through it, this is
what I'm planning to push a bit later on tonight.
Pushed.
Thanks!
Stephen