diff --git a/contrib/pg_stat_statements/pg_stat_statements.c b/contrib/pg_stat_statements/pg_stat_statements.c
new file mode 100644
index 914fbf2..b660d0d
*** a/contrib/pg_stat_statements/pg_stat_statements.c
--- b/contrib/pg_stat_statements/pg_stat_statements.c
***************
*** 10,15 ****
--- 10,35 ----
   * an entry, one must hold the lock shared or exclusive (so the entry doesn't
   * disappear!) and also take the entry's mutex spinlock.
   *
+  * As of Postgres 9.2, this module normalizes query strings. Normalization is a
+  * process whereby similar queries, typically differing only in their constants
+  * (though the exact rules are somewhat more subtle than that), are recognized as
+  * equivalent, and are tracked as a single entry. This is particularly useful
+  * for non-prepared queries.
+  *
+  * Normalization is implemented by selectively serializing those fields of each
+  * query tree's nodes that are judged to be essential to the nature of the
+  * query.  This is referred to as a query jumble. This is distinct from a
+  * straight serialization of the query tree in that various extraneous
+  * information is ignored as irrelevant or not essential to the query, such as
+  * the collation of Vars, and, most notably, the value of constants. Once this
+  * jumble is acquired, a 32-bit hash is taken, which is copied back into the
+  * query tree at the post-analysis stage.  Postgres then naively copies this
+  * value around, making it later available from within the corresponding plan
+  * tree. The executor can then use this value to blame query costs on a known
+  * queryId.
+  *
+  * Within the executor hook, the module stores the cost of query execution,
+  * based on a queryId provided by the core system.
   *
   * Copyright (c) 2008-2012, PostgreSQL Global Development Group
   *
***************
*** 27,38 ****
--- 47,62 ----
  #include "funcapi.h"
  #include "mb/pg_wchar.h"
  #include "miscadmin.h"
+ #include "parser/analyze.h"
+ #include "parser/parsetree.h"
+ #include "parser/scanner.h"
  #include "pgstat.h"
  #include "storage/fd.h"
  #include "storage/ipc.h"
  #include "storage/spin.h"
  #include "tcop/utility.h"
  #include "utils/builtins.h"
+ #include "utils/memutils.h"
  
  
  PG_MODULE_MAGIC;
*************** PG_MODULE_MAGIC;
*** 41,54 ****
  #define PGSS_DUMP_FILE	"global/pg_stat_statements.stat"
  
  /* This constant defines the magic number in the stats file header */
! static const uint32 PGSS_FILE_HEADER = 0x20100108;
  
  /* XXX: Should USAGE_EXEC reflect execution time and/or buffer usage? */
  #define USAGE_EXEC(duration)	(1.0)
  #define USAGE_INIT				(1.0)	/* including initial planning */
  #define USAGE_DECREASE_FACTOR	(0.99)	/* decreased every entry_dealloc */
  #define USAGE_DEALLOC_PERCENT	5		/* free this % of entries at once */
! 
  /*
   * Hashtable key that defines the identity of a hashtable entry.  The
   * hash comparators do not assume that the query string is null-terminated;
--- 65,84 ----
  #define PGSS_DUMP_FILE	"global/pg_stat_statements.stat"
  
  /* This constant defines the magic number in the stats file header */
! static const uint32 PGSS_FILE_HEADER = 0x20120103;
  
  /* XXX: Should USAGE_EXEC reflect execution time and/or buffer usage? */
  #define USAGE_EXEC(duration)	(1.0)
  #define USAGE_INIT				(1.0)	/* including initial planning */
+ #define USAGE_NON_EXEC_STICK	(1.0e10)/* unexecuted queries sticky */
  #define USAGE_DECREASE_FACTOR	(0.99)	/* decreased every entry_dealloc */
  #define USAGE_DEALLOC_PERCENT	5		/* free this % of entries at once */
! #define JUMBLE_SIZE				1024    /* query serialization buffer size */
! /* Magic values for jumble */
! #define MAG_HASH_BUF			0xFA	/* buffer is a hash of query tree */
! #define MAG_STR_BUF				0xEB	/* buffer is query string itself */
! #define MAG_RETURN_LIST			0xAE	/* returning list node follows */
! #define MAG_LIMIT_OFFSET		0xBA	/* limit/offset node follows */
  /*
   * Hashtable key that defines the identity of a hashtable entry.  The
   * hash comparators do not assume that the query string is null-terminated;
*************** typedef struct pgssHashKey
*** 63,70 ****
  	Oid			userid;			/* user OID */
  	Oid			dbid;			/* database OID */
  	int			encoding;		/* query encoding */
! 	int			query_len;		/* # of valid bytes in query string */
! 	const char *query_ptr;		/* query string proper */
  } pgssHashKey;
  
  /*
--- 93,99 ----
  	Oid			userid;			/* user OID */
  	Oid			dbid;			/* database OID */
  	int			encoding;		/* query encoding */
! 	uint32		queryid;		/* query identifier */
  } pgssHashKey;
  
  /*
*************** typedef struct pgssEntry
*** 97,102 ****
--- 126,132 ----
  {
  	pgssHashKey key;			/* hash key of entry - MUST BE FIRST */
  	Counters	counters;		/* the statistics for this query */
+ 	int			query_len;		/* # of valid bytes in query string */
  	slock_t		mutex;			/* protects the counters only */
  	char		query[1];		/* VARIABLE LENGTH ARRAY - MUST BE LAST */
  	/* Note: the allocated length of query[] is actually pgss->query_size */
*************** typedef struct pgssSharedState
*** 111,117 ****
--- 141,171 ----
  	int			query_size;		/* max query length in bytes */
  } pgssSharedState;
  
+ typedef struct pgssLocationLen
+ {
+ 	int location;
+ 	int length;
+ } pgssLocationLen;
+ 
+ /*
+  * Last seen constant positions for a statement
+  */
+ typedef struct pgssQueryConEntry
+ {
+ 	pgssHashKey		key;			/* hash key of entry - MUST BE FIRST */
+ 	int				n_elems;		/* length of offsets array */
+ 	Size offsets[1];		/* VARIABLE LENGTH ARRAY - MUST BE LAST */
+ 	/* Note: the allocated length of offsets is actually n_elems */
+ } pgssQueryConEntry;
  /*---- Local variables ----*/
+ /* Jumble of current query tree */
+ static unsigned char *last_jumble = NULL;
+ /* Buffer that represents position of normalized characters */
+ static pgssLocationLen *last_offsets = NULL;
+ /* Current Length of last_offsets buffer */
+ static Size last_offset_buf_size = 10;
+ /* Current number of actual offsets stored in last_offsets */
+ static Size last_offset_num = 0;
  
  /* Current nesting depth of ExecutorRun calls */
  static int	nested_level = 0;
*************** static ExecutorRun_hook_type prev_Execut
*** 123,133 ****
--- 177,196 ----
  static ExecutorFinish_hook_type prev_ExecutorFinish = NULL;
  static ExecutorEnd_hook_type prev_ExecutorEnd = NULL;
  static ProcessUtility_hook_type prev_ProcessUtility = NULL;
+ static parse_analyze_hook_type prev_parse_analyze_hook = NULL;
+ static parse_analyze_varparams_hook_type prev_parse_analyze_varparams_hook = NULL;
  
  /* Links to shared memory state */
  static pgssSharedState *pgss = NULL;
  static HTAB *pgss_hash = NULL;
  
+ /*
+  * Maintain a stack of the rangetable of the query tree that we're currently
+  * walking, so subqueries can reference parent rangetables. The stack is pushed
+  * and popped as each Query struct is walked into or out of.
+  */
+ static List* pgss_rangetbl_stack = NIL;
+ 
  /*---- GUC variables ----*/
  
  typedef enum
*************** static int	pgss_max;			/* max # statemen
*** 149,154 ****
--- 212,218 ----
  static int	pgss_track;			/* tracking level */
  static bool pgss_track_utility; /* whether to track utility commands */
  static bool pgss_save;			/* whether to save stats across shutdown */
+ static bool pgss_string_key;	/* whether to always only hash query str */
  
  
  #define pgss_enabled() \
*************** PG_FUNCTION_INFO_V1(pg_stat_statements);
*** 168,173 ****
--- 232,255 ----
  
  static void pgss_shmem_startup(void);
  static void pgss_shmem_shutdown(int code, Datum arg);
+ static int comp_offset(const void *a, const void *b);
+ static Query *pgss_parse_analyze(Node *parseTree, const char *sourceText,
+ 			  Oid *paramTypes, int numParams);
+ static Query *pgss_parse_analyze_varparams(Node *parseTree, const char *sourceText,
+ 						Oid **paramTypes, int *numParams);
+ static void pgss_process_post_analysis_tree(Query* post_analysis_tree,
+ 		const char* sourceText, bool canonicalize);
+ static void fill_in_constant_lengths(const char* query,
+ 						pgssLocationLen offs[], Size n_offs);
+ static uint32 JumbleQuery(Query *post_analysis_tree);
+ static void AppendJumb(unsigned char* item, unsigned char jumble[], Size size, Size *i);
+ static void PerformJumble(const Query *tree, Size size, Size *i);
+ static void QualsNode(const OpExpr *node, Size size, Size *i, List *rtable);
+ static void LeafNode(const Node *arg, Size size, Size *i, List *rtable);
+ static void LimitOffsetNode(const Node *node, Size size, Size *i, List *rtable);
+ static void JoinExprNode(JoinExpr *node, Size size, Size *i, List *rtable);
+ static void JoinExprNodeChild(const Node *node, Size size, Size *i, List *rtable);
+ static void RecordConstLocation(int location);
  static void pgss_ExecutorStart(QueryDesc *queryDesc, int eflags);
  static void pgss_ExecutorRun(QueryDesc *queryDesc,
  				 ScanDirection direction,
*************** static void pgss_ProcessUtility(Node *pa
*** 179,188 ****
  					DestReceiver *dest, char *completionTag);
  static uint32 pgss_hash_fn(const void *key, Size keysize);
  static int	pgss_match_fn(const void *key1, const void *key2, Size keysize);
! static void pgss_store(const char *query, double total_time, uint64 rows,
! 		   const BufferUsage *bufusage);
  static Size pgss_memsize(void);
! static pgssEntry *entry_alloc(pgssHashKey *key);
  static void entry_dealloc(void);
  static void entry_reset(void);
  
--- 261,272 ----
  					DestReceiver *dest, char *completionTag);
  static uint32 pgss_hash_fn(const void *key, Size keysize);
  static int	pgss_match_fn(const void *key1, const void *key2, Size keysize);
! static uint32 pgss_hash_string(const char* str);
! static void pgss_store(const char *query, uint32 queryId,
! 				double total_time, uint64 rows,
! 				const BufferUsage *bufusage, bool empty_entry, bool canonicalize);
  static Size pgss_memsize(void);
! static pgssEntry *entry_alloc(pgssHashKey *key, const char* query, int new_query_len);
  static void entry_dealloc(void);
  static void entry_reset(void);
  
*************** static void entry_reset(void);
*** 193,198 ****
--- 277,283 ----
  void
  _PG_init(void)
  {
+ 	MemoryContext oldcontext;
  	/*
  	 * In order to create our shared memory area, we have to be loaded via
  	 * shared_preload_libraries.  If not, fall out without hooking into any of
*************** _PG_init(void)
*** 254,259 ****
--- 339,359 ----
  							 NULL,
  							 NULL);
  
+ 	/*
+ 	 * Support legacy pg_stat_statements behavior, for compatibility with
+ 	 * versions shipped with Postgres 8.4, 9.0 and 9.1
+ 	 */
+ 	DefineCustomBoolVariable("pg_stat_statements.string_key",
+ 			   "Differentiate queries based on query string alone.",
+ 							 NULL,
+ 							 &pgss_string_key,
+ 							 false,
+ 							 PGC_POSTMASTER,
+ 							 0,
+ 							 NULL,
+ 							 NULL,
+ 							 NULL);
+ 
  	EmitWarningsOnPlaceholders("pg_stat_statements");
  
  	/*
*************** _PG_init(void)
*** 265,270 ****
--- 365,382 ----
  	RequestAddinLWLocks(1);
  
  	/*
+ 	 * Allocate a buffer to store selective serialization of the query tree
+ 	 * for the purposes of query normalization.
+ 	 */
+ 	oldcontext = MemoryContextSwitchTo(TopMemoryContext);
+ 
+ 	last_jumble = palloc(JUMBLE_SIZE);
+ 	/* Allocate space for bookkeeping information for query str normalization */
+ 	last_offsets = palloc(last_offset_buf_size * sizeof(pgssLocationLen));
+ 
+ 	MemoryContextSwitchTo(oldcontext);
+ 
+ 	/*
  	 * Install hooks.
  	 */
  	prev_shmem_startup_hook = shmem_startup_hook;
*************** _PG_init(void)
*** 279,284 ****
--- 391,400 ----
  	ExecutorEnd_hook = pgss_ExecutorEnd;
  	prev_ProcessUtility = ProcessUtility_hook;
  	ProcessUtility_hook = pgss_ProcessUtility;
+ 	prev_parse_analyze_hook = parse_analyze_hook;
+ 	parse_analyze_hook = pgss_parse_analyze;
+ 	prev_parse_analyze_varparams_hook = parse_analyze_varparams_hook;
+ 	parse_analyze_varparams_hook = pgss_parse_analyze_varparams;
  }
  
  /*
*************** _PG_fini(void)
*** 294,299 ****
--- 410,420 ----
  	ExecutorFinish_hook = prev_ExecutorFinish;
  	ExecutorEnd_hook = prev_ExecutorEnd;
  	ProcessUtility_hook = prev_ProcessUtility;
+ 	parse_analyze_hook = prev_parse_analyze_hook;
+ 	parse_analyze_varparams_hook = prev_parse_analyze_varparams_hook;
+ 
+ 	pfree(last_jumble);
+ 	pfree(last_offsets);
  }
  
  /*
*************** pgss_shmem_startup(void)
*** 397,423 ****
  		if (!PG_VALID_BE_ENCODING(temp.key.encoding))
  			goto error;
  
  		/* Previous incarnation might have had a larger query_size */
! 		if (temp.key.query_len >= buffer_size)
  		{
! 			buffer = (char *) repalloc(buffer, temp.key.query_len + 1);
! 			buffer_size = temp.key.query_len + 1;
  		}
  
! 		if (fread(buffer, 1, temp.key.query_len, file) != temp.key.query_len)
  			goto error;
! 		buffer[temp.key.query_len] = '\0';
  
  		/* Clip to available length if needed */
! 		if (temp.key.query_len >= query_size)
! 			temp.key.query_len = pg_encoding_mbcliplen(temp.key.encoding,
  													   buffer,
! 													   temp.key.query_len,
  													   query_size - 1);
- 		temp.key.query_ptr = buffer;
  
  		/* make the hashtable entry (discards old entries if too many) */
! 		entry = entry_alloc(&temp.key);
  
  		/* copy in the actual stats */
  		entry->counters = temp.counters;
--- 518,548 ----
  		if (!PG_VALID_BE_ENCODING(temp.key.encoding))
  			goto error;
  
+ 		/* Avoid loading sticky entries */
+ 		if (temp.counters.calls == 0)
+ 			continue;
+ 
  		/* Previous incarnation might have had a larger query_size */
! 		if (temp.query_len >= buffer_size)
  		{
! 			buffer = (char *) repalloc(buffer, temp.query_len + 1);
! 			buffer_size = temp.query_len + 1;
  		}
  
! 		if (fread(buffer, 1, temp.query_len, file) != temp.query_len)
  			goto error;
! 		buffer[temp.query_len] = '\0';
! 
  
  		/* Clip to available length if needed */
! 		if (temp.query_len >= query_size)
! 			temp.query_len = pg_encoding_mbcliplen(temp.key.encoding,
  													   buffer,
! 													   temp.query_len,
  													   query_size - 1);
  
  		/* make the hashtable entry (discards old entries if too many) */
! 		entry = entry_alloc(&temp.key, buffer, temp.query_len);
  
  		/* copy in the actual stats */
  		entry->counters = temp.counters;
*************** pgss_shmem_shutdown(int code, Datum arg)
*** 479,485 ****
  	hash_seq_init(&hash_seq, pgss_hash);
  	while ((entry = hash_seq_search(&hash_seq)) != NULL)
  	{
! 		int			len = entry->key.query_len;
  
  		if (fwrite(entry, offsetof(pgssEntry, mutex), 1, file) != 1 ||
  			fwrite(entry->query, 1, len, file) != len)
--- 604,610 ----
  	hash_seq_init(&hash_seq, pgss_hash);
  	while ((entry = hash_seq_search(&hash_seq)) != NULL)
  	{
! 		int			len = entry->query_len;
  
  		if (fwrite(entry, offsetof(pgssEntry, mutex), 1, file) != 1 ||
  			fwrite(entry->query, 1, len, file) != len)
*************** error:
*** 505,510 ****
--- 630,1667 ----
  }
  
  /*
+  * comp_offset: Comparator for qsorting pgssLocationLen values.
+  */
+ static int
+ comp_offset(const void *a, const void *b)
+ {
+ 	const pgssLocationLen *lhs = (const pgssLocationLen *) a;
+ 	const pgssLocationLen *rhs = (const pgssLocationLen *) b;
+ 
+ 	if (lhs->location < rhs->location)
+ 		return -1;
+ 	if (lhs->location > rhs->location)
+ 		return +1;
+ 	return 0;
+ }
+ 
+ static Query *
+ pgss_parse_analyze(Node *parseTree, const char *sourceText,
+ 			  Oid *paramTypes, int numParams)
+ {
+ 	Query *post_analysis_tree;
+ 
+ 	if (prev_parse_analyze_hook)
+ 		post_analysis_tree = (*prev_parse_analyze_hook) (parseTree, sourceText,
+ 			  paramTypes, numParams);
+ 	else
+ 		post_analysis_tree = standard_parse_analyze(parseTree, sourceText,
+ 			  paramTypes, numParams);
+ 
+ 	if (!post_analysis_tree->utilityStmt)
+ 		pgss_process_post_analysis_tree(post_analysis_tree, sourceText,
+ 											numParams == 0);
+ 
+ 	return post_analysis_tree;
+ }
+ 
+ static Query *
+ pgss_parse_analyze_varparams(Node *parseTree, const char *sourceText,
+ 						Oid **paramTypes, int *numParams)
+ {
+ 	Query *post_analysis_tree;
+ 
+ 	/* Test the varparams hook, not the plain-analyze hook, before calling it */
+ 	if (prev_parse_analyze_varparams_hook)
+ 		post_analysis_tree = (*prev_parse_analyze_varparams_hook) (parseTree,
+ 				sourceText, paramTypes, numParams);
+ 	else
+ 		post_analysis_tree = standard_parse_analyze_varparams(parseTree,
+ 				sourceText, paramTypes, numParams);
+ 
+ 	if (!post_analysis_tree->utilityStmt)
+ 		pgss_process_post_analysis_tree(post_analysis_tree, sourceText,
+ 											false);
+ 
+ 	return post_analysis_tree;
+ }
+ 
+ /*
+  * pgss_process_post_analysis_tree: Record queryId, which is based on the query
+  * tree, within the tree itself, for later retrieval in the executor hook. The
+  * core system will copy the value to the tree's corresponding plannedstmt.
+  *
+  * Avoid producing a canonicalized string for parameterized queries. It is
+  * simply not desirable given that constants that we might otherwise
+  * canonicalize are going to always be consistent between calls. In addition, it
+  * would be impractical to make the hash entry sticky for an indefinitely long
+  * period (i.e. until the query is actually executed).
+  *
+  * It's still worth going to the trouble of hashing the query tree though,
+  * because that ensures that we can hash an arbitrarily long query.
+  */
+ static void
+ pgss_process_post_analysis_tree(Query* post_analysis_tree,
+ 		const char* sourceText, bool canonicalize)
+ {
+ 	BufferUsage bufusage;
+ 
+ 	post_analysis_tree->queryId = JumbleQuery(post_analysis_tree);
+ 
+ 	memset(&bufusage, 0, sizeof(bufusage));
+ 	pgss_store(sourceText, post_analysis_tree->queryId, 0, 0, &bufusage,
+ 			true, canonicalize);
+ 
+ 	/* Trim last_offsets */
+ 	if (last_offset_buf_size > 10)
+ 	{
+ 		last_offset_buf_size = 10;
+ 		last_offsets = repalloc(last_offsets,
+ 							last_offset_buf_size *
+ 							sizeof(pgssLocationLen));
+ 	}
+ }
+ 
+ /*
+  * Given a valid SQL string, and offsets whose lengths are uninitialized, fill
+  * in the corresponding lengths of those constants.
+  *
+  * The constant may use any available constant syntax, including but not limited
+  * to float literals, bit-strings, single quoted strings and dollar-quoted
+  * strings. This is accomplished by using the public API for the core scanner,
+  * with a workaround for quirks of their representation.
+  *
+  * It is the caller's job to ensure that the string is a valid SQL statement.
+  * Since in practice the string has already been validated, and the locations
+  * that the caller provides will have originated from within the authoritative
+  * parser, this should not be a problem. The caller must also ensure that
+  * constants are provided in pre-sorted order. Duplicates are expected, and have
+  * their lengths marked as '-1', so that they are later ignored.
+  *
+  * N.B. There is an assumption that a '-' character at a Const location begins a
+  * negative constant. This precludes there ever being another reason for a
+  * constant to start with a '-' for any other reason.
+  */
+ static void
+ fill_in_constant_lengths(const char* query, pgssLocationLen offs[],
+ 							Size n_offs)
+ {
+ 	core_yyscan_t  init_scan;
+ 	core_yy_extra_type ext_type;
+ 	core_YYSTYPE type;
+ 	YYLTYPE pos;
+ 	int i, last_loc = -1;
+ 
+ 	init_scan = scanner_init(query,
+ 							 &ext_type,
+ 							 ScanKeywords,
+ 							 NumScanKeywords);
+ 
+ 	for(i = 0; i < n_offs; i++)
+ 	{
+ 		int loc = offs[i].location;
+ 		Assert(loc > 0);
+ 
+ 		if (loc == last_loc)
+ 		{
+ 			/* Duplicate */
+ 			offs[i].length = -1;
+ 			continue;
+ 		}
+ 
+ 		for(;;)
+ 		{
+ 			int scanbuf_len;
+ #ifdef USE_ASSERT_CHECKING
+ 			int tok =
+ #endif
+ 						core_yylex(&type, &pos, init_scan);
+ 			scanbuf_len = strlen(ext_type.scanbuf);
+ 			Assert(tok != 0);
+ 
+ 			if (scanbuf_len > loc)
+ 			{
+ 				if (query[loc] == '-')
+ 				{
+ 					/*
+ 					 * It's a negative value - this is the one and only case
+ 					 * where we canonicalize more than a single token.
+ 					 *
+ 					 * Do not compensate for the core system's special-case
+ 					 * adjustment of location to that of the leading '-'
+ 					 * operator in the event of a negative constant. It is also
+ 					 * useful for our purposes to start from the minus symbol.
+ 					 * In this way, queries like "select * from foo where bar =
+ 					 * 1" and "select * from foo where bar = -2" will always
+ 					 * have identical canonicalized query strings.
+ 					 */
+ 					core_yylex(&type, &pos, init_scan);
+ 					scanbuf_len = strlen(ext_type.scanbuf);
+ 				}
+ 
+ 				/*
+ 				 * Scanner is now at end of const token of outer iteration -
+ 				 * work backwards to get constant length.
+ 				 */
+ 				offs[i].length = scanbuf_len - loc;
+ 				break;
+ 			}
+ 		}
+ 		last_loc = loc;
+ 	}
+ 	scanner_finish(init_scan);
+ }
+ 
+ /*
+  * JumbleQuery: Selectively serialize query tree, and return a hash representing
+  * that serialization - its queryId.
+  *
+  * Note that this doesn't necessarily uniquely identify the query across
+  * different databases and encodings.
+  */
+ static uint32
+ JumbleQuery(Query *post_analysis_tree)
+ {
+ 	/* State for this run of PerformJumble */
+ 	Size i = 0;
+ 	last_offset_num = 0;
+ 	memset(last_jumble, 0, JUMBLE_SIZE);
+ 	last_jumble[i++] = MAG_HASH_BUF;
+ 	PerformJumble(post_analysis_tree, JUMBLE_SIZE, &i);
+ 	/* Reset rangetbl state */
+ 	list_free(pgss_rangetbl_stack);
+ 	pgss_rangetbl_stack = NIL;
+ 
+ 	/* Sort offsets as required by later query string canonicalization */
+ 	qsort(last_offsets, last_offset_num, sizeof(pgssLocationLen), comp_offset);
+ 	return hash_any((const unsigned char* ) last_jumble, i);
+ }
+ 
+ /*
+  * AppendJumb: Append a value that is substantive to a given query to jumble,
+  * while incrementing the iterator, i.
+  */
+ static void
+ AppendJumb(unsigned char* item, unsigned char jumble[], Size size, Size *i)
+ {
+ 	Assert(item != NULL);
+ 	Assert(jumble != NULL);
+ 	Assert(i != NULL);
+ 
+ 	/*
+ 	 * Copy the entire item to the buffer, or as much of it as possible to fill
+ 	 * the buffer to capacity.
+ 	 */
+ 	memcpy(jumble + *i, item, Min(*i > JUMBLE_SIZE? 0:JUMBLE_SIZE - *i, size));
+ 
+ 	/*
+ 	 * Continually hash the query tree's jumble.
+ 	 *
+ 	 * Was JUMBLE_SIZE exceeded? If so, hash the jumble and append that to the
+ 	 * start of the jumble buffer, and then continue to append the fraction of
+ 	 * "item" that we might not have been able to fit at the end of the buffer
+ 	 * in the last iteration. Since the value of i has been set to 0, there is
+ 	 * no need to memset the buffer in advance of this new iteration, but
+ 	 * effectively we are completely discarding the prior iteration's jumble
+ 	 * except for this representative hash value.
+ 	 */
+ 	if (*i > JUMBLE_SIZE)
+ 	{
+ 		uint32 start_hash = hash_any((const unsigned char* ) last_jumble, JUMBLE_SIZE);
+ 		int hash_l = sizeof(start_hash);
+ 		int part_left_l = Max(0, ((int) size - ((int) *i - JUMBLE_SIZE)));
+ 
+ 		Assert(part_left_l >= 0 && part_left_l <= size);
+ 
+ 		memcpy(jumble, &start_hash, hash_l);
+ 		memcpy(jumble + hash_l, item + (size - part_left_l), part_left_l);
+ 		*i = hash_l + part_left_l;
+ 	}
+ 	else
+ 	{
+ 		*i += size;
+ 	}
+ }
+ 
+ /*
+  * Wrapper around AppendJumb to encapsulate details of serialization
+  * of individual local variable elements.
+  */
+ #define APP_JUMB(item) \
+ AppendJumb((unsigned char*)&item, last_jumble, sizeof(item), i)
+ 
+ /*
+  * PerformJumble: Selectively serialize the query tree and canonicalize
+  * constants (i.e.  don't consider their actual value - just their type).
+  *
+  * The last_jumble buffer, which this function writes to, can be hashed to
+  * uniquely identify a query that may use different constants in successive
+  * calls.
+  */
+ static void
+ PerformJumble(const Query *tree, Size size, Size *i)
+ {
+ 	ListCell *l;
+ 	/* table join tree (FROM and WHERE clauses) */
+ 	FromExpr *jt = (FromExpr *) tree->jointree;
+ 	/* # of result tuples to skip (int8 expr) */
+ 	FuncExpr *off = (FuncExpr *) tree->limitOffset;
+ 	/* # of result tuples to skip (int8 expr) */
+ 	FuncExpr *limcount = (FuncExpr *) tree->limitCount;
+ 
+ 	if (pgss_rangetbl_stack &&
+ 			!IsA(pgss_rangetbl_stack, List))
+ 		pgss_rangetbl_stack = NIL;
+ 
+ 	if (tree->rtable != NIL)
+ 	{
+ 		pgss_rangetbl_stack = lappend(pgss_rangetbl_stack, tree->rtable);
+ 	}
+ 	else
+ 	{
+ 		/* Add dummy Range table entry to maintain stack */
+ 		RangeTblEntry *rte = makeNode(RangeTblEntry);
+ 		List *dummy = lappend(NIL, rte);
+ 		pgss_rangetbl_stack = lappend(pgss_rangetbl_stack, dummy);
+ 	}
+ 
+ 	APP_JUMB(tree->resultRelation);
+ 
+ 	if (tree->intoClause)
+ 	{
+ 		IntoClause *ic = tree->intoClause;
+ 		RangeVar   *rel = ic->rel;
+ 
+ 		APP_JUMB(ic->onCommit);
+ 		APP_JUMB(ic->skipData);
+ 		if (rel)
+ 		{
+ 			APP_JUMB(rel->relpersistence);
+ 			/* Bypass macro abstraction to supply size directly.
+ 			 *
+ 			 * Serialize schemaname, relname themselves - this makes us
+ 			 * somewhat consistent with the behavior of utility statements like "create
+ 			 * table", which seems appropriate.
+ 			 */
+ 			if (rel->schemaname)
+ 				AppendJumb((unsigned char *)rel->schemaname, last_jumble,
+ 								strlen(rel->schemaname), i);
+ 			if (rel->relname)
+ 				AppendJumb((unsigned char *)rel->relname, last_jumble,
+ 								strlen(rel->relname), i);
+ 		}
+ 	}
+ 
+ 	/* WITH list (of CommonTableExpr's) */
+ 	foreach(l, tree->cteList)
+ 	{
+ 		CommonTableExpr	*cte = (CommonTableExpr *) lfirst(l);
+ 		Query			*cteq = (Query*) cte->ctequery;
+ 		if (cteq)
+ 			PerformJumble(cteq, size, i);
+ 	}
+ 	if (jt)
+ 	{
+ 		if (jt->quals)
+ 		{
+ 			if (IsA(jt->quals, OpExpr))
+ 			{
+ 				QualsNode((OpExpr*) jt->quals, size, i, tree->rtable);
+ 			}
+ 			else
+ 			{
+ 				LeafNode((Node*) jt->quals, size, i, tree->rtable);
+ 			}
+ 		}
+ 		/* table join tree */
+ 		foreach(l, jt->fromlist)
+ 		{
+ 			Node* fr = lfirst(l);
+ 			if (IsA(fr, JoinExpr))
+ 			{
+ 				JoinExprNode((JoinExpr*) fr, size, i, tree->rtable);
+ 			}
+ 			else if (IsA(fr, RangeTblRef))
+ 			{
+ 				RangeTblRef   *rtf = (RangeTblRef *) fr;
+ 				RangeTblEntry *rte = rt_fetch(rtf->rtindex, tree->rtable);
+ 				APP_JUMB(rte->relid);
+ 				APP_JUMB(rte->rtekind);
+ 				/* Subselection in where clause */
+ 				if (rte->subquery)
+ 					PerformJumble(rte->subquery, size, i);
+ 
+ 				/* Function call in where clause */
+ 				if (rte->funcexpr)
+ 					LeafNode((Node*) rte->funcexpr, size, i, tree->rtable);
+ 			}
+ 			else
+ 			{
+ 				ereport(WARNING,
+ 						(errcode(ERRCODE_INTERNAL_ERROR),
+ 						 errmsg("unexpected, unrecognised fromlist node type: %d",
+ 							 (int) nodeTag(fr))));
+ 			}
+ 		}
+ 	}
+ 	/*
+ 	 * target list (of TargetEntry)
+ 	 * columns returned by query
+ 	 */
+ 	foreach(l, tree->targetList)
+ 	{
+ 		TargetEntry *tg = (TargetEntry *) lfirst(l);
+ 		Node        *e  = (Node*) tg->expr;
+ 		if (tg->ressortgroupref)
+ 			/* nonzero if referenced by a sort/group - for ORDER BY */
+ 			APP_JUMB(tg->ressortgroupref);
+ 		APP_JUMB(tg->resno); /* column number for select */
+ 		/*
+ 		 * Handle the various types of nodes in
+ 		 * the select list of this query
+ 		 */
+ 		LeafNode(e, size, i, tree->rtable);
+ 	}
+ 	/* return-values list (of TargetEntry) */
+ 	foreach(l, tree->returningList)
+ 	{
+ 		TargetEntry *rt = (TargetEntry *) lfirst(l);
+ 		Expr        *e  = (Expr*) rt->expr;
+ 		unsigned char magic = MAG_RETURN_LIST;
+ 		APP_JUMB(magic);
+ 		/*
+ 		 * Handle the various types of nodes in
+ 		 * the select list of this query
+ 		 */
+ 		LeafNode((Node*) e, size, i, tree->rtable);
+ 	}
+ 	/* a list of SortGroupClause's */
+ 	foreach(l, tree->groupClause)
+ 	{
+ 		SortGroupClause *gc = (SortGroupClause *) lfirst(l);
+ 		APP_JUMB(gc->tleSortGroupRef);
+ 		APP_JUMB(gc->nulls_first);
+ 	}
+ 
+ 	if (tree->havingQual)
+ 	{
+ 		if (IsA(tree->havingQual, OpExpr))
+ 		{
+ 			OpExpr *na = (OpExpr *) tree->havingQual;
+ 			QualsNode(na, size, i, tree->rtable);
+ 		}
+ 		else
+ 		{
+ 			Node *n = (Node*) tree->havingQual;
+ 			LeafNode(n, size, i, tree->rtable);
+ 		}
+ 	}
+ 
+ 	foreach(l, tree->windowClause)
+ 	{
+ 		WindowClause *wc = (WindowClause *) lfirst(l);
+ 		ListCell     *il;
+ 		APP_JUMB(wc->frameOptions);
+ 		foreach(il, wc->partitionClause)	/* PARTITION BY list */
+ 		{
+ 			Node *n = (Node *) lfirst(il);
+ 			LeafNode(n, size, i, tree->rtable);
+ 		}
+ 		foreach(il, wc->orderClause)		/* ORDER BY list */
+ 		{
+ 			Node *n = (Node *) lfirst(il);
+ 			LeafNode(n, size, i, tree->rtable);
+ 		}
+ 	}
+ 
+ 	foreach(l, tree->distinctClause)
+ 	{
+ 		SortGroupClause *dc = (SortGroupClause *) lfirst(l);
+ 		APP_JUMB(dc->tleSortGroupRef);
+ 		APP_JUMB(dc->nulls_first);
+ 	}
+ 
+ 	/* Don't look at tree->sortClause,
+ 	 * because the value ressortgroupref is already
+ 	 * serialized when we iterate through targetList
+ 	 */
+ 
+ 	if (off)
+ 		LimitOffsetNode((Node*) off, size, i, tree->rtable);
+ 
+ 	if (limcount)
+ 		LimitOffsetNode((Node*) limcount, size, i, tree->rtable);
+ 
+ 	if (tree->setOperations)
+ 	{
+ 		/*
+ 		 * set-operation tree if this is top
+ 		 * level of a UNION/INTERSECT/EXCEPT query
+ 		 */
+ 		SetOperationStmt *topop = (SetOperationStmt *) tree->setOperations;
+ 		APP_JUMB(topop->op);
+ 		APP_JUMB(topop->all);
+ 
+ 		/* leaf selects are RTE subselections */
+ 		foreach(l, tree->rtable)
+ 		{
+ 			RangeTblEntry *r = (RangeTblEntry *) lfirst(l);
+ 			if (r->subquery)
+ 				PerformJumble(r->subquery, size, i);
+ 		}
+ 	}
+ 	pgss_rangetbl_stack = list_delete_ptr(pgss_rangetbl_stack,
+ 			list_nth(pgss_rangetbl_stack, pgss_rangetbl_stack->length - 1));
+ }
+ 
+ /*
+  * Perform selective serialization of "Quals" nodes when
+  * they're IsA(*, OpExpr)
+  */
+ static void
+ QualsNode(const OpExpr *node, Size size, Size *i, List *rtable)
+ {
+ 	ListCell *l;
+ 	APP_JUMB(node->xpr);
+ 	APP_JUMB(node->opno);
+ 	foreach(l, node->args)
+ 	{
+ 		Node *arg = (Node *) lfirst(l);
+ 		LeafNode(arg, size, i, rtable);
+ 	}
+ }
+ 
+ /*
+  * LeafNode: Selectively serialize a selection of parser/prim nodes that are
+  * frequently, though certainly not necessarily, leaf nodes, such as Vars
+  * (columns), constants and function calls
+  */
+ static void
+ LeafNode(const Node *arg, Size size, Size *i, List *rtable)
+ {
+ 	ListCell *l;
+ 	/* Use the node's NodeTag as a magic number */
+ 	APP_JUMB(arg->type);
+ 
+ 	if (IsA(arg, Const))
+ 	{
+ 		Const *c = (Const *) arg;
+ 
+ 		/*
+ 		 * Datatype of the constant is a differentiator
+ 		 */
+ 		APP_JUMB(c->consttype);
+ 		RecordConstLocation(c->location);
+ 	}
+ 	else if(IsA(arg, CoerceToDomain))
+ 	{
+ 		CoerceToDomain *cd = (CoerceToDomain*) arg;
+ 		/*
+ 		 * Datatype of the constant is a
+ 		 * differentiator
+ 		 */
+ 		APP_JUMB(cd->resulttype);
+ 		LeafNode((Node*) cd->arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, Var))
+ 	{
+ 		Var			  *v = (Var *) arg;
+ 		RangeTblEntry *rte;
+ 		ListCell *lc;
+ 
+ 		/*
+ 		 * We need to get the details of the rangetable, but rtable may not
+ 		 * refer to the relevant one if we're in a subselection.
+ 		 */
+ 		if (v->varlevelsup == 0)
+ 		{
+ 			rte = rt_fetch(v->varno, rtable);
+ 		}
+ 		else
+ 		{
+ 			List *rtable_upper = list_nth(pgss_rangetbl_stack,
+ 					(list_length(pgss_rangetbl_stack) - 1) - v->varlevelsup);
+ 			rte = rt_fetch(v->varno, rtable_upper);
+ 		}
+ 		APP_JUMB(rte->relid);
+ 
+ 		foreach(lc, rte->values_lists)
+ 		{
+ 			List	   *sublist = (List *) lfirst(lc);
+ 			ListCell   *lc2;
+ 
+ 			foreach(lc2, sublist)
+ 			{
+ 				Node	   *col = (Node *) lfirst(lc2);
+ 				LeafNode(col, size, i, rtable);
+ 			}
+ 		}
+ 		APP_JUMB(v->varattno);
+ 	}
+ 	else if (IsA(arg, CurrentOfExpr))
+ 	{
+ 		CurrentOfExpr *CoE = (CurrentOfExpr*) arg;
+ 		APP_JUMB(CoE->cvarno);
+ 		APP_JUMB(CoE->cursor_param);
+ 	}
+ 	else if (IsA(arg, CollateExpr))
+ 	{
+ 		CollateExpr *Ce = (CollateExpr*) arg;
+ 		APP_JUMB(Ce->collOid);
+ 	}
+ 	else if (IsA(arg, FieldSelect))
+ 	{
+ 		FieldSelect *Fs = (FieldSelect*) arg;
+ 		APP_JUMB(Fs->resulttype);
+ 		LeafNode((Node*) Fs->arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, NamedArgExpr))
+ 	{
+ 		NamedArgExpr *Nae = (NamedArgExpr*) arg;
+ 		APP_JUMB(Nae->argnumber);
+ 		LeafNode((Node*) Nae->arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, Param))
+ 	{
+ 		Param *p = ((Param *) arg);
+ 		APP_JUMB(p->paramkind);
+ 		APP_JUMB(p->paramid);
+ 	}
+ 	else if (IsA(arg, RelabelType))
+ 	{
+ 		RelabelType *rt = (RelabelType*) arg;
+ 		APP_JUMB(rt->resulttype);
+ 		LeafNode((Node*) rt->arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, WindowFunc))
+ 	{
+ 		WindowFunc *wf = (WindowFunc *) arg;
+ 		APP_JUMB(wf->winfnoid);
+ 		foreach(l, wf->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, FuncExpr))
+ 	{
+ 		FuncExpr *f = (FuncExpr *) arg;
+ 		APP_JUMB(f->funcid);
+ 		foreach(l, f->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, OpExpr) || IsA(arg, DistinctExpr))
+ 	{
+ 		QualsNode((OpExpr*) arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, CoerceViaIO))
+ 	{
+ 		CoerceViaIO *Cio = (CoerceViaIO*) arg;
+ 		APP_JUMB(Cio->coerceformat);
+ 		APP_JUMB(Cio->resulttype);
+ 		LeafNode((Node*) Cio->arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, Aggref))
+ 	{
+ 		Aggref *a =  (Aggref *) arg;
+ 		APP_JUMB(a->aggfnoid);
+ 		foreach(l, a->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, SubLink))
+ 	{
+ 		SubLink *s = (SubLink*) arg;
+ 		APP_JUMB(s->subLinkType);
+ 		/* Serialize select-list subselect recursively */
+ 		if (s->subselect)
+ 			PerformJumble((Query*) s->subselect, size, i);
+ 
+ 		if (s->testexpr)
+ 			LeafNode((Node*) s->testexpr, size, i, rtable);
+ 		foreach(l, s->operName)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, TargetEntry))
+ 	{
+ 		TargetEntry *rt = (TargetEntry *) arg;
+ 		Node *e = (Node*) rt->expr;
+ 		APP_JUMB(rt->resorigtbl);
+ 		APP_JUMB(rt->ressortgroupref);
+ 		LeafNode(e, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, BoolExpr))
+ 	{
+ 		BoolExpr *be = (BoolExpr *) arg;
+ 		APP_JUMB(be->boolop);
+ 		foreach(l, be->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, NullTest))
+ 	{
+ 		NullTest *nt = (NullTest *) arg;
+ 		Node     *arg = (Node *) nt->arg;
+ 		APP_JUMB(nt->nulltesttype);		/* IS NULL, IS NOT NULL */
+ 		APP_JUMB(nt->argisrow);			/* is input a composite type ? */
+ 		LeafNode(arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, ArrayExpr))
+ 	{
+ 		ArrayExpr *ae = (ArrayExpr *) arg;
+ 		APP_JUMB(ae->array_typeid);		/* type of expression result */
+ 		APP_JUMB(ae->element_typeid);	/* common type of array elements */
+ 		foreach(l, ae->elements)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, CaseExpr))
+ 	{
+ 		CaseExpr *ce = (CaseExpr*) arg;
+ 		Assert(ce->casetype != InvalidOid);
+ 		APP_JUMB(ce->casetype);
+ 		foreach(l, ce->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 		if (ce->arg)
+ 			LeafNode((Node*) ce->arg, size, i, rtable);
+ 
+ 		if (ce->defresult)
+ 		{
+ 			/* Default result (ELSE clause).
+ 			 *
+ 			 * The defresult field can be NULL when no ELSE clause
+ 			 * was specified (equivalent to SQL ELSE NULL); this
+ 			 * branch handles an explicitly present default result.
+ 			 */
+ 			LeafNode((Node*) ce->defresult, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, CaseTestExpr))
+ 	{
+ 		CaseTestExpr *ct = (CaseTestExpr*) arg;
+ 		APP_JUMB(ct->typeId);
+ 	}
+ 	else if (IsA(arg, CaseWhen))
+ 	{
+ 		CaseWhen *cw = (CaseWhen*) arg;
+ 		Node     *res = (Node*) cw->result;
+ 		Node     *exp = (Node*) cw->expr;
+ 		if (res)
+ 			LeafNode(res, size, i, rtable);
+ 		if (exp)
+ 			LeafNode(exp, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, MinMaxExpr))
+ 	{
+ 		MinMaxExpr *cw = (MinMaxExpr*) arg;
+ 		APP_JUMB(cw->minmaxtype);
+ 		APP_JUMB(cw->op);
+ 		foreach(l, cw->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, ScalarArrayOpExpr))
+ 	{
+ 		ScalarArrayOpExpr *sa = (ScalarArrayOpExpr*) arg;
+ 		APP_JUMB(sa->opfuncid);
+ 		APP_JUMB(sa->useOr);
+ 		foreach(l, sa->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, CoalesceExpr))
+ 	{
+ 		CoalesceExpr *ca = (CoalesceExpr*) arg;
+ 		foreach(l, ca->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, ArrayCoerceExpr))
+ 	{
+ 		ArrayCoerceExpr *ac = (ArrayCoerceExpr *) arg;
+ 		LeafNode((Node*) ac->arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, WindowClause))
+ 	{
+ 		WindowClause *wc = (WindowClause*) arg;
+ 		foreach(l, wc->partitionClause)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 		foreach(l, wc->orderClause)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, SortGroupClause))
+ 	{
+ 		SortGroupClause *sgc = (SortGroupClause*) arg;
+ 		APP_JUMB(sgc->tleSortGroupRef);
+ 		APP_JUMB(sgc->nulls_first);
+ 	}
+ 	else if (IsA(arg, Integer) ||
+ 		  IsA(arg, Float) ||
+ 		  IsA(arg, String) ||
+ 		  IsA(arg, BitString) ||
+ 		  IsA(arg, Null)
+ 		)
+ 	{
+ 		/* Value nodes need not be serialized - they only appear where
+ 		 * aliases are used, and aliases are ignored.
+ 		 */
+ 		return;
+ 	}
+ 	else if (IsA(arg, BooleanTest))
+ 	{
+ 		BooleanTest *bt = (BooleanTest *) arg;
+ 		APP_JUMB(bt->booltesttype);
+ 		LeafNode((Node*) bt->arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, ArrayRef))
+ 	{
+ 		ArrayRef *ar = (ArrayRef*) arg;
+ 		APP_JUMB(ar->refarraytype);
+ 		foreach(l, ar->refupperindexpr)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 		foreach(l, ar->reflowerindexpr)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 		if (ar->refexpr)
+ 			LeafNode((Node*) ar->refexpr, size, i, rtable);
+ 		if (ar->refassgnexpr)
+ 			LeafNode((Node*) ar->refassgnexpr, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, NullIfExpr))
+ 	{
+ 		/* NullIfExpr is just a typedef for OpExpr */
+ 		QualsNode((OpExpr*) arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, RowExpr))
+ 	{
+ 		RowExpr *re = (RowExpr*) arg;
+ 		APP_JUMB(re->row_format);
+ 		foreach(l, re->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 
+ 	}
+ 	else if (IsA(arg, XmlExpr))
+ 	{
+ 		XmlExpr *xml = (XmlExpr*) arg;
+ 		APP_JUMB(xml->op);
+ 		foreach(l, xml->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 		/* non-XML expressions for xml_attributes */
+ 		foreach(l, xml->named_args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 		/* parallel list of Value strings */
+ 		foreach(l, xml->arg_names)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, RowCompareExpr))
+ 	{
+ 		RowCompareExpr *rc = (RowCompareExpr*) arg;
+ 		foreach(l, rc->largs)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 		foreach(l, rc->rargs)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(arg, SetToDefault))
+ 	{
+ 		SetToDefault *sd = (SetToDefault*) arg;
+ 		APP_JUMB(sd->typeId);
+ 		APP_JUMB(sd->typeMod);
+ 	}
+ 	else if (IsA(arg, ConvertRowtypeExpr))
+ 	{
+ 		ConvertRowtypeExpr* Cr = (ConvertRowtypeExpr*) arg;
+ 		APP_JUMB(Cr->convertformat);
+ 		APP_JUMB(Cr->resulttype);
+ 		LeafNode((Node*) Cr->arg, size, i, rtable);
+ 	}
+ 	else if (IsA(arg, FieldStore))
+ 	{
+ 		FieldStore* Fs = (FieldStore*) arg;
+ 		LeafNode((Node*) Fs->arg, size, i, rtable);
+ 		foreach(l, Fs->newvals)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else
+ 	{
+ 		ereport(WARNING,
+ 				(errcode(ERRCODE_INTERNAL_ERROR),
+ 				 errmsg("unexpected, unrecognised LeafNode node type: %d",
+ 					 (int) nodeTag(arg))));
+ 	}
+ }
+ 
+ /*
+  * Perform selective serialization of limit or offset nodes
+  */
+ static void
+ LimitOffsetNode(const Node *node, Size size, Size *i, List *rtable)
+ {
+ 	ListCell *l;
+ 	unsigned char magic = MAG_LIMIT_OFFSET;
+ 	APP_JUMB(magic);
+ 
+ 	if (IsA(node, FuncExpr))
+ 	{
+ 
+ 		foreach(l, ((FuncExpr*) node)->args)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else
+ 	{
+ 		/* Fall back on leaf node representation */
+ 		LeafNode(node, size, i, rtable);
+ 	}
+ }
+ 
+ /*
+  * JoinExprNode: Perform selective serialization of JoinExpr nodes
+  */
+ static void
+ JoinExprNode(JoinExpr *node, Size size, Size *i, List *rtable)
+ {
+ 	Node	 *larg = node->larg;	/* left subtree */
+ 	Node	 *rarg = node->rarg;	/* right subtree */
+ 	ListCell *l;
+ 
+ 	Assert( IsA(node, JoinExpr));
+ 
+ 	APP_JUMB(node->jointype);
+ 	APP_JUMB(node->isNatural);
+ 
+ 	if (node->quals)
+ 	{
+ 		if ( IsA(node, OpExpr))
+ 		{
+ 			QualsNode((OpExpr*) node->quals, size, i, rtable);
+ 		}
+ 		else
+ 		{
+ 			LeafNode((Node*) node->quals, size, i, rtable);
+ 		}
+ 	}
+ 	foreach(l, node->usingClause) /* USING clause, if any (list of String) */
+ 	{
+ 		Node *arg = (Node *) lfirst(l);
+ 		LeafNode(arg, size, i, rtable);
+ 	}
+ 	if (larg)
+ 		JoinExprNodeChild(larg, size, i, rtable);
+ 	if (rarg)
+ 		JoinExprNodeChild(rarg, size, i, rtable);
+ }
+ 
+ /*
+  * JoinExprNodeChild: Serialize children of the JoinExpr node
+  */
+ static void
+ JoinExprNodeChild(const Node *node, Size size, Size *i, List *rtable)
+ {
+ 	if (IsA(node, RangeTblRef))
+ 	{
+ 		RangeTblRef   *rt = (RangeTblRef*) node;
+ 		RangeTblEntry *rte = rt_fetch(rt->rtindex, rtable);
+ 		ListCell      *l;
+ 
+ 		APP_JUMB(rte->relid);
+ 		APP_JUMB(rte->jointype);
+ 
+ 		if (rte->subquery)
+ 			PerformJumble((Query*) rte->subquery, size, i);
+ 
+ 		foreach(l, rte->joinaliasvars)
+ 		{
+ 			Node *arg = (Node *) lfirst(l);
+ 			LeafNode(arg, size, i, rtable);
+ 		}
+ 	}
+ 	else if (IsA(node, JoinExpr))
+ 	{
+ 		JoinExprNode((JoinExpr*) node, size, i, rtable);
+ 	}
+ 	else
+ 	{
+ 		LeafNode(node, size, i, rtable);
+ 	}
+ }
+ 
+ /*
+  * Record location of constant within query string of query tree that is
+  * currently being walked.
+  */
+ static void
+ RecordConstLocation(int location)
+ {
+ 	/* -1 indicates unknown or undefined location; NOTE(review): the test below also skips offset 0 -- should it be "location >= 0"? */
+ 	if (location > 0)
+ 	{
+ 		if (last_offset_num >= last_offset_buf_size)
+ 		{
+ 			last_offset_buf_size *= 2;
+ 			last_offsets = repalloc(last_offsets,
+ 							last_offset_buf_size *
+ 							sizeof(pgssLocationLen));
+ 
+ 		}
+ 		last_offsets[last_offset_num++].location = location;
+ 	}
+ }
+ 
+ /*
   * ExecutorStart hook: start up tracking if needed
   */
  static void
*************** pgss_ExecutorEnd(QueryDesc *queryDesc)
*** 587,592 ****
--- 1744,1754 ----
  {
  	if (queryDesc->totaltime && pgss_enabled())
  	{
+ 		uint32 queryId;
+ 		if (pgss_string_key)
+ 			queryId = pgss_hash_string(queryDesc->sourceText);
+ 		else
+ 			queryId = queryDesc->plannedstmt->queryId;
  		/*
  		 * Make sure stats accumulation is done.  (Note: it's okay if several
  		 * levels of hook all do this.)
*************** pgss_ExecutorEnd(QueryDesc *queryDesc)
*** 594,602 ****
  		InstrEndLoop(queryDesc->totaltime);
  
  		pgss_store(queryDesc->sourceText,
! 				   queryDesc->totaltime->total,
! 				   queryDesc->estate->es_processed,
! 				   &queryDesc->totaltime->bufusage);
  	}
  
  	if (prev_ExecutorEnd)
--- 1756,1768 ----
  		InstrEndLoop(queryDesc->totaltime);
  
  		pgss_store(queryDesc->sourceText,
! 		   queryId,
! 		   queryDesc->totaltime->total,
! 		   queryDesc->estate->es_processed,
! 		   &queryDesc->totaltime->bufusage,
! 		   false,
! 		   false);
! 
  	}
  
  	if (prev_ExecutorEnd)
*************** pgss_ProcessUtility(Node *parsetree, con
*** 618,623 ****
--- 1784,1790 ----
  		instr_time	start;
  		instr_time	duration;
  		uint64		rows = 0;
+ 		uint32		queryId;
  		BufferUsage bufusage;
  
  		bufusage = pgBufferUsage;
*************** pgss_ProcessUtility(Node *parsetree, con
*** 671,678 ****
  		bufusage.temp_blks_written =
  			pgBufferUsage.temp_blks_written - bufusage.temp_blks_written;
  
! 		pgss_store(queryString, INSTR_TIME_GET_DOUBLE(duration), rows,
! 				   &bufusage);
  	}
  	else
  	{
--- 1838,1848 ----
  		bufusage.temp_blks_written =
  			pgBufferUsage.temp_blks_written - bufusage.temp_blks_written;
  
! 		queryId = pgss_hash_string(queryString);
! 
! 		/* In the case of utility statements, hash the query string directly */
! 		pgss_store(queryString, queryId,
! 				INSTR_TIME_GET_DOUBLE(duration), rows, &bufusage, false, false);
  	}
  	else
  	{
*************** pgss_hash_fn(const void *key, Size keysi
*** 696,703 ****
  	/* we don't bother to include encoding in the hash */
  	return hash_uint32((uint32) k->userid) ^
  		hash_uint32((uint32) k->dbid) ^
! 		DatumGetUInt32(hash_any((const unsigned char *) k->query_ptr,
! 								k->query_len));
  }
  
  /*
--- 1866,1873 ----
  	/* we don't bother to include encoding in the hash */
  	return hash_uint32((uint32) k->userid) ^
  		hash_uint32((uint32) k->dbid) ^
! 		DatumGetUInt32(hash_any((const unsigned char* ) &k->queryid,
! 					sizeof(k->queryid)) );
  }
  
  /*
*************** pgss_match_fn(const void *key1, const vo
*** 712,733 ****
  	if (k1->userid == k2->userid &&
  		k1->dbid == k2->dbid &&
  		k1->encoding == k2->encoding &&
! 		k1->query_len == k2->query_len &&
! 		memcmp(k1->query_ptr, k2->query_ptr, k1->query_len) == 0)
  		return 0;
  	else
  		return 1;
  }
  
  /*
   * Store some statistics for a statement.
   */
  static void
! pgss_store(const char *query, double total_time, uint64 rows,
! 		   const BufferUsage *bufusage)
  {
  	pgssHashKey key;
  	double		usage;
  	pgssEntry  *entry;
  
  	Assert(query != NULL);
--- 1882,1927 ----
  	if (k1->userid == k2->userid &&
  		k1->dbid == k2->dbid &&
  		k1->encoding == k2->encoding &&
! 		k1->queryid == k2->queryid)
  		return 0;
  	else
  		return 1;
  }
  
  /*
+  * Given an arbitrarily long query string, produce a hash for the purposes of
+  * identifying the query, without canonicalizing constants. Used when hashing
+  * utility statements, or for legacy compatibility mode.
+  */
+ static uint32
+ pgss_hash_string(const char* str)
+ {
+ 	/* For additional protection against collisions, include a magic value */
+ 	char magic = MAG_STR_BUF;
+ 	uint32 result;
+ 	Size size = sizeof(magic) + strlen(str);
+ 	unsigned char* p = palloc(size);
+ 	memcpy(p, &magic, sizeof(magic));
+ 	memcpy(p + sizeof(magic), str, strlen(str));
+ 	result = hash_any((const unsigned char *) p, size);
+ 	pfree(p);
+ 	return result;
+ }
+ 
+ /*
   * Store some statistics for a statement.
   */
  static void
! pgss_store(const char *query, uint32 queryId,
! 				double total_time, uint64 rows,
! 				const BufferUsage *bufusage,
! 				bool empty_entry,
! 				bool canonicalize)
  {
  	pgssHashKey key;
  	double		usage;
+ 	int		    new_query_len = strlen(query);
+ 	char	   *norm_query = NULL;
  	pgssEntry  *entry;
  
  	Assert(query != NULL);
*************** pgss_store(const char *query, double tot
*** 740,773 ****
  	key.userid = GetUserId();
  	key.dbid = MyDatabaseId;
  	key.encoding = GetDatabaseEncoding();
! 	key.query_len = strlen(query);
! 	if (key.query_len >= pgss->query_size)
! 		key.query_len = pg_encoding_mbcliplen(key.encoding,
  											  query,
! 											  key.query_len,
  											  pgss->query_size - 1);
- 	key.query_ptr = query;
  
! 	usage = USAGE_EXEC(duration);
  
  	/* Lookup the hash table entry with shared lock. */
  	LWLockAcquire(pgss->lock, LW_SHARED);
  
- 	entry = (pgssEntry *) hash_search(pgss_hash, &key, HASH_FIND, NULL);
  	if (!entry)
  	{
! 		/* Must acquire exclusive lock to add a new entry. */
! 		LWLockRelease(pgss->lock);
! 		LWLockAcquire(pgss->lock, LW_EXCLUSIVE);
! 		entry = entry_alloc(&key);
  	}
  
! 	/* Grab the spinlock while updating the counters. */
  	{
  		volatile pgssEntry *e = (volatile pgssEntry *) entry;
  
  		SpinLockAcquire(&e->mutex);
! 		e->counters.calls += 1;
  		e->counters.total_time += total_time;
  		e->counters.rows += rows;
  		e->counters.shared_blks_hit += bufusage->shared_blks_hit;
--- 1934,2085 ----
  	key.userid = GetUserId();
  	key.dbid = MyDatabaseId;
  	key.encoding = GetDatabaseEncoding();
! 	key.queryid = queryId;
! 
! 	if (new_query_len >= pgss->query_size)
! 		/* We don't have to worry about this later, because canonicalization
! 		 * cannot possibly result in a longer query string
! 		 */
! 		new_query_len = pg_encoding_mbcliplen(key.encoding,
  											  query,
! 											  new_query_len,
  											  pgss->query_size - 1);
  
! 	entry = (pgssEntry *) hash_search(pgss_hash, &key, HASH_FIND, NULL);
! 
! 	/*
! 	 * When just initializing an entry and leaving its counters at zero, make
! 	 * it artificially sticky so that it will probably still be there when
! 	 * later executed.  Strictly speaking, this makes canonicalization of
! 	 * query strings best-effort only, though it would be difficult to
! 	 * demonstrate a failure even under artificial conditions.
! 	 */
! 	if (empty_entry && !entry)
! 		usage = USAGE_NON_EXEC_STICK;
! 	else
! 		usage = USAGE_EXEC(duration);
  
  	/* Lookup the hash table entry with shared lock. */
  	LWLockAcquire(pgss->lock, LW_SHARED);
  
  	if (!entry)
  	{
! 		/*
! 		 * Generate a normalized version of the query string that will be used
! 		 * to represent entry.
! 		 *
! 		 * Note that the representation seen by the user will only have
! 		 * non-differentiating Const tokens swapped with '?' characters, and
! 		 * this does not for example take account of the fact that alias names
! 		 * could vary between successive calls of what is regarded as the same
! 		 * query, or that whitespace could vary.
! 		 */
! 		if (last_offset_num > 0 && canonicalize)
! 		{
! 			int i,
! 			  off = 0,				/* Offset from start for cur tok */
! 			  tok_len = 0,			/* length (in bytes) of that tok */
! 			  quer_it = 0,			/* Original query byte iterator */
! 			  n_quer_it = 0,		/* Normalized query byte iterator */
! 			  len_to_wrt = 0,		/* Length (in bytes) to write */
! 			  last_off = 0,			/* Offset from start for last iter's tok */
! 			  last_tok_len = 0,		/* length (in bytes) of that tok */
! 			  tok_len_delta = 0;	/* Finished str is n bytes shorter so far */
! 
! 			/* Fill-in constant lengths - core system only gives us locations */
! 			fill_in_constant_lengths(query, last_offsets, last_offset_num);
! 
! 			norm_query = palloc0(new_query_len + 1);
! 
! 			for(i = 0; i < last_offset_num; i++)
! 			{
! 				if(last_offsets[i].length == -1)
! 					continue; /* don't assume that there's no duplicates */
! 
! 				off = last_offsets[i].location;
! 				tok_len = last_offsets[i].length;
! 				len_to_wrt = off - last_off;
! 				len_to_wrt -= last_tok_len;
! 				/* -1 for the '?' char: */
! 				tok_len_delta += tok_len - 1;
! 
! 				Assert(tok_len > 0);
! 				Assert(len_to_wrt >= 0);
! 				/*
! 				 * Each iteration copies everything prior to the current
! 				 * offset/token to be replaced, except bytes copied in
! 				 * previous iterations
! 				 */
! 				if (off - tok_len_delta + tok_len > new_query_len)
! 				{
! 					if (off - tok_len_delta < new_query_len)
! 					{
! 						len_to_wrt = new_query_len - n_quer_it;
! 						/* Out of space entirely - copy as much as possible */
! 						memcpy(norm_query + n_quer_it, query + quer_it,
! 								len_to_wrt);
! 						n_quer_it += len_to_wrt;
! 						quer_it += len_to_wrt + tok_len;
! 					}
! 					break;
! 				}
! 				memcpy(norm_query + n_quer_it, query + quer_it, len_to_wrt);
! 
! 				n_quer_it += len_to_wrt;
! 				if (n_quer_it < new_query_len)
! 					norm_query[n_quer_it++] = '?';
! 				quer_it += len_to_wrt + tok_len;
! 				last_off = off;
! 				last_tok_len = tok_len;
! 			}
! 			/*
! 			 * We've copied up until the last canonicalized constant
! 			 * (inclusive), or have run out of space entirely. Either fill
! 			 * norm_query to capacity, or copy over all remaining bytes from
! 			 * query, or copy nothing.
! 			 */
! 			memcpy(norm_query + n_quer_it, query + quer_it,
! 					new_query_len - n_quer_it);
! 
! 			/*
! 			 * Must acquire exclusive lock to add a new entry.
! 			 * Leave that until as late as possible.
! 			 */
! 			LWLockRelease(pgss->lock);
! 			LWLockAcquire(pgss->lock, LW_EXCLUSIVE);
! 
! 			entry = entry_alloc(&key, norm_query, new_query_len);
! 		}
! 		else
! 		{
! 			/* Acquire exclusive lock as required by entry_alloc() */
! 			LWLockRelease(pgss->lock);
! 			LWLockAcquire(pgss->lock, LW_EXCLUSIVE);
! 
! 			entry = entry_alloc(&key, query, new_query_len);
! 		}
  	}
  
! 	/*
! 	 * Grab the spinlock while updating the counters; calls is only
! 	 * incremented when we're not just here to canonicalize.
! 	 */
  	{
  		volatile pgssEntry *e = (volatile pgssEntry *) entry;
  
  		SpinLockAcquire(&e->mutex);
! 		if (!empty_entry)
! 		{
! 			/*
! 			 * If necessary, "unstick" previously stuck query entry that just
! 			 * held a normalized query string, and then increment calls.
! 			 */
! 			if (e->counters.calls == 0)
! 				e->counters.usage = USAGE_INIT;
! 
! 			e->counters.calls += 1;
! 		}
! 
  		e->counters.total_time += total_time;
  		e->counters.rows += rows;
  		e->counters.shared_blks_hit += bufusage->shared_blks_hit;
*************** pgss_store(const char *query, double tot
*** 783,790 ****
  		e->counters.usage += usage;
  		SpinLockRelease(&e->mutex);
  	}
- 
  	LWLockRelease(pgss->lock);
  }
  
  /*
--- 2095,2103 ----
  		e->counters.usage += usage;
  		SpinLockRelease(&e->mutex);
  	}
  	LWLockRelease(pgss->lock);
+ 	if (norm_query)
+ 		pfree(norm_query);
  }
  
  /*
*************** pg_stat_statements(PG_FUNCTION_ARGS)
*** 875,881 ****
  
  			qstr = (char *)
  				pg_do_encoding_conversion((unsigned char *) entry->query,
! 										  entry->key.query_len,
  										  entry->key.encoding,
  										  GetDatabaseEncoding());
  			values[i++] = CStringGetTextDatum(qstr);
--- 2188,2194 ----
  
  			qstr = (char *)
  				pg_do_encoding_conversion((unsigned char *) entry->query,
! 										  entry->query_len,
  										  entry->key.encoding,
  										  GetDatabaseEncoding());
  			values[i++] = CStringGetTextDatum(qstr);
*************** pg_stat_statements(PG_FUNCTION_ARGS)
*** 893,898 ****
--- 2206,2214 ----
  			tmp = e->counters;
  			SpinLockRelease(&e->mutex);
  		}
+ 		/* Skip record of unexecuted query */
+ 		if (tmp.calls == 0)
+ 			continue;
  
  		values[i++] = Int64GetDatumFast(tmp.calls);
  		values[i++] = Float8GetDatumFast(tmp.total_time);
*************** pgss_memsize(void)
*** 950,963 ****
   * have made the entry while we waited to get exclusive lock.
   */
  static pgssEntry *
! entry_alloc(pgssHashKey *key)
  {
  	pgssEntry  *entry;
  	bool		found;
  
- 	/* Caller must have clipped query properly */
- 	Assert(key->query_len < pgss->query_size);
- 
  	/* Make space if needed */
  	while (hash_get_num_entries(pgss_hash) >= pgss_max)
  		entry_dealloc();
--- 2266,2276 ----
   * have made the entry while we waited to get exclusive lock.
   */
  static pgssEntry *
! entry_alloc(pgssHashKey *key, const char* query, int new_query_len)
  {
  	pgssEntry  *entry;
  	bool		found;
  
  	/* Make space if needed */
  	while (hash_get_num_entries(pgss_hash) >= pgss_max)
  		entry_dealloc();
*************** entry_alloc(pgssHashKey *key)
*** 969,985 ****
  	{
  		/* New entry, initialize it */
  
! 		/* dynahash tried to copy the key for us, but must fix query_ptr */
! 		entry->key.query_ptr = entry->query;
  		/* reset the statistics */
  		memset(&entry->counters, 0, sizeof(Counters));
  		entry->counters.usage = USAGE_INIT;
  		/* re-initialize the mutex each time ... we assume no one using it */
  		SpinLockInit(&entry->mutex);
  		/* ... and don't forget the query text */
! 		memcpy(entry->query, key->query_ptr, key->query_len);
! 		entry->query[key->query_len] = '\0';
  	}
  
  	return entry;
  }
--- 2282,2301 ----
  	{
  		/* New entry, initialize it */
  
! 		entry->query_len = new_query_len;
! 		Assert(entry->query_len > 0);
  		/* reset the statistics */
  		memset(&entry->counters, 0, sizeof(Counters));
  		entry->counters.usage = USAGE_INIT;
  		/* re-initialize the mutex each time ... we assume no one using it */
  		SpinLockInit(&entry->mutex);
  		/* ... and don't forget the query text */
! 		memcpy(entry->query, query, entry->query_len);
! 		Assert(new_query_len <= pgss->query_size);
! 		entry->query[entry->query_len] = '\0';
  	}
+ 	/* Caller must have clipped query properly */
+ 	Assert(entry->query_len < pgss->query_size);
  
  	return entry;
  }
diff --git a/src/backend/nodes/copyfuncs.c b/src/backend/nodes/copyfuncs.c
new file mode 100644
index cc3168d..84483ce
*** a/src/backend/nodes/copyfuncs.c
--- b/src/backend/nodes/copyfuncs.c
*************** _copyPlannedStmt(const PlannedStmt *from
*** 92,97 ****
--- 92,98 ----
  	COPY_NODE_FIELD(relationOids);
  	COPY_NODE_FIELD(invalItems);
  	COPY_SCALAR_FIELD(nParamExec);
+ 	COPY_SCALAR_FIELD(queryId);
  
  	return newnode;
  }
*************** _copyQuery(const Query *from)
*** 2415,2420 ****
--- 2416,2422 ----
  
  	COPY_SCALAR_FIELD(commandType);
  	COPY_SCALAR_FIELD(querySource);
+ 	COPY_SCALAR_FIELD(queryId);
  	COPY_SCALAR_FIELD(canSetTag);
  	COPY_NODE_FIELD(utilityStmt);
  	COPY_SCALAR_FIELD(resultRelation);
diff --git a/src/backend/nodes/equalfuncs.c b/src/backend/nodes/equalfuncs.c
new file mode 100644
index 2295195..ce75da3
*** a/src/backend/nodes/equalfuncs.c
--- b/src/backend/nodes/equalfuncs.c
***************
*** 83,88 ****
--- 83,91 ----
  #define COMPARE_LOCATION_FIELD(fldname) \
  	((void) 0)
  
+ /* Compare a query_id field (this is a no-op, per note above) */
+ #define COMPARE_QUERYID_FIELD(fldname) \
+ 	((void) 0)
  
  /*
   *	Stuff from primnodes.h
*************** _equalQuery(const Query *a, const Query
*** 897,902 ****
--- 900,906 ----
  {
  	COMPARE_SCALAR_FIELD(commandType);
  	COMPARE_SCALAR_FIELD(querySource);
+ 	COMPARE_QUERYID_FIELD(query_id);
  	COMPARE_SCALAR_FIELD(canSetTag);
  	COMPARE_NODE_FIELD(utilityStmt);
  	COMPARE_SCALAR_FIELD(resultRelation);
diff --git a/src/backend/nodes/outfuncs.c b/src/backend/nodes/outfuncs.c
new file mode 100644
index 829f6d4..9646125
*** a/src/backend/nodes/outfuncs.c
--- b/src/backend/nodes/outfuncs.c
***************
*** 81,86 ****
--- 81,90 ----
  #define WRITE_LOCATION_FIELD(fldname) \
  	appendStringInfo(str, " :" CppAsString(fldname) " %d", node->fldname)
  
+ /* Write a query id field */
+ #define WRITE_QUERYID_FIELD(fldname) \
+ 	((void) 0)
+ 
  /* Write a Node field */
  #define WRITE_NODE_FIELD(fldname) \
  	(appendStringInfo(str, " :" CppAsString(fldname) " "), \
*************** _outPlannedStmt(StringInfo str, const Pl
*** 255,260 ****
--- 259,265 ----
  	WRITE_NODE_FIELD(relationOids);
  	WRITE_NODE_FIELD(invalItems);
  	WRITE_INT_FIELD(nParamExec);
+ 	WRITE_QUERYID_FIELD(queryId);
  }
  
  /*
*************** _outQuery(StringInfo str, const Query *n
*** 2159,2164 ****
--- 2164,2170 ----
  
  	WRITE_ENUM_FIELD(commandType, CmdType);
  	WRITE_ENUM_FIELD(querySource, QuerySource);
+ 	WRITE_QUERYID_FIELD(query_id);
  	WRITE_BOOL_FIELD(canSetTag);
  
  	/*
diff --git a/src/backend/nodes/readfuncs.c b/src/backend/nodes/readfuncs.c
new file mode 100644
index b9258ad..5ea0d52
*** a/src/backend/nodes/readfuncs.c
--- b/src/backend/nodes/readfuncs.c
***************
*** 110,115 ****
--- 110,119 ----
  	token = pg_strtok(&length);		/* get field value */ \
  	local_node->fldname = -1	/* set field to "unknown" */
  
+ /* Read a QueryId field - NO-OP */
+ #define READ_QUERYID_FIELD(fldname) \
+ 	((void) 0)
+ 
  /* Read a Node field */
  #define READ_NODE_FIELD(fldname) \
  	token = pg_strtok(&length);		/* skip :fldname */ \
*************** _readQuery(void)
*** 195,200 ****
--- 199,205 ----
  
  	READ_ENUM_FIELD(commandType, CmdType);
  	READ_ENUM_FIELD(querySource, QuerySource);
+ 	READ_QUERYID_FIELD(query_id);
  	READ_BOOL_FIELD(canSetTag);
  	READ_NODE_FIELD(utilityStmt);
  	READ_INT_FIELD(resultRelation);
diff --git a/src/backend/optimizer/plan/planner.c b/src/backend/optimizer/plan/planner.c
new file mode 100644
index 8bbe977..1b4030f
*** a/src/backend/optimizer/plan/planner.c
--- b/src/backend/optimizer/plan/planner.c
*************** standard_planner(Query *parse, int curso
*** 240,245 ****
--- 240,246 ----
  	result->relationOids = glob->relationOids;
  	result->invalItems = glob->invalItems;
  	result->nParamExec = list_length(glob->paramlist);
+ 	result->queryId = parse->queryId;
  
  	return result;
  }
diff --git a/src/backend/parser/analyze.c b/src/backend/parser/analyze.c
new file mode 100644
index b187b03..92a7dec
*** a/src/backend/parser/analyze.c
--- b/src/backend/parser/analyze.c
*************** static Query *transformExplainStmt(Parse
*** 65,73 ****
  static void transformLockingClause(ParseState *pstate, Query *qry,
  					   LockingClause *lc, bool pushedDown);
  
  
  /*
!  * parse_analyze
   *		Analyze a raw parse tree and transform it to Query form.
   *
   * Optionally, information about $n parameter types can be supplied.
--- 65,89 ----
  static void transformLockingClause(ParseState *pstate, Query *qry,
  					   LockingClause *lc, bool pushedDown);
  
+ /* Hooks for plugins to get control of parse analysis */
+ parse_analyze_hook_type				parse_analyze_hook = NULL;
+ parse_analyze_varparams_hook_type	parse_analyze_varparams_hook = NULL;
+ 
+ 
+ Query *
+ parse_analyze(Node *parseTree, const char *sourceText,
+ 			  Oid *paramTypes, int numParams)
+ {
+ 	if (parse_analyze_hook)
+ 		return (*parse_analyze_hook) (parseTree, sourceText,
+ 			  paramTypes, numParams);
+ 	else
+ 		return standard_parse_analyze(parseTree, sourceText,
+ 			  paramTypes, numParams);
+ }
  
  /*
!  * standard_parse_analyze
   *		Analyze a raw parse tree and transform it to Query form.
   *
   * Optionally, information about $n parameter types can be supplied.
*************** static void transformLockingClause(Parse
*** 78,84 ****
   * a dummy CMD_UTILITY Query node.
   */
  Query *
! parse_analyze(Node *parseTree, const char *sourceText,
  			  Oid *paramTypes, int numParams)
  {
  	ParseState *pstate = make_parsestate(NULL);
--- 94,100 ----
   * a dummy CMD_UTILITY Query node.
   */
  Query *
! standard_parse_analyze(Node *parseTree, const char *sourceText,
  			  Oid *paramTypes, int numParams)
  {
  	ParseState *pstate = make_parsestate(NULL);
*************** parse_analyze(Node *parseTree, const cha
*** 98,112 ****
  	return query;
  }
  
  /*
!  * parse_analyze_varparams
   *
   * This variant is used when it's okay to deduce information about $n
   * symbol datatypes from context.  The passed-in paramTypes[] array can
   * be modified or enlarged (via repalloc).
   */
  Query *
! parse_analyze_varparams(Node *parseTree, const char *sourceText,
  						Oid **paramTypes, int *numParams)
  {
  	ParseState *pstate = make_parsestate(NULL);
--- 114,140 ----
  	return query;
  }
  
+ Query *
+ parse_analyze_varparams(Node *parseTree, const char *sourceText,
+ 						Oid **paramTypes, int *numParams)
+ {
+ 	if (parse_analyze_varparams_hook)
+ 		return (*parse_analyze_varparams_hook) (parseTree, sourceText,
+ 						paramTypes, numParams);
+ 	else
+ 		return standard_parse_analyze_varparams(parseTree, sourceText,
+ 			  paramTypes, numParams);
+ }
+ 
  /*
!  * standard_parse_analyze_varparams
   *
   * This variant is used when it's okay to deduce information about $n
   * symbol datatypes from context.  The passed-in paramTypes[] array can
   * be modified or enlarged (via repalloc).
   */
  Query *
! standard_parse_analyze_varparams(Node *parseTree, const char *sourceText,
  						Oid **paramTypes, int *numParams)
  {
  	ParseState *pstate = make_parsestate(NULL);
*************** transformSelectStmt(ParseState *pstate,
*** 877,882 ****
--- 905,911 ----
  	ListCell   *l;
  
  	qry->commandType = CMD_SELECT;
+ 	qry->queryId = 0;
  
  	/* process the WITH clause independently of all else */
  	if (stmt->withClause)
diff --git a/src/backend/parser/parse_coerce.c b/src/backend/parser/parse_coerce.c
new file mode 100644
index 6661a3d..841d2b2
*** a/src/backend/parser/parse_coerce.c
--- b/src/backend/parser/parse_coerce.c
*************** coerce_type(ParseState *pstate, Node *no
*** 280,293 ****
  		newcon->constlen = typeLen(targetType);
  		newcon->constbyval = typeByVal(targetType);
  		newcon->constisnull = con->constisnull;
! 		/* Use the leftmost of the constant's and coercion's locations */
! 		if (location < 0)
! 			newcon->location = con->location;
! 		else if (con->location >= 0 && con->location < location)
! 			newcon->location = con->location;
! 		else
! 			newcon->location = location;
! 
  		/*
  		 * Set up to point at the constant's text if the input routine throws
  		 * an error.
--- 280,286 ----
  		newcon->constlen = typeLen(targetType);
  		newcon->constbyval = typeByVal(targetType);
  		newcon->constisnull = con->constisnull;
! 		newcon->location = con->location;
  		/*
  		 * Set up to point at the constant's text if the input routine throws
  		 * an error.
*************** coerce_type(ParseState *pstate, Node *no
*** 333,340 ****
  		result = (*pstate->p_coerce_param_hook) (pstate,
  												 (Param *) node,
  												 targetTypeId,
! 												 targetTypeMod,
! 												 location);
  		if (result)
  			return result;
  	}
--- 326,332 ----
  		result = (*pstate->p_coerce_param_hook) (pstate,
  												 (Param *) node,
  												 targetTypeId,
! 												 targetTypeMod);
  		if (result)
  			return result;
  	}
diff --git a/src/backend/parser/parse_param.c b/src/backend/parser/parse_param.c
new file mode 100644
index cfe7262..75214ed
*** a/src/backend/parser/parse_param.c
--- b/src/backend/parser/parse_param.c
*************** typedef struct VarParamState
*** 54,61 ****
  static Node *fixed_paramref_hook(ParseState *pstate, ParamRef *pref);
  static Node *variable_paramref_hook(ParseState *pstate, ParamRef *pref);
  static Node *variable_coerce_param_hook(ParseState *pstate, Param *param,
! 						   Oid targetTypeId, int32 targetTypeMod,
! 						   int location);
  static bool check_parameter_resolution_walker(Node *node, ParseState *pstate);
  
  
--- 54,60 ----
  static Node *fixed_paramref_hook(ParseState *pstate, ParamRef *pref);
  static Node *variable_paramref_hook(ParseState *pstate, ParamRef *pref);
  static Node *variable_coerce_param_hook(ParseState *pstate, Param *param,
! 						   Oid targetTypeId, int32 targetTypeMod);
  static bool check_parameter_resolution_walker(Node *node, ParseState *pstate);
  
  
*************** variable_paramref_hook(ParseState *pstat
*** 178,185 ****
   */
  static Node *
  variable_coerce_param_hook(ParseState *pstate, Param *param,
! 						   Oid targetTypeId, int32 targetTypeMod,
! 						   int location)
  {
  	if (param->paramkind == PARAM_EXTERN && param->paramtype == UNKNOWNOID)
  	{
--- 177,183 ----
   */
  static Node *
  variable_coerce_param_hook(ParseState *pstate, Param *param,
! 						   Oid targetTypeId, int32 targetTypeMod)
  {
  	if (param->paramkind == PARAM_EXTERN && param->paramtype == UNKNOWNOID)
  	{
*************** variable_coerce_param_hook(ParseState *p
*** 238,248 ****
  		 */
  		param->paramcollid = get_typcollation(param->paramtype);
  
- 		/* Use the leftmost of the param's and coercion's locations */
- 		if (location >= 0 &&
- 			(param->location < 0 || location < param->location))
- 			param->location = location;
- 
  		return (Node *) param;
  	}
  
--- 236,241 ----
diff --git a/src/include/nodes/parsenodes.h b/src/include/nodes/parsenodes.h
new file mode 100644
index 1d33ceb..9fb3c0f
*** a/src/include/nodes/parsenodes.h
--- b/src/include/nodes/parsenodes.h
*************** typedef struct Query
*** 103,108 ****
--- 103,111 ----
  
  	QuerySource querySource;	/* where did I come from? */
  
+ 	uint32		queryId;		/* query identifier that can be set by plugins.
+ 								 * Will be copied to resulting PlannedStmt. */
+ 
  	bool		canSetTag;		/* do I set the command result tag? */
  
  	Node	   *utilityStmt;	/* non-null if this is DECLARE CURSOR or a
diff --git a/src/include/nodes/plannodes.h b/src/include/nodes/plannodes.h
new file mode 100644
index 7d90b91..3cec1be
*** a/src/include/nodes/plannodes.h
--- b/src/include/nodes/plannodes.h
*************** typedef struct PlannedStmt
*** 67,72 ****
--- 67,74 ----
  	List	   *invalItems;		/* other dependencies, as PlanInvalItems */
  
  	int			nParamExec;		/* number of PARAM_EXEC Params used */
+ 
+ 	uint32		queryId;		/* query identifier carried from query tree */
  } PlannedStmt;
  
  /* macro for fetching the Plan associated with a SubPlan node */
diff --git a/src/include/parser/analyze.h b/src/include/parser/analyze.h
new file mode 100644
index b8987db..2bad10f
*** a/src/include/parser/analyze.h
--- b/src/include/parser/analyze.h
***************
*** 16,26 ****
--- 16,38 ----
  
  #include "parser/parse_node.h"
  
+ /* Hook for plugins to get control in parse_analyze() */
+ typedef Query* (*parse_analyze_hook_type) (Node *parseTree, const char *sourceText,
+ 			  Oid *paramTypes, int numParams);
+ extern PGDLLIMPORT parse_analyze_hook_type parse_analyze_hook;
+ /* Hook for plugins to get control in parse_analyze_varparams() */
+ typedef Query* (*parse_analyze_varparams_hook_type) (Node *parseTree, const char *sourceText,
+ 						Oid **paramTypes, int *numParams);
+ extern PGDLLIMPORT parse_analyze_varparams_hook_type parse_analyze_varparams_hook;
  
  extern Query *parse_analyze(Node *parseTree, const char *sourceText,
  			  Oid *paramTypes, int numParams);
+ extern Query *standard_parse_analyze(Node *parseTree, const char *sourceText,
+ 			  Oid *paramTypes, int numParams);
  extern Query *parse_analyze_varparams(Node *parseTree, const char *sourceText,
  						Oid **paramTypes, int *numParams);
+ extern Query *standard_parse_analyze_varparams(Node *parseTree, const char *sourceText,
+ 						Oid **paramTypes, int *numParams);
  
  extern Query *parse_sub_analyze(Node *parseTree, ParseState *parentParseState,
  				  CommonTableExpr *parentCTE,
diff --git a/src/include/parser/parse_node.h b/src/include/parser/parse_node.h
new file mode 100644
index 670e084..a484ae8
*** a/src/include/parser/parse_node.h
--- b/src/include/parser/parse_node.h
*************** typedef Node *(*PreParseColumnRefHook) (
*** 27,34 ****
  typedef Node *(*PostParseColumnRefHook) (ParseState *pstate, ColumnRef *cref, Node *var);
  typedef Node *(*ParseParamRefHook) (ParseState *pstate, ParamRef *pref);
  typedef Node *(*CoerceParamHook) (ParseState *pstate, Param *param,
! 									   Oid targetTypeId, int32 targetTypeMod,
! 											  int location);
  
  
  /*
--- 27,33 ----
  typedef Node *(*PostParseColumnRefHook) (ParseState *pstate, ColumnRef *cref, Node *var);
  typedef Node *(*ParseParamRefHook) (ParseState *pstate, ParamRef *pref);
  typedef Node *(*CoerceParamHook) (ParseState *pstate, Param *param,
! 									   Oid targetTypeId, int32 targetTypeMod);
  
  
  /*
