--- src/backend/commands/tablecmds.c
+++ src/backend/commands/tablecmds.c
@@ -681,6 +681,23 @@
 	int			child_attno;
 
 	/*
+	 * Check for and reject tables with too many columns. We perform
+	 * this check relatively early for two reasons: (a) we don't run
+	 * the risk of overflowing an AttrNumber in subsequent code (b) an
+	 * O(n^2) algorithm is okay if we're processing <= 1600 columns,
+	 * but could take minutes to execute if the user attempts to
+	 * create a table with hundreds of thousands of columns.
+	 *
+	 * Note that we also need to check that we do not exceed this
+	 * figure after including columns from inherited relations.
+	 */
+	if (list_length(schema) > MaxHeapAttributeNumber)
+		ereport(ERROR,
+				(errcode(ERRCODE_TOO_MANY_COLUMNS),
+				 errmsg("tables can have at most %d columns",
+						MaxHeapAttributeNumber)));
+
+	/*
 	 * Check for duplicate names in the explicit list of attributes.
 	 *
 	 * Although we might consider merging such entries in the same way that
@@ -979,6 +996,16 @@
 		}
 
 		schema = inhSchema;
+
+		/*
+		 * Check that we haven't exceeded the legal # of columns after
+		 * merging in inherited columns.
+		 */
+		if (list_length(schema) > MaxHeapAttributeNumber)
+			ereport(ERROR,
+					(errcode(ERRCODE_TOO_MANY_COLUMNS),
+					 errmsg("tables can have at most %d columns",
+							MaxHeapAttributeNumber)));
 	}
 
 	/*
--- src/backend/parser/analyze.c
+++ src/backend/parser/analyze.c
@@ -396,6 +396,18 @@
 	result->querySource = QSRC_ORIGINAL;
 	result->canSetTag = true;
 
+	/*
+	 * Check that we did not produce too many resnos; at the very
+	 * least we cannot allow more than 2^16, since that would exceed
+	 * the range of an AttrNumber. It seems safest to use
+	 * MaxTupleAttributeNumber.
+	 */
+	if (pstate->p_next_resno - 1 > MaxTupleAttributeNumber)
+		ereport(ERROR,
+				(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
+				 errmsg("target lists can have at most %d entries",
+						MaxTupleAttributeNumber)));
+
 	return result;
 }
 
