From 01c335316fa1ad0905d5e9f2f182eff824e24ff2 Mon Sep 17 00:00:00 2001 From: "Andrei V. Lepikhov" Date: Mon, 27 Apr 2026 09:54:11 +0200 Subject: [PATCH v1] Fix dangling Path pointers when sharing paths across RelOptInfos In three places the planner reused the same Path pointer across two RelOptInfo pathlists without making a copy: - create_ordered_paths() set sorted_path = input_path when the path was already sorted, placing the same node in both input_rel->pathlist and ordered_rel->pathlist. - grouping_planner() passed paths from current_rel directly into add_path and add_partial_path (two separate call sites, accounting for the other two of the three places). Since add_path() may pfree paths it displaces, and apply_projection_to_path() may mutate a path in-place, a path shared between two rels can be freed or corrupted while still referenced by the other. Introduce copy_path(), a type-aware shallow copy (palloc + memcpy at the concrete struct size), and use it at all three sites so each rel owns an independent top-level node. For T_CustomPath, dispatch to a new optional CopyCustomPath callback in CustomPathMethods; fall back to sizeof(CustomPath) memcpy when the callback is not provided. Extensions embedding private fields beyond sizeof(CustomPath) should implement the callback. 
Discussion: https://postgr.es/m/adab9758-f346-4263-93af-3e37b7b315b7%40gmail.com --- src/backend/optimizer/plan/planner.c | 6 +- src/backend/optimizer/util/pathnode.c | 158 +++++++++++++++++++++++++- src/include/nodes/extensible.h | 7 ++ src/include/optimizer/pathnode.h | 1 + 4 files changed, 168 insertions(+), 4 deletions(-) diff --git a/src/backend/optimizer/plan/planner.c b/src/backend/optimizer/plan/planner.c index 4ec76ce31a9..e920cbbaa8f 100644 --- a/src/backend/optimizer/plan/planner.c +++ b/src/backend/optimizer/plan/planner.c @@ -2226,7 +2226,7 @@ grouping_planner(PlannerInfo *root, double tuple_fraction, } /* And shove it into final_rel */ - add_path(final_rel, path); + add_path(final_rel, copy_path(path)); } /* @@ -2241,7 +2241,7 @@ grouping_planner(PlannerInfo *root, double tuple_fraction, { Path *partial_path = (Path *) lfirst(lc); - add_partial_path(final_rel, partial_path); + add_partial_path(final_rel, copy_path(partial_path)); } } @@ -5440,7 +5440,7 @@ create_ordered_paths(PlannerInfo *root, input_path->pathkeys, &presorted_keys); if (is_sorted) - sorted_path = input_path; + sorted_path = copy_path(input_path); else { /* diff --git a/src/backend/optimizer/util/pathnode.c b/src/backend/optimizer/util/pathnode.c index 73518c8f870..5eae724db80 100644 --- a/src/backend/optimizer/util/pathnode.c +++ b/src/backend/optimizer/util/pathnode.c @@ -237,6 +237,161 @@ compare_path_costs_fuzzily(Path *path1, Path *path2, double fuzz_factor) #undef CONSIDER_PATH_STARTUP_COST } +/* + * copy_path + * Make a shallow copy of a Path node. + * + * The new node deliberately retains path->parent. The parent field is not an + * ownership marker — it is a stable identity link. For example, it is used in + * createplan.c to identify the base relation. 
+ * + * For T_CustomPath, we dispatch to the optional CopyCustomPath callback if + * provided; otherwise we fall back to a sizeof(CustomPath) memcpy, which is + * correct only when the extension hasn't appended its own private fields. + * + * Note: this is intentionally shallower than copyObject() (which deep-copies + * sublists and substructure) and lighter than reparameterize_path() (which + * re-runs the constructor and re-costs). We need only a fresh top-level + * node so add_path()'s pfree and apply_projection_to_path()'s in-place + * mutation cannot affect the original. + */ +Path * +copy_path(Path *path) +{ + Path *newpath; + + Assert(path != NULL); + +#define FLAT_COPY_PATH(newnode_, node_, nodetype_) \ + ( (newnode_) = (Path *) palloc(sizeof(nodetype_)), \ + memcpy((newnode_), (node_), sizeof(nodetype_)) ) + + switch (nodeTag(path)) + { + case T_Path: + FLAT_COPY_PATH(newpath, path, Path); + break; + case T_IndexPath: + FLAT_COPY_PATH(newpath, path, IndexPath); + break; + case T_BitmapHeapPath: + FLAT_COPY_PATH(newpath, path, BitmapHeapPath); + break; + case T_BitmapAndPath: + FLAT_COPY_PATH(newpath, path, BitmapAndPath); + break; + case T_BitmapOrPath: + FLAT_COPY_PATH(newpath, path, BitmapOrPath); + break; + case T_TidPath: + FLAT_COPY_PATH(newpath, path, TidPath); + break; + case T_TidRangePath: + FLAT_COPY_PATH(newpath, path, TidRangePath); + break; + case T_SubqueryScanPath: + FLAT_COPY_PATH(newpath, path, SubqueryScanPath); + break; + case T_ForeignPath: + FLAT_COPY_PATH(newpath, path, ForeignPath); + break; + case T_CustomPath: + { + CustomPath *cpath = (CustomPath *) path; + + Assert(cpath->methods != NULL); + if (cpath->methods->CopyCustomPath) + newpath = (Path *) cpath->methods->CopyCustomPath(cpath); + else + FLAT_COPY_PATH(newpath, path, CustomPath); + } + break; + case T_AppendPath: + FLAT_COPY_PATH(newpath, path, AppendPath); + break; + case T_MergeAppendPath: + FLAT_COPY_PATH(newpath, path, MergeAppendPath); + break; + case 
T_GroupResultPath: + FLAT_COPY_PATH(newpath, path, GroupResultPath); + break; + case T_MaterialPath: + FLAT_COPY_PATH(newpath, path, MaterialPath); + break; + case T_MemoizePath: + FLAT_COPY_PATH(newpath, path, MemoizePath); + break; + case T_GatherPath: + FLAT_COPY_PATH(newpath, path, GatherPath); + break; + case T_GatherMergePath: + FLAT_COPY_PATH(newpath, path, GatherMergePath); + break; + case T_NestPath: + FLAT_COPY_PATH(newpath, path, NestPath); + break; + case T_MergePath: + FLAT_COPY_PATH(newpath, path, MergePath); + break; + case T_HashPath: + FLAT_COPY_PATH(newpath, path, HashPath); + break; + case T_ProjectionPath: + FLAT_COPY_PATH(newpath, path, ProjectionPath); + break; + case T_ProjectSetPath: + FLAT_COPY_PATH(newpath, path, ProjectSetPath); + break; + case T_SortPath: + FLAT_COPY_PATH(newpath, path, SortPath); + break; + case T_IncrementalSortPath: + FLAT_COPY_PATH(newpath, path, IncrementalSortPath); + break; + case T_GroupPath: + FLAT_COPY_PATH(newpath, path, GroupPath); + break; + case T_UniquePath: + FLAT_COPY_PATH(newpath, path, UniquePath); + break; + case T_AggPath: + FLAT_COPY_PATH(newpath, path, AggPath); + break; + case T_GroupingSetsPath: + FLAT_COPY_PATH(newpath, path, GroupingSetsPath); + break; + case T_MinMaxAggPath: + FLAT_COPY_PATH(newpath, path, MinMaxAggPath); + break; + case T_WindowAggPath: + FLAT_COPY_PATH(newpath, path, WindowAggPath); + break; + case T_SetOpPath: + FLAT_COPY_PATH(newpath, path, SetOpPath); + break; + case T_RecursiveUnionPath: + FLAT_COPY_PATH(newpath, path, RecursiveUnionPath); + break; + case T_LockRowsPath: + FLAT_COPY_PATH(newpath, path, LockRowsPath); + break; + case T_ModifyTablePath: + FLAT_COPY_PATH(newpath, path, ModifyTablePath); + break; + case T_LimitPath: + FLAT_COPY_PATH(newpath, path, LimitPath); + break; + default: + elog(ERROR, "unrecognized path type: %d", (int) nodeTag(path)); + newpath = NULL; /* keep compiler quiet */ + break; + } + +#undef FLAT_COPY_PATH + + return newpath; +} + /* * 
set_cheapest * Find the minimum-cost paths from among a relation's paths, @@ -2678,7 +2833,8 @@ create_projection_path(PlannerInfo *root, * a separate Result plan node isn't needed, we just replace the given path's * pathtarget with the desired one. This must be used only when the caller * knows that the given path isn't referenced elsewhere and so can be modified - * in-place. + * in-place. In particular, callers must not pass a Path that is currently + * reachable from another RelOptInfo's pathlist. * * If the input path is a GatherPath or GatherMergePath, we try to push the * new target down to its input as well; this is a yet more invasive diff --git a/src/include/nodes/extensible.h b/src/include/nodes/extensible.h index 517db95c4a3..762b09976f5 100644 --- a/src/include/nodes/extensible.h +++ b/src/include/nodes/extensible.h @@ -103,6 +103,13 @@ typedef struct CustomPathMethods struct List *(*ReparameterizeCustomPathByChild) (PlannerInfo *root, List *custom_private, RelOptInfo *child_rel); + + /* + * Produce a shallow copy of a CustomPath, returning a freshly palloc'd + * struct of the extension's concrete type. Required when the extension's + * CustomPath subclass embeds private fields beyond sizeof(CustomPath). + */ + struct CustomPath *(*CopyCustomPath) (struct CustomPath *path); } CustomPathMethods; /* diff --git a/src/include/optimizer/pathnode.h b/src/include/optimizer/pathnode.h index e8db321f92b..fbafdfd1e6f 100644 --- a/src/include/optimizer/pathnode.h +++ b/src/include/optimizer/pathnode.h @@ -55,6 +55,7 @@ extern int compare_path_costs(Path *path1, Path *path2, extern int compare_fractional_path_costs(Path *path1, Path *path2, double fraction); extern void set_cheapest(RelOptInfo *parent_rel); +extern Path *copy_path(Path *path); extern void add_path(RelOptInfo *parent_rel, Path *new_path); extern bool add_path_precheck(RelOptInfo *parent_rel, int disabled_nodes, Cost startup_cost, Cost total_cost, -- 2.54.0