1 /*
2 * This file and its contents are licensed under the Apache License 2.0.
3 * Please see the included NOTICE for copyright information and
4 * LICENSE-APACHE for a copy of the license.
5 */
6 #include <postgres.h>
7 #include <catalog/pg_cast.h>
8 #include <catalog/pg_class.h>
9 #include <catalog/pg_namespace.h>
10 #include <catalog/pg_operator.h>
11 #include <commands/explain.h>
12 #include <executor/executor.h>
13 #include <nodes/extensible.h>
14 #include <nodes/makefuncs.h>
15 #include <nodes/nodeFuncs.h>
16 #include <nodes/nodes.h>
17 #include <nodes/parsenodes.h>
18 #include <nodes/plannodes.h>
19 #include <optimizer/appendinfo.h>
20 #include <optimizer/cost.h>
21 #include <optimizer/optimizer.h>
22 #include <optimizer/plancat.h>
23 #include <parser/parsetree.h>
24 #include <rewrite/rewriteManip.h>
25 #include <utils/lsyscache.h>
26 #include <utils/memutils.h>
27 #include <utils/syscache.h>
28
29 #include "compat/compat.h"
30 #include "constraint_aware_append.h"
31 #include "nodes/chunk_append/transform.h"
32 #include "guc.h"
33 #include "utils.h"
34
35 /*
36 * Exclude child relations (chunks) at execution time based on constraints.
37 *
38 * This functions tries to reuse as much functionality as possible from standard
39 * constraint exclusion in PostgreSQL that normally happens at planning
40 * time. Therefore, we need to fake a number of planning-related data
41 * structures.
42 */
43 static bool
excluded_by_constraint(PlannerInfo * root,RangeTblEntry * rte,Index rt_index,List * restrictinfos)44 excluded_by_constraint(PlannerInfo *root, RangeTblEntry *rte, Index rt_index, List *restrictinfos)
45 {
46 RelOptInfo rel = {
47 .type = T_RelOptInfo,
48 .relid = rt_index,
49 .reloptkind = RELOPT_OTHER_MEMBER_REL,
50 .baserestrictinfo = restrictinfos,
51 };
52
53 return relation_excluded_by_constraints(root, &rel, rte);
54 }
55
56 static Plan *
get_plans_for_exclusion(Plan * plan)57 get_plans_for_exclusion(Plan *plan)
58 {
59 /* Optimization: If we want to be able to prune */
60 /* when the node is a T_Result or T_Sort, then we need to peek */
61 /* into the subplans of this Result node. */
62
63 switch (nodeTag(plan))
64 {
65 case T_Result:
66 case T_Sort:
67 Assert(plan->lefttree != NULL && plan->righttree == NULL);
68 return plan->lefttree;
69
70 default:
71 return plan;
72 }
73 }
74
75 static bool
can_exclude_chunk(PlannerInfo * root,EState * estate,Index rt_index,List * restrictinfos)76 can_exclude_chunk(PlannerInfo *root, EState *estate, Index rt_index, List *restrictinfos)
77 {
78 RangeTblEntry *rte = rt_fetch(rt_index, estate->es_range_table);
79
80 return rte->rtekind == RTE_RELATION && rte->relkind == RELKIND_RELATION && !rte->inh &&
81 excluded_by_constraint(root, rte, rt_index, restrictinfos);
82 }
83
84 /*
85 * Convert restriction clauses to constants expressions (i.e., if there are
86 * mutable functions, they need to be evaluated to constants). For instance,
87 * something like:
88 *
89 * ...WHERE time > now - interval '1 hour'
90 *
91 * becomes
92 *
93 * ...WHERE time > '2017-06-02 11:26:43.935712+02'
94 */
95 static List *
constify_restrictinfos(PlannerInfo * root,List * restrictinfos)96 constify_restrictinfos(PlannerInfo *root, List *restrictinfos)
97 {
98 ListCell *lc;
99
100 foreach (lc, restrictinfos)
101 {
102 RestrictInfo *rinfo = lfirst(lc);
103
104 rinfo->clause = (Expr *) estimate_expression_value(root, (Node *) rinfo->clause);
105 }
106
107 return restrictinfos;
108 }
109
110 /*
111 * Initialize the scan state and prune any subplans from the Append node below
112 * us in the plan tree. Pruning happens by evaluating the subplan's table
113 * constraints against a folded version of the restriction clauses in the query.
114 */
static void
ca_append_begin(CustomScanState *node, EState *estate, int eflags)
{
	ConstraintAwareAppendState *state = (ConstraintAwareAppendState *) node;
	CustomScan *cscan = (CustomScan *) node->ss.ps.plan;
	/* Work on a copy so the original plan tree (which may be re-used, e.g.
	 * for prepared statements) is not mutated by the pruning below */
	Plan *subplan = copyObject(state->subplan);
	/* custom_private layout: (hypertable relid list, per-chunk clause lists,
	 * per-chunk RT indexes) -- see constraint_aware_append_plan_create() */
	List *chunk_ri_clauses = lsecond(cscan->custom_private);
	List *chunk_relids = lthird(cscan->custom_private);
	List **appendplans, *old_appendplans;
	ListCell *lc_plan;
	ListCell *lc_clauses;
	ListCell *lc_relid;

	/*
	 * create skeleton plannerinfo to reuse some PostgreSQL planner functions
	 * (estimate_expression_value() and relation_excluded_by_constraints()
	 * both expect a PlannerInfo, which normally only exists at plan time)
	 */
	Query parse = {
		.resultRelation = InvalidOid,
	};
	PlannerGlobal glob = {
		.boundParams = NULL,
	};
	PlannerInfo root = {
		.glob = &glob,
		.parse = &parse,
	};

	/* CustomScan hard-codes the scan and result tuple slot to a fixed
	 * TTSOpsVirtual ops (meaning it expects the slot ops of the child tuple to
	 * also have this type). Oddly, when reading slots from subscan nodes
	 * (children), there is no knowing what tuple slot ops the child slot will
	 * have (e.g., for ChunkAppend it is common that the child is a
	 * seqscan/indexscan that produces a TTSOpsBufferHeapTuple
	 * slot). Unfortunately, any mismatch between slot types when projecting is
	 * asserted by PostgreSQL. To avoid this issue, we mark the scanops as
	 * non-fixed and reinitialize the projection state with this new setting.
	 *
	 * Alternatively, we could copy the child tuple into the scan slot to get
	 * the expected ops before projection, but this would require materializing
	 * and copying the tuple unnecessarily.
	 */
	node->ss.ps.scanopsfixed = false;

	/* Since we sometimes return the scan slot directly from the subnode, the
	 * result slot is not fixed either. */
	node->ss.ps.resultopsfixed = false;
	ExecAssignScanProjectionInfoWithVarno(&node->ss, INDEX_VAR);

	/* Detach the child plan list from the (Merge)Append node so we can
	 * re-add only the subplans that survive exclusion */
	switch (nodeTag(subplan))
	{
		case T_Append:
		{
			Append *append = (Append *) subplan;

			old_appendplans = append->appendplans;
			append->appendplans = NIL;
			appendplans = &append->appendplans;
			break;
		}
		case T_MergeAppend:
		{
			MergeAppend *append = (MergeAppend *) subplan;

			old_appendplans = append->mergeplans;
			append->mergeplans = NIL;
			appendplans = &append->mergeplans;
			break;
		}
		case T_Result:

			/*
			 * Append plans are turned into a Result node if empty. This can
			 * happen if children are pruned first by constraint exclusion
			 * while we also remove the main table from the appendplans list,
			 * leaving an empty list. In that case, there is nothing to do.
			 */
			return;
		default:
			elog(ERROR, "invalid child of constraint-aware append: %u", nodeTag(subplan));
	}

	/*
	 * clauses should always have the same length as appendplans because
	 * that's the base for building the lists
	 */
	Assert(list_length(old_appendplans) == list_length(chunk_ri_clauses));
	Assert(list_length(chunk_relids) == list_length(chunk_ri_clauses));

	forthree (lc_plan, old_appendplans, lc_clauses, chunk_ri_clauses, lc_relid, chunk_relids)
	{
		/* Look through Result/Sort wrappers to the underlying scan node */
		Plan *plan = get_plans_for_exclusion(lfirst(lc_plan));

		switch (nodeTag(plan))
		{
			case T_SeqScan:
			case T_SampleScan:
			case T_IndexScan:
			case T_IndexOnlyScan:
			case T_BitmapIndexScan:
			case T_BitmapHeapScan:
			case T_TidScan:
			case T_SubqueryScan:
			case T_FunctionScan:
			case T_ValuesScan:
			case T_CteScan:
			case T_WorkTableScan:
			case T_ForeignScan:
			case T_CustomScan:
			{
				/*
				 * If this is a base rel (chunk), check if it can be
				 * excluded from the scan. Otherwise, fall through.
				 */

				Index scanrelid = ((Scan *) plan)->scanrelid;
				List *restrictinfos = NIL;
				List *ri_clauses = lfirst(lc_clauses);
				ListCell *lc;

				Assert(scanrelid);

				/* Wrap the stored clauses in fresh RestrictInfos, since
				 * that is what the exclusion machinery works with */
				foreach (lc, ri_clauses)
				{
					RestrictInfo *ri = makeNode(RestrictInfo);
					ri->clause = lfirst(lc);

					/*
					 * The index of the RangeTblEntry might have changed between planning
					 * because of flattening, so we need to adjust the expressions
					 * for the RestrictInfos if they are not equal.
					 */
					if (lfirst_oid(lc_relid) != scanrelid)
						ChangeVarNodes((Node *) ri->clause, lfirst_oid(lc_relid), scanrelid, 0);

					restrictinfos = lappend(restrictinfos, ri);
				}
				/* Fold mutable expressions (e.g. now()) to constants so
				 * they can be compared against chunk constraints */
				restrictinfos = constify_restrictinfos(&root, restrictinfos);

				if (can_exclude_chunk(&root, estate, scanrelid, restrictinfos))
					continue;

				/* Chunk not excluded: keep the original (wrapped) subplan */
				*appendplans = lappend(*appendplans, lfirst(lc_plan));
				break;
			}
			default:
				elog(ERROR, "invalid child of constraint-aware append: %u", nodeTag(plan));
				break;
		}
	}

	state->num_append_subplans = list_length(*appendplans);
	/* Only initialize the child executor tree if any subplans survived;
	 * ca_append_exec() returns no tuples when the count is zero */
	if (state->num_append_subplans > 0)
		node->custom_ps = list_make1(ExecInitNode(subplan, estate, eflags));
}
269
270 static TupleTableSlot *
ca_append_exec(CustomScanState * node)271 ca_append_exec(CustomScanState *node)
272 {
273 ConstraintAwareAppendState *state = (ConstraintAwareAppendState *) node;
274 TupleTableSlot *subslot;
275 ExprContext *econtext = node->ss.ps.ps_ExprContext;
276
277 /*
278 * Check if all append subplans were pruned. In that case there is nothing
279 * to do.
280 */
281 if (state->num_append_subplans == 0)
282 return NULL;
283
284 ResetExprContext(econtext);
285
286 while (true)
287 {
288 subslot = ExecProcNode(linitial(node->custom_ps));
289
290 if (TupIsNull(subslot))
291 return NULL;
292
293 if (!node->ss.ps.ps_ProjInfo)
294 return subslot;
295
296 econtext->ecxt_scantuple = subslot;
297
298 return ExecProject(node->ss.ps.ps_ProjInfo);
299 }
300 }
301
302 static void
ca_append_end(CustomScanState * node)303 ca_append_end(CustomScanState *node)
304 {
305 if (node->custom_ps != NIL)
306 {
307 ExecEndNode(linitial(node->custom_ps));
308 }
309 }
310
311 static void
ca_append_rescan(CustomScanState * node)312 ca_append_rescan(CustomScanState *node)
313 {
314 if (node->custom_ps != NIL)
315 {
316 ExecReScan(linitial(node->custom_ps));
317 }
318 }
319
320 static void
ca_append_explain(CustomScanState * node,List * ancestors,ExplainState * es)321 ca_append_explain(CustomScanState *node, List *ancestors, ExplainState *es)
322 {
323 CustomScan *cscan = (CustomScan *) node->ss.ps.plan;
324 ConstraintAwareAppendState *state = (ConstraintAwareAppendState *) node;
325 Oid relid = linitial_oid(linitial(cscan->custom_private));
326
327 ExplainPropertyText("Hypertable", get_rel_name(relid), es);
328 ExplainPropertyInteger("Chunks left after exclusion", NULL, state->num_append_subplans, es);
329 }
330
/* Executor callbacks for the ConstraintAwareAppend custom scan node */
static CustomExecMethods constraint_aware_append_state_methods = {
	.BeginCustomScan = ca_append_begin,
	.ExecCustomScan = ca_append_exec,
	.EndCustomScan = ca_append_end,
	.ReScanCustomScan = ca_append_rescan,
	.ExplainCustomScan = ca_append_explain,
};
338
339 static Node *
constraint_aware_append_state_create(CustomScan * cscan)340 constraint_aware_append_state_create(CustomScan *cscan)
341 {
342 ConstraintAwareAppendState *state;
343 Append *append = linitial(cscan->custom_plans);
344
345 state = (ConstraintAwareAppendState *) newNode(sizeof(ConstraintAwareAppendState),
346 T_CustomScanState);
347 state->csstate.methods = &constraint_aware_append_state_methods;
348 state->subplan = &append->plan;
349
350 return (Node *) state;
351 }
352
/* Plan-node callbacks; registered with PostgreSQL in _constraint_aware_append_init() */
static CustomScanMethods constraint_aware_append_plan_methods = {
	.CustomName = "ConstraintAwareAppend",
	.CreateCustomScanState = constraint_aware_append_state_create,
};
357
/*
 * Turn a ConstraintAwareAppend path into a CustomScan plan node.
 *
 * Besides building the CustomScan itself, this captures in custom_private
 * everything ca_append_begin() needs at executor startup: the hypertable's
 * relid, one list of restriction clauses per child chunk (with Vars already
 * translated to the chunk's range-table index), and the chunk RT indexes
 * those clauses were built against.
 */
static Plan *
constraint_aware_append_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPath *path,
									List *tlist, List *clauses, List *custom_plans)
{
	CustomScan *cscan = makeNode(CustomScan);
	Plan *subplan;
	RangeTblEntry *rte = planner_rt_fetch(rel->relid, root);
	List *chunk_ri_clauses = NIL;
	List *chunk_relids = NIL;
	List *children = NIL;
	ListCell *lc_child;

	/*
	 * Postgres will inject Result nodes above mergeappend when target lists don't match
	 * because the nodes themselves do not perform projection. The ConstraintAwareAppend
	 * node can do this projection itself, however, so just throw away the result node.
	 * Removing the Result node is only safe if there is no one-time filter.
	 */
	if (IsA(linitial(custom_plans), Result) &&
		castNode(Result, linitial(custom_plans))->resconstantqual == NULL)
	{
		Result *result = castNode(Result, linitial(custom_plans));

		if (result->plan.righttree != NULL)
			elog(ERROR, "unexpected right tree below result node in constraint aware append");

		custom_plans = list_make1(result->plan.lefttree);
	}
	subplan = linitial(custom_plans);

	cscan->scan.scanrelid = 0; /* Not a real relation we are scanning */
	cscan->scan.plan.targetlist = tlist; /* Target list we expect as output */
	cscan->custom_plans = custom_plans;

	/*
	 * create per chunk RestrictInfo
	 *
	 * We also need to walk the expression trees of the restriction clauses and
	 * update any Vars that reference the main table to instead reference the child
	 * table (chunk) we want to exclude.
	 */
	switch (nodeTag(linitial(custom_plans)))
	{
		case T_MergeAppend:
			children = castNode(MergeAppend, linitial(custom_plans))->mergeplans;
			break;
		case T_Append:
			children = castNode(Append, linitial(custom_plans))->appendplans;
			break;
		default:
			elog(ERROR,
				 "invalid child of constraint-aware append: %u",
				 nodeTag(linitial(custom_plans)));
			break;
	}

	/*
	 * we only iterate over the child chunks of this node
	 * so the list of metadata exactly matches the list of
	 * child nodes in the executor
	 */
	foreach (lc_child, children)
	{
		/* Look through Result/Sort wrappers to the underlying scan node,
		 * mirroring what ca_append_begin() does at execution time */
		Plan *plan = get_plans_for_exclusion(lfirst(lc_child));

		switch (nodeTag(plan))
		{
			case T_SeqScan:
			case T_SampleScan:
			case T_IndexScan:
			case T_IndexOnlyScan:
			case T_BitmapIndexScan:
			case T_BitmapHeapScan:
			case T_TidScan:
			case T_SubqueryScan:
			case T_FunctionScan:
			case T_ValuesScan:
			case T_CteScan:
			case T_WorkTableScan:
			case T_ForeignScan:
			case T_CustomScan:
			{
				List *chunk_clauses = NIL;
				ListCell *lc;
				Index scanrelid = ((Scan *) plan)->scanrelid;
				AppendRelInfo *appinfo = ts_get_appendrelinfo(root, scanrelid, false);

				foreach (lc, clauses)
				{
					/* Normalize cross-datatype comparisons first so the
					 * clause can match the chunk's constraints */
					Node *clause = (Node *) ts_transform_cross_datatype_comparison(
						castNode(RestrictInfo, lfirst(lc))->clause);
					/* Translate parent Vars to this chunk's Vars */
					clause = adjust_appendrel_attrs(root, clause, 1, &appinfo);
					chunk_clauses = lappend(chunk_clauses, clause);
				}
				chunk_ri_clauses = lappend(chunk_ri_clauses, chunk_clauses);
				chunk_relids = lappend_oid(chunk_relids, scanrelid);
				break;
			}
			default:
				elog(ERROR, "invalid child of constraint-aware append: %u", nodeTag(plan));
				break;
		}
	}

	/* Layout consumed by ca_append_begin()/ca_append_explain() */
	cscan->custom_private = list_make3(list_make1_oid(rte->relid), chunk_ri_clauses, chunk_relids);
	cscan->custom_scan_tlist = subplan->targetlist; /* Target list of tuples
													 * we expect as input */
	cscan->flags = path->flags;
	cscan->methods = &constraint_aware_append_plan_methods;

	return &cscan->scan.plan;
}
470
/* Path-node callbacks for planning the ConstraintAwareAppend custom scan */
static CustomPathMethods constraint_aware_append_path_methods = {
	.CustomName = "ConstraintAwareAppend",
	.PlanCustomPath = constraint_aware_append_plan_create,
};
475
/*
 * Wrap an Append or MergeAppend path in a ConstraintAwareAppend custom path.
 *
 * Cost and parallelism fields are copied verbatim from the subpath, since
 * this node merely filters subplans at executor startup and then passes
 * tuples through. Errors out if the subpath is not a (Merge)Append.
 */
Path *
ts_constraint_aware_append_path_create(PlannerInfo *root, Path *subpath)
{
	ConstraintAwareAppendPath *path;

	path = (ConstraintAwareAppendPath *) newNode(sizeof(ConstraintAwareAppendPath), T_CustomPath);
	path->cpath.path.pathtype = T_CustomScan;
	path->cpath.path.rows = subpath->rows;
	path->cpath.path.startup_cost = subpath->startup_cost;
	path->cpath.path.total_cost = subpath->total_cost;
	path->cpath.path.parent = subpath->parent;
	path->cpath.path.pathkeys = subpath->pathkeys;
	path->cpath.path.param_info = subpath->param_info;
	path->cpath.path.pathtarget = subpath->pathtarget;

	path->cpath.path.parallel_aware = false;
	path->cpath.path.parallel_safe = subpath->parallel_safe;
	path->cpath.path.parallel_workers = subpath->parallel_workers;

	/*
	 * Set flags. We can set CUSTOMPATH_SUPPORT_BACKWARD_SCAN and
	 * CUSTOMPATH_SUPPORT_MARK_RESTORE. The only interesting flag is the first
	 * one (backward scan), but since we are not scanning a real relation we
	 * need not indicate that we support backward scans. Lower-level index
	 * scanning nodes will scan backward if necessary, so once tuples get to
	 * this node they will be in a given order already.
	 */
	path->cpath.flags = 0;
	path->cpath.custom_paths = list_make1(subpath);
	path->cpath.methods = &constraint_aware_append_path_methods;

	/*
	 * Make sure our subpath is either an Append or MergeAppend node
	 */
	switch (nodeTag(subpath))
	{
		case T_AppendPath:
		case T_MergeAppendPath:
			break;
		default:
			elog(ERROR, "invalid child of constraint-aware append: %u", nodeTag(subpath));
			break;
	}

	return &path->cpath.path;
}
522
523 bool
ts_constraint_aware_append_possible(Path * path)524 ts_constraint_aware_append_possible(Path *path)
525 {
526 RelOptInfo *rel = path->parent;
527 ListCell *lc;
528 int num_children;
529
530 if (!ts_guc_enable_optimizations || !ts_guc_enable_constraint_aware_append ||
531 constraint_exclusion == CONSTRAINT_EXCLUSION_OFF)
532 return false;
533
534 switch (nodeTag(path))
535 {
536 case T_AppendPath:
537 num_children = list_length(castNode(AppendPath, path)->subpaths);
538 break;
539 case T_MergeAppendPath:
540 num_children = list_length(castNode(MergeAppendPath, path)->subpaths);
541 break;
542 default:
543 return false;
544 }
545
546 /* Never use constraint-aware append with only one child, since PG12 will
547 * later prune the (Merge)Append node from such plans, leaving us with an
548 * unexpected child node. */
549 if (num_children <= 1)
550 return false;
551
552 /*
553 * If there are clauses that have mutable functions, this path is ripe for
554 * execution-time optimization.
555 */
556 foreach (lc, rel->baserestrictinfo)
557 {
558 RestrictInfo *rinfo = (RestrictInfo *) lfirst(lc);
559
560 if (contain_mutable_functions((Node *) rinfo->clause))
561 return true;
562 }
563 return false;
564 }
565
566 bool
ts_is_constraint_aware_append_path(Path * path)567 ts_is_constraint_aware_append_path(Path *path)
568 {
569 return IsA(path, CustomPath) &&
570 castNode(CustomPath, path)->methods == &constraint_aware_append_path_methods;
571 }
572
/*
 * Module init: register the custom scan methods with PostgreSQL so
 * ConstraintAwareAppend plan nodes can be serialized and re-created
 * (e.g. for parallel workers and prepared statements).
 */
void
_constraint_aware_append_init(void)
{
	RegisterCustomScanMethods(&constraint_aware_append_plan_methods);
}
578