From c5c13415f0087879a036d0efc86b6156e7af0011 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Mon, 27 May 2019 20:01:39 +0200 Subject: [PATCH] Use ChunkAppend to replace Append nodes This patch makes TimescaleDB use ChunkAppend in places where it used to use ConstraintAwareAppend before. ConstraintAwareAppend will still be used for MergeAppend nodes that cannot be changed to Ordered Append or when ChunkAppend is disabled. When a query on a hypertable is identified as benefitting from execution exclusion, Append nodes will be replaced by ChunkAppend nodes. This will enable the use of runtime exclusion for joins, lateral joins, subqueries and correlated subqueries. --- CHANGELOG.md | 3 + src/chunk_append/chunk_append.c | 22 +- src/chunk_append/chunk_append.h | 4 +- src/chunk_append/exec.c | 297 +++- src/chunk_append/exec.h | 13 +- src/chunk_append/explain.c | 11 +- src/chunk_append/planner.c | 18 +- src/plan_expand_hypertable.c | 37 +- src/planner.c | 94 +- src/planner.h | 2 + test/expected/append-10.out | 351 +++- test/expected/append-11.out | 347 +++- test/expected/append-9.6.out | 291 +++- test/expected/delete.out | 50 +- test/expected/parallel-10.out | 84 +- test/expected/parallel-11.out | 84 +- test/expected/parallel-9.6.out | 88 +- test/expected/plan_expand_hypertable-10.out | 67 +- test/expected/plan_expand_hypertable-11.out | 67 +- test/expected/plan_expand_hypertable-9.6.out | 67 +- test/expected/plan_ordered_append-10.out | 102 +- test/expected/plan_ordered_append-11.out | 102 +- test/expected/plan_ordered_append-9.6.out | 103 +- test/expected/rowsecurity-10.out | 1408 ++++++++------- test/expected/rowsecurity-11.out | 1505 ++++++++--------- test/expected/rowsecurity-9.6.out | 238 ++- test/expected/update.out | 42 +- test/sql/include/append_query.sql | 20 + .../sql/include/plan_ordered_append_query.sql | 16 + test/sql/rowsecurity-10.sql | 5 + test/sql/rowsecurity-11.sql | 6 + 31 files changed, 3247 insertions(+), 2297 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dc948f1c3..77fd6d16f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,9 @@ accidentally triggering the load of a previous DB version.** ## 1.4.0 (unreleased) +**Major features** +* #1270 Use ChunkAppend to replace Append nodes + **Minor features** * #1273 Propagate quals to joined hypertables diff --git a/src/chunk_append/chunk_append.c b/src/chunk_append/chunk_append.c index dc7f71c86..6a998066f 100644 --- a/src/chunk_append/chunk_append.c +++ b/src/chunk_append/chunk_append.c @@ -7,6 +7,7 @@ #include #include #include +#include #include #include #include @@ -81,8 +82,16 @@ ts_chunk_append_path_create(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht, children = castNode(AppendPath, subpath)->subpaths; break; case T_MergeAppendPath: - if (!ordered) - return subpath; + /* + * check if ordered append is applicable, only assert ordered here + * checked properly in ts_ordered_append_should_optimize + */ + Assert(ordered); + + /* + * we only push down LIMIT for ordered append + */ + path->pushdown_limit = true; children = castNode(MergeAppendPath, subpath)->subpaths; path->cpath.path.pathkeys = subpath->pathkeys; break; @@ -184,7 +193,7 @@ ts_chunk_append_path_create(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht, * We do this to prevent planner choosing parallel plan which might * otherwise look preferable cost wise. 
*/ - if (root->limit_tuples == -1.0 || rows < root->limit_tuples) + if (!path->pushdown_limit || root->limit_tuples == -1.0 || rows < root->limit_tuples) { total_cost += child->total_cost; rows += child->rows; @@ -205,7 +214,7 @@ ts_chunk_append_path_create(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht, */ bool ts_ordered_append_should_optimize(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht, - List *join_conditions, bool *reverse) + List *join_conditions, int *order_attno, bool *reverse) { SortGroupClause *sort = linitial(root->parse->sortClause); TargetEntry *tle = get_sortgroupref_tle(sort->tleSortGroupRef, root->parse->targetList); @@ -270,8 +279,9 @@ ts_ordered_append_should_optimize(PlannerInfo *root, RelOptInfo *rel, Hypertable if (namestrcmp(&ht->space->dimensions[0].fd.column_name, column) != 0) return false; - if (reverse != NULL) - *reverse = sort->sortop == tce->lt_opr ? false : true; + Assert(order_attno != NULL && reverse != NULL); + *order_attno = ht_var->varattno; + *reverse = sort->sortop == tce->lt_opr ? false : true; return true; } diff --git a/src/chunk_append/chunk_append.h b/src/chunk_append/chunk_append.h index 882275eee..5b17109d0 100644 --- a/src/chunk_append/chunk_append.h +++ b/src/chunk_append/chunk_append.h @@ -17,12 +17,14 @@ typedef struct ChunkAppendPath CustomPath cpath; bool startup_exclusion; bool runtime_exclusion; + bool pushdown_limit; } ChunkAppendPath; extern Path *ts_chunk_append_path_create(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht, Path *subpath, bool ordered, List *nested_oids); extern bool ts_ordered_append_should_optimize(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht, - List *join_conditions, bool *reverse); + List *join_conditions, int *order_attno, + bool *reverse); #endif /* TIMESCALEDB_CHUNK_APPEND_H */ diff --git a/src/chunk_append/exec.c b/src/chunk_append/exec.c index 41be4afbd..316f3a2f8 100644 --- a/src/chunk_append/exec.c +++ b/src/chunk_append/exec.c @@ -6,16 +6,22 @@ #include #include +#include +#include #include #include #include +#include #include +#include #include +#include +#include #include #include #include -#include #include +#include #include "chunk_append/chunk_append.h" #include "chunk_append/exec.h" @@ -37,13 +43,12 @@ static CustomExecMethods chunk_append_state_methods = { }; static List *constify_restrictinfos(PlannerInfo *root, List *restrictinfos); -static bool can_exclude_chunk(PlannerInfo *root, EState *estate, Index rt_index, - List *restrictinfos); +static bool can_exclude_chunk(List *constraints, List *restrictinfos); static void do_startup_exclusion(ChunkAppendState *state); static Node *constify_param_mutator(Node *node, void *context); static List *constify_restrictinfo_params(PlannerInfo *root, EState *state, List *restrictinfos); -static void adjust_ri_clauses(ChunkAppendState *state, List *initial_rt_indexes); +static void initialize_constraints(ChunkAppendState *state, List *initial_rt_indexes); Node * chunk_append_state_create(CustomScan *cscan) @@ -54,20 +59,14 @@ chunk_append_state_create(CustomScan *cscan) state->csstate.methods = &chunk_append_state_methods; - state->num_subplans = 0; - state->subplanstates = NULL; - state->initial_subplans = cscan->custom_plans; state->initial_ri_clauses = lsecond(cscan->custom_private); - adjust_ri_clauses(state, lthird(cscan->custom_private)); state->sort_options = lfourth(cscan->custom_private); state->startup_exclusion = (bool) linitial_oid(linitial(cscan->custom_private)); state->runtime_exclusion = (bool) 
lsecond_oid(linitial(cscan->custom_private)); state->limit = lthird_oid(linitial(cscan->custom_private)); - state->current = 0; - state->runtime_initialized = false; state->filtered_subplans = state->initial_subplans; state->filtered_ri_clauses = state->initial_ri_clauses; @@ -79,30 +78,33 @@ do_startup_exclusion(ChunkAppendState *state) { List *filtered_children = NIL; List *filtered_ri_clauses = NIL; + List *filtered_constraints = NIL; ListCell *lc_plan; ListCell *lc_clauses; + ListCell *lc_constraints; /* - * create skeleton plannerinfo to reuse some PostgreSQL planner functions + * create skeleton plannerinfo for estimate_expression_value */ - Query parse = { - .resultRelation = InvalidOid, - }; PlannerGlobal glob = { .boundParams = NULL, }; PlannerInfo root = { .glob = &glob, - .parse = &parse, }; /* - * clauses should always have the same length as appendplans because - * the list of clauses is built from the list of appendplans + * clauses and constraints should always have the same length as initial_subplans */ Assert(list_length(state->initial_subplans) == list_length(state->initial_ri_clauses)); + Assert(list_length(state->initial_subplans) == list_length(state->initial_constraints)); - forboth (lc_plan, state->initial_subplans, lc_clauses, state->initial_ri_clauses) + forthree (lc_plan, + state->initial_subplans, + lc_constraints, + state->initial_constraints, + lc_clauses, + state->initial_ri_clauses) { List *restrictinfos = NIL; List *ri_clauses = lfirst(lc_clauses); @@ -123,18 +125,18 @@ do_startup_exclusion(ChunkAppendState *state) } restrictinfos = constify_restrictinfos(&root, restrictinfos); - if (can_exclude_chunk(&root, - state->csstate.ss.ps.state, - scan->scanrelid, - restrictinfos)) + if (can_exclude_chunk(lfirst(lc_constraints), restrictinfos)) continue; } filtered_children = lappend(filtered_children, lfirst(lc_plan)); filtered_ri_clauses = lappend(filtered_ri_clauses, ri_clauses); + filtered_constraints = lappend(filtered_constraints, lfirst(lc_constraints)); } + state->filtered_subplans = filtered_children; state->filtered_ri_clauses = filtered_ri_clauses; + state->filtered_constraints = filtered_constraints; } static void @@ -145,6 +147,9 @@ chunk_append_begin(CustomScanState *node, EState *estate, int eflags) ListCell *lc; int i; + Assert(list_length(cscan->custom_plans) == list_length(state->initial_subplans)); + initialize_constraints(state, lthird(cscan->custom_private)); + if (state->startup_exclusion) do_startup_exclusion(state); @@ -175,29 +180,42 @@ chunk_append_begin(CustomScanState *node, EState *estate, int eflags) i++; } + + if (state->runtime_exclusion) + { + state->params = state->subplanstates[0]->plan->allParam; + /* + * make sure all params are initialized for runtime exclusion + */ + node->ss.ps.chgParam = state->subplanstates[0]->plan->allParam; + } } +/* + * build bitmap of valid subplans for runtime exclusion + */ static void initialize_runtime_exclusion(ChunkAppendState *state) { - ListCell *lc_clauses; + ListCell *lc_clauses, *lc_constraints; int i = 0; - Query parse = { - .resultRelation = InvalidOid, - }; PlannerGlobal glob = { .boundParams = NULL, }; PlannerInfo root = { .glob = &glob, - .parse = &parse, }; Assert(state->num_subplans == list_length(state->filtered_ri_clauses)); lc_clauses = list_head(state->filtered_ri_clauses); + lc_constraints = list_head(state->filtered_constraints); + state->runtime_number_loops++; + /* + * mark subplans as active/inactive in valid_subplans + */ for (i = 0; i < state->num_subplans; i++) { PlanState 
*ps = state->subplanstates[i]; @@ -219,11 +237,14 @@ initialize_runtime_exclusion(ChunkAppendState *state) } restrictinfos = constify_restrictinfo_params(&root, ps->state, restrictinfos); - if (!can_exclude_chunk(&root, ps->state, scan->scanrelid, restrictinfos)) + if (!can_exclude_chunk(lfirst(lc_constraints), restrictinfos)) state->valid_subplans = bms_add_member(state->valid_subplans, i); + else + state->runtime_number_exclusions++; } lc_clauses = lnext(lc_clauses); + lc_constraints = lnext(lc_constraints); } state->runtime_initialized = true; @@ -251,7 +272,7 @@ chunk_append_exec(CustomScanState *node) { initialize_runtime_exclusion(state); - if (!state->valid_subplans || bms_num_members(state->valid_subplans) == 0) + if (bms_is_empty(state->valid_subplans)) return ExecClearTuple(node->ss.ps.ps_ResultTupleSlot); state->current = bms_next_member(state->valid_subplans, -1); @@ -288,9 +309,8 @@ chunk_append_exec(CustomScanState *node) if (!TupIsNull(subslot)) { /* - * If the subplan gave us something then return it as-is. We do - * NOT make use of the result slot that was set up in - * chunk_append_begin there's no need for it. + * If the subplan gave us something check if we need + * to do projection otherwise return as is. */ if (node->ss.ps.ps_ProjInfo == NULL) return subslot; @@ -348,11 +368,17 @@ chunk_append_rescan(CustomScanState *node) for (i = 0; i < state->num_subplans; i++) { + if (node->ss.ps.chgParam != NULL) + UpdateChangedParamSet(state->subplanstates[i], node->ss.ps.chgParam); + ExecReScan(state->subplanstates[i]); } state->current = 0; - if (state->runtime_exclusion) + /* + * detect changed params and reset runtime exclusion state + */ + if (state->runtime_exclusion && bms_overlap(node->ss.ps.chgParam, state->params)) { bms_free(state->valid_subplans); state->valid_subplans = NULL; @@ -411,20 +437,26 @@ constify_param_mutator(Node *node, void *context) if (IsA(node, Param)) { Param *param = castNode(Param, node); - EState *state = (EState *) context; + EState *estate = (EState *) context; if (param->paramkind == PARAM_EXEC) { TypeCacheEntry *tce = lookup_type_cache(param->paramtype, 0); - ParamExecData value = state->es_param_exec_vals[param->paramid]; + ParamExecData prm = estate->es_param_exec_vals[param->paramid]; - if (!value.execPlan) + if (prm.execPlan != NULL) + { + ExprContext *econtext = GetPerTupleExprContext(estate); + ExecSetParamPlan(prm.execPlan, econtext); + } + + if (prm.execPlan == NULL) return (Node *) makeConst(param->paramtype, param->paramtypmod, param->paramcollid, tce->typlen, - value.value, - value.isnull, + prm.value, + prm.isnull, tce->typbyval); } return node; @@ -434,37 +466,166 @@ constify_param_mutator(Node *node, void *context) } /* - * Exclude child relations (chunks) at execution time based on constraints. - * - * This functions tries to reuse as much functionality as possible from standard - * constraint exclusion in PostgreSQL that normally happens at planning - * time. Therefore, we need to fake a number of planning-related data - * structures. 
+ * stripped down version of postgres get_relation_constraints */ -static bool -can_exclude_chunk(PlannerInfo *root, EState *estate, Index rt_index, List *restrictinfos) +static List * +ca_get_relation_constraints(Oid relationObjectId, Index varno, bool include_notnull) { - RangeTblEntry *rte = rt_fetch(rt_index, estate->es_range_table); - RelOptInfo rel = { - .type = T_RelOptInfo, - .relid = rt_index, - .reloptkind = RELOPT_OTHER_MEMBER_REL, - .baserestrictinfo = restrictinfos, - }; + List *result = NIL; + Relation relation; + TupleConstr *constr; - return rte->rtekind == RTE_RELATION && rte->relkind == RELKIND_RELATION && !rte->inh && - relation_excluded_by_constraints(root, &rel, rte); + /* + * We assume the relation has already been safely locked. + */ + relation = heap_open(relationObjectId, NoLock); + + constr = relation->rd_att->constr; + if (constr != NULL) + { + int num_check = constr->num_check; + int i; + + for (i = 0; i < num_check; i++) + { + Node *cexpr; + + /* + * If this constraint hasn't been fully validated yet, we must + * ignore it here. + */ + if (!constr->check[i].ccvalid) + continue; + + cexpr = stringToNode(constr->check[i].ccbin); + + /* + * Run each expression through const-simplification and + * canonicalization. This is not just an optimization, but is + * necessary, because we will be comparing it to + * similarly-processed qual clauses, and may fail to detect valid + * matches without this. This must match the processing done to + * qual clauses in preprocess_expression()! (We can skip the + * stuff involving subqueries, however, since we don't allow any + * in check constraints.) + */ + cexpr = eval_const_expressions(NULL, cexpr); + +#if (PG96 && PG_VERSION_NUM < 90609) || (PG10 && PG_VERSION_NUM < 100004) + cexpr = (Node *) canonicalize_qual((Expr *) cexpr); +#elif PG96 || PG10 + cexpr = (Node *) canonicalize_qual_ext((Expr *) cexpr, true); +#else + cexpr = (Node *) canonicalize_qual((Expr *) cexpr, true); +#endif + + /* Fix Vars to have the desired varno */ + if (varno != 1) + ChangeVarNodes(cexpr, 1, varno, 0); + + /* + * Finally, convert to implicit-AND format (that is, a List) and + * append the resulting item(s) to our output list. + */ + result = list_concat(result, make_ands_implicit((Expr *) cexpr)); + } + + /* Add NOT NULL constraints in expression form, if requested */ + if (include_notnull && constr->has_not_null) + { + int natts = relation->rd_att->natts; + + for (i = 1; i <= natts; i++) + { + Form_pg_attribute att = TupleDescAttr(relation->rd_att, i - 1); + + if (att->attnotnull && !att->attisdropped) + { + NullTest *ntest = makeNode(NullTest); + + ntest->arg = (Expr *) + makeVar(varno, i, att->atttypid, att->atttypmod, att->attcollation, 0); + ntest->nulltesttype = IS_NOT_NULL; + + /* + * argisrow=false is correct even for a composite column, + * because attnotnull does not represent a SQL-spec IS NOT + * NULL test in such a case, just IS DISTINCT FROM NULL. + */ + ntest->argisrow = false; + ntest->location = -1; + result = lappend(result, ntest); + } + } + } + } + + heap_close(relation, NoLock); + + return result; } /* - * Adjust the RangeTableEntry indexes in the restrictinfo - * clauses because during planning subquery indexes will be - * different from the final index after flattening. + * Exclude child relations (chunks) at execution time based on constraints. 
+ * + * constraints is the list of constraint expressions of the relation + * baserestrictinfo is the list of RestrictInfos + */ +static bool +can_exclude_chunk(List *constraints, List *baserestrictinfo) +{ + /* + * Regardless of the setting of constraint_exclusion, detect + * constant-FALSE-or-NULL restriction clauses. Because const-folding will + * reduce "anything AND FALSE" to just "FALSE", any such case should + * result in exactly one baserestrictinfo entry. This doesn't fire very + * often, but it seems cheap enough to be worth doing anyway. (Without + * this, we'd miss some optimizations that 9.5 and earlier found via much + * more roundabout methods.) + */ + if (list_length(baserestrictinfo) == 1) + { + RestrictInfo *rinfo = (RestrictInfo *) linitial(baserestrictinfo); + Expr *clause = rinfo->clause; + + if (clause && IsA(clause, Const) && + (((Const *) clause)->constisnull || !DatumGetBool(((Const *) clause)->constvalue))) + return true; + } + + /* + * The constraints are effectively ANDed together, so we can just try to + * refute the entire collection at once. This may allow us to make proofs + * that would fail if we took them individually. + * + * Note: we use rel->baserestrictinfo, not safe_restrictions as might seem + * an obvious optimization. Some of the clauses might be OR clauses that + * have volatile and nonvolatile subclauses, and it's OK to make + * deductions with the nonvolatile parts. + * + * We need strong refutation because we have to prove that the constraints + * would yield false, not just NULL. + */ +#if PG96 + if (predicate_refuted_by(constraints, baserestrictinfo)) +#else + if (predicate_refuted_by(constraints, baserestrictinfo, false)) +#endif + return true; + + return false; +} + +/* + * Fetch the constraints for a relation and adjust range table indexes + * if necessary. */ static void -adjust_ri_clauses(ChunkAppendState *state, List *initial_rt_indexes) +initialize_constraints(ChunkAppendState *state, List *initial_rt_indexes) { ListCell *lc_clauses, *lc_plan, *lc_relid; + List *constraints = NIL; + EState *estate = state->csstate.ss.ps.state; if (initial_rt_indexes == NIL) return; @@ -481,10 +642,24 @@ adjust_ri_clauses(ChunkAppendState *state, List *initial_rt_indexes) { Scan *scan = chunk_append_get_scan_plan(lfirst(lc_plan)); Index initial_index = lfirst_oid(lc_relid); + List *relation_constraints = NIL; - if (scan != NULL && scan->scanrelid > 0 && scan->scanrelid != initial_index) + if (scan != NULL && scan->scanrelid > 0) { - ChangeVarNodes(lfirst(lc_clauses), initial_index, scan->scanrelid, 0); + Index rt_index = scan->scanrelid; + RangeTblEntry *rte = rt_fetch(rt_index, estate->es_range_table); + relation_constraints = ca_get_relation_constraints(rte->relid, rt_index, true); + + /* + * Adjust the RangeTableEntry indexes in the restrictinfo + * clauses because during planning subquery indexes may be + * different from the final index after flattening. 
+ */ + if (rt_index != initial_index) + ChangeVarNodes(lfirst(lc_clauses), initial_index, scan->scanrelid, 0); } + constraints = lappend(constraints, relation_constraints); } + state->initial_constraints = constraints; + state->filtered_constraints = constraints; } diff --git a/src/chunk_append/exec.h b/src/chunk_append/exec.h index c88c4e234..05c93b0ed 100644 --- a/src/chunk_append/exec.h +++ b/src/chunk_append/exec.h @@ -26,20 +26,29 @@ typedef struct ChunkAppendState /* list of subplans after planning */ List *initial_subplans; - /* list of restrictinfo clauses indexed similar to initial_subplans */ + /* list of constraints indexed like initial_subplans */ + List *initial_constraints; + /* list of restrictinfo clauses indexed like initial_subplans */ List *initial_ri_clauses; /* list of subplans after startup exclusion */ List *filtered_subplans; + /* list of relation constraints after startup exclusion */ + List *filtered_constraints; /* list of restrictinfo clauses after startup exclusion */ List *filtered_ri_clauses; /* valid subplans for runtime exclusion */ Bitmapset *valid_subplans; + Bitmapset *params; - /* sort options if this append is ordered */ + /* sort options if this append is ordered, only used for EXPLAIN */ List *sort_options; + /* number of loops and exclusions for EXPLAIN */ + int runtime_number_loops; + int runtime_number_exclusions; + } ChunkAppendState; extern Node *chunk_append_state_create(CustomScan *cscan); diff --git a/src/chunk_append/explain.c b/src/chunk_append/explain.c index fba68dbe1..34fc64daf 100644 --- a/src/chunk_append/explain.c +++ b/src/chunk_append/explain.c @@ -45,12 +45,11 @@ chunk_append_explain(CustomScanState *node, List *ancestors, ExplainState *es) list_length(node->custom_ps), es); - if (state->runtime_exclusion) - ExplainPropertyIntegerCompat("Chunks excluded during runtime", - NULL, - list_length(state->filtered_subplans) - - bms_num_members(state->valid_subplans), - es); + if (state->runtime_exclusion && state->runtime_number_loops > 0) + { + int avg_excluded = state->runtime_number_exclusions / state->runtime_number_loops; + ExplainPropertyIntegerCompat("Chunks excluded during runtime", NULL, avg_excluded, es); + } } /* diff --git a/src/chunk_append/planner.c b/src/chunk_append/planner.c index 12b4327f7..7b72515b1 100644 --- a/src/chunk_append/planner.c +++ b/src/chunk_append/planner.c @@ -101,7 +101,21 @@ chunk_append_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPath *path, L cscan->custom_scan_tlist = tlist; cscan->scan.plan.targetlist = tlist; - if (path->path.pathkeys != NIL) + if (path->path.pathkeys == NIL) + { + ListCell *lc_plan, *lc_path; + forboth (lc_path, path->custom_paths, lc_plan, custom_plans) + { + Plan *child_plan = lfirst(lc_plan); + Path *child_path = lfirst(lc_path); + AppendRelInfo *appinfo = get_appendrelinfo(root, child_path->parent->relid); + + /* push down targetlist to children */ + child_plan->targetlist = + (List *) adjust_appendrel_attrs_compat(root, (Node *) tlist, appinfo); + } + } + else { /* * If this is an ordered append node we need to ensure the columns @@ -226,7 +240,7 @@ chunk_append_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPath *path, L Assert(list_length(chunk_ri_clauses) == list_length(chunk_rt_indexes)); } - if (root->limit_tuples > 0 && root->limit_tuples <= PG_UINT32_MAX) + if (capath->pushdown_limit && root->limit_tuples > 0 && root->limit_tuples <= PG_UINT32_MAX) limit = root->limit_tuples; custom_private = list_make1( diff --git a/src/plan_expand_hypertable.c 
b/src/plan_expand_hypertable.c index 389f76319..ee2baa277 100644 --- a/src/plan_expand_hypertable.c +++ b/src/plan_expand_hypertable.c @@ -570,7 +570,7 @@ find_children_oids(HypertableRestrictInfo *hri, Hypertable *ht, LOCKMODE lockmod static bool should_order_append(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht, List *join_conditions, - bool *reverse) + int *order_attno, bool *reverse) { /* check if optimizations are enabled */ if (ts_guc_disable_optimizations || !ts_guc_enable_ordered_append || @@ -584,7 +584,7 @@ should_order_append(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht, List *jo if (root->parse->sortClause == NIL) return false; - return ts_ordered_append_should_optimize(root, rel, ht, join_conditions, reverse); + return ts_ordered_append_should_optimize(root, rel, ht, join_conditions, order_attno, reverse); } bool @@ -659,6 +659,8 @@ get_explicit_chunk_oids(CollectQualCtx *ctx, Hypertable *ht) * takes precedence. * * If appends are returned in order appends_ordered on rel->fdw_private is set to true. + * To make verifying pathkeys easier in set_rel_pathlist the attno of the column ordered by + * is stored in order_attno on rel->fdw_private. * If the hypertable uses space partitioning the nested oids are stored in nested_oids * on rel->fdw_private when appends are ordered. */ @@ -666,6 +668,7 @@ static List * get_chunk_oids(CollectQualCtx *ctx, PlannerInfo *root, RelOptInfo *rel, Hypertable *ht) { bool reverse; + int order_attno; if (ctx->chunk_exclusion_func == NULL) { @@ -678,21 +681,29 @@ get_chunk_oids(CollectQualCtx *ctx, PlannerInfo *root, RelOptInfo *rel, Hypertab */ ts_hypertable_restrict_info_add(hri, root, ctx->restrictions); - if (should_order_append(root, rel, ht, ctx->join_conditions, &reverse)) + /* + * If fdw_private has not been set up by the caller there is no point checking + * for ordered append as we can't pass the required metadata in fdw_private + * to signal that this is safe to transform into an ordered append plan in + * set_rel_pathlist. 
+ */ + if (rel->fdw_private != NULL && + should_order_append(root, rel, ht, ctx->join_conditions, &order_attno, &reverse)) { + TimescaleDBPrivate *private = (TimescaleDBPrivate *) rel->fdw_private; List **nested_oids = NULL; - if (rel->fdw_private != NULL) - { - ((TimescaleDBPrivate *) rel->fdw_private)->appends_ordered = true; + private + ->appends_ordered = true; + private + ->order_attno = order_attno; - /* - * for space partitioning we need extra information about the - * time slices of the chunks - */ - if (ht->space->num_dimensions > 1) - nested_oids = &((TimescaleDBPrivate *) rel->fdw_private)->nested_oids; - } + /* + * for space partitioning we need extra information about the + * time slices of the chunks + */ + if (ht->space->num_dimensions > 1) + nested_oids = &private->nested_oids; return ts_hypertable_restrict_info_get_chunk_oids_ordered(hri, ht, diff --git a/src/planner.c b/src/planner.c index 8dc03f850..8a4caf9fd 100644 --- a/src/planner.c +++ b/src/planner.c @@ -63,6 +63,7 @@ static planner_hook_type prev_planner_hook; static set_rel_pathlist_hook_type prev_set_rel_pathlist_hook; static get_relation_info_hook_type prev_get_relation_info_hook; static create_upper_paths_hook_type prev_create_upper_paths_hook; +static bool contain_param(Node *node); #define CTE_NAME_HYPERTABLES "hypertable_parent" @@ -198,6 +199,65 @@ should_optimize_append(const Path *path) return false; } +static inline bool +should_chunk_append(PlannerInfo *root, RelOptInfo *rel, Path *path, bool ordered, int order_attno) +{ + if (root->parse->commandType != CMD_SELECT || !ts_guc_enable_chunk_append) + return false; + + switch (nodeTag(path)) + { + case T_AppendPath: + /* + * If there are clauses that have mutable functions, or clauses that reference + * Params this Path might benefit from startup or runtime exclusion + */ + { + ListCell *lc; + + foreach (lc, rel->baserestrictinfo) + { + RestrictInfo *rinfo = (RestrictInfo *) lfirst(lc); + + if (contain_mutable_functions((Node *) rinfo->clause) || + contain_param((Node *) rinfo->clause)) + return true; + } + return false; + break; + } + case T_MergeAppendPath: + /* + * Can we do ordered append + */ + { + PathKey *pk; + ListCell *lc; + + if (!ordered || path->pathkeys == NIL) + return false; + + pk = linitial_node(PathKey, path->pathkeys); + + /* + * check pathkey is compatible with ordered append ordering + * we created when expanding hypertable + */ + foreach (lc, pk->pk_eclass->ec_members) + { + EquivalenceMember *em = lfirst(lc); + if (!em->em_is_child && IsA(em->em_expr, Var) && + castNode(Var, em->em_expr)->varattno == order_attno) + return true; + } + return false; + break; + } + default: + return false; + } +} + static inline bool is_append_child(RelOptInfo *rel, RangeTblEntry *rte) { @@ -283,6 +343,7 @@ timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, Rang { ListCell *lc; bool ordered = false; + int order_attno = 0; List *nested_oids = NIL; if (rel->fdw_private != NULL) @@ -290,6 +351,7 @@ timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, Rang TimescaleDBPrivate *private = (TimescaleDBPrivate *) rel->fdw_private; ordered = private->appends_ordered; + order_attno = private->order_attno; nested_oids = private->nested_oids; } @@ -300,11 +362,8 @@ timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, Rang switch (nodeTag(*pathptr)) { case T_AppendPath: - if (should_optimize_append(*pathptr)) - *pathptr = ts_constraint_aware_append_path_create(root, ht, *pathptr); - break; case 
T_MergeAppendPath: - if (ordered) + if (should_chunk_append(root, rel, *pathptr, ordered, order_attno)) *pathptr = ts_chunk_append_path_create(root, rel, ht, @@ -327,7 +386,14 @@ timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, Rang { case T_AppendPath: case T_MergeAppendPath: - if (should_optimize_append(*pathptr)) + if (should_chunk_append(root, rel, *pathptr, ordered, order_attno)) + *pathptr = ts_chunk_append_path_create(root, + rel, + ht, + *pathptr, + ordered, + nested_oids); + else if (should_optimize_append(*pathptr)) *pathptr = ts_constraint_aware_append_path_create(root, ht, *pathptr); break; default: @@ -565,6 +631,24 @@ timescale_create_upper_paths_hook(PlannerInfo *root, UpperRelationKind stage, Re } } +static bool +contain_param_exec_walker(Node *node, void *context) +{ + if (node == NULL) + return false; + + if (IsA(node, Param)) + return true; + + return expression_tree_walker(node, contain_param_exec_walker, context); +} + +static bool +contain_param(Node *node) +{ + return contain_param_exec_walker(node, NULL); +} + void _planner_init(void) { diff --git a/src/planner.h b/src/planner.h index 4e4780c2d..8f6533f58 100644 --- a/src/planner.h +++ b/src/planner.h @@ -9,6 +9,8 @@ typedef struct TimescaleDBPrivate { bool appends_ordered; + /* attno of the time dimension in the parent table if appends are ordered */ + int order_attno; List *nested_oids; } TimescaleDBPrivate; diff --git a/test/expected/append-10.out b/test/expected/append-10.out index 1005c7510..bed1c1a2b 100644 --- a/test/expected/append-10.out +++ b/test/expected/append-10.out @@ -151,15 +151,13 @@ psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! - QUERY PLAN ----------------------------------------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1) - Hypertable: append_test - Chunks left after exclusion: 1 - -> Append (actual rows=1 loops=1) - -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) - Index Cond: ("time" > (now_s() - '@ 2 mons'::interval)) -(6 rows) + QUERY PLAN +---------------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on append_test (actual rows=1 loops=1) + Chunks excluded during startup: 2 + -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) + Index Cond: ("time" > (now_s() - '@ 2 mons'::interval)) +(4 rows) -- adding ORDER BY and LIMIT should turn the plan into an optimized -- ordered append plan @@ -263,22 +261,20 @@ psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! 
- QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------- Sort (actual rows=1 loops=1) Sort Key: (date_trunc('year'::text, append_test."time")) DESC Sort Method: quicksort Memory: 25kB -> HashAggregate (actual rows=1 loops=1) - Group Key: date_trunc('year'::text, append_test."time") - -> Custom Scan (ConstraintAwareAppend) (actual rows=3 loops=1) - Hypertable: append_test - Chunks left after exclusion: 2 - -> Append (actual rows=3 loops=1) - -> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1) - Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) - -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) - Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) -(13 rows) + Group Key: (date_trunc('year'::text, append_test."time")) + -> Custom Scan (ChunkAppend) on append_test (actual rows=3 loops=1) + Chunks excluded during startup: 1 + -> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1) + Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) + -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) + Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) +(11 rows) -- querying outside the time range should return nothing. This tests -- that ConstraintAwareAppend can handle the case when an Append node @@ -345,26 +341,24 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! 
- QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------- Merge Left Join (actual rows=6 loops=1) Merge Cond: (period.btime = data.btime) CTE data -> HashAggregate (actual rows=3 loops=1) - Group Key: time_bucket('@ 30 days'::interval, append_test."time") - -> Custom Scan (ConstraintAwareAppend) (actual rows=5 loops=1) - Hypertable: append_test - Chunks left after exclusion: 3 - -> Append (actual rows=5 loops=1) - -> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk (actual rows=2 loops=1) - Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) - Filter: (colorid > 0) - -> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1) - Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) - Filter: (colorid > 0) - -> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) - Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) - Filter: (colorid > 0) + Group Key: (time_bucket('@ 30 days'::interval, append_test."time")) + -> Custom Scan (ChunkAppend) on append_test (actual rows=5 loops=1) + Chunks excluded during startup: 0 + -> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk (actual rows=2 loops=1) + Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) + Filter: (colorid > 0) + -> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1) + Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) + Filter: (colorid > 0) + -> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) + Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) + Filter: (colorid > 0) CTE period -> Function Scan on generate_series "time" (actual rows=6 loops=1) -> Sort (actual rows=6 loops=1) @@ -375,7 +369,7 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! Sort Key: data.btime Sort Method: quicksort Memory: 25kB -> CTE Scan on data (actual rows=3 loops=1) -(28 rows) +(26 rows) WITH data AS ( SELECT time_bucket(INTERVAL '30 day', TIME) AS btime, AVG(temp) AS VALUE @@ -435,23 +429,19 @@ psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! 
- QUERY PLAN --------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------- Nested Loop (actual rows=1 loops=1) Join Filter: (a.colorid = j.colorid) - -> Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1) - Hypertable: append_test - Chunks left after exclusion: 1 - -> Append (actual rows=1 loops=1) - -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1 (actual rows=1 loops=1) - Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) - -> Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1) - Hypertable: join_test - Chunks left after exclusion: 1 - -> Append (actual rows=1 loops=1) - -> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1 (actual rows=1 loops=1) - Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) -(14 rows) + -> Custom Scan (ChunkAppend) on append_test a (actual rows=1 loops=1) + Chunks excluded during startup: 2 + -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1 (actual rows=1 loops=1) + Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) + -> Custom Scan (ChunkAppend) on join_test j (actual rows=1 loops=1) + Chunks excluded during startup: 2 + -> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1 (actual rows=1 loops=1) + Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) +(10 rows) reset enable_hashjoin; reset enable_mergejoin; @@ -980,25 +970,248 @@ psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! 
- QUERY PLAN ---------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------ Sort (actual rows=0 loops=1) Sort Key: append_test."time" DESC Sort Method: quicksort Memory: 25kB - -> Custom Scan (ConstraintAwareAppend) (actual rows=0 loops=1) - Hypertable: append_test - Chunks left after exclusion: 3 - -> Append (actual rows=0 loops=1) - -> Sample Scan on _hyper_1_3_chunk (actual rows=0 loops=1) - Sampling: system ('1'::real) - Filter: ("time" > (now_s() - '@ 400 days'::interval)) - -> Sample Scan on _hyper_1_2_chunk (actual rows=0 loops=1) - Sampling: system ('1'::real) - Filter: ("time" > (now_s() - '@ 400 days'::interval)) - -> Sample Scan on _hyper_1_1_chunk (actual rows=0 loops=1) - Sampling: system ('1'::real) - Filter: ("time" > (now_s() - '@ 400 days'::interval)) -(16 rows) + -> Custom Scan (ChunkAppend) on append_test (actual rows=0 loops=1) + Chunks excluded during startup: 0 + -> Sample Scan on _hyper_1_3_chunk (actual rows=0 loops=1) + Sampling: system ('1'::real) + Filter: ("time" > (now_s() - '@ 400 days'::interval)) + -> Sample Scan on _hyper_1_2_chunk (actual rows=0 loops=1) + Sampling: system ('1'::real) + Filter: ("time" > (now_s() - '@ 400 days'::interval)) + -> Sample Scan on _hyper_1_1_chunk (actual rows=0 loops=1) + Sampling: system ('1'::real) + Filter: ("time" > (now_s() - '@ 400 days'::interval)) +(14 rows) + +-- test runtime exclusion +-- test runtime exclusion with LATERAL and 2 hypertables +:PREFIX SELECT m1.time, m2.time FROM metrics_timestamptz m1 LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m2 WHERE m1.time = m2.time LIMIT 1) m2 ON true ORDER BY m1.time; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- + Nested Loop Left Join (actual rows=745 loops=1) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=745 loops=1) + Order: m1."time" + -> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (actual rows=112 loops=1) + Heap Fetches: 112 + -> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5 (actual rows=129 loops=1) + Heap Fetches: 129 + -> Limit (actual rows=1 loops=745) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m2 (actual rows=1 loops=745) + Chunks excluded during runtime: 4 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_1 (actual rows=1 loops=112) + Index Cond: ("time" = m1."time") + Heap Fetches: 112 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_2 (actual rows=1 loops=168) + Index Cond: ("time" = m1."time") + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3 (actual rows=1 loops=168) + Index Cond: ("time" = m1."time") + Heap Fetches: 168 + -> Index Only Scan using 
_hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_4 (actual rows=1 loops=168) + Index Cond: ("time" = m1."time") + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_5 (actual rows=1 loops=129) + Index Cond: ("time" = m1."time") + Heap Fetches: 129 +(31 rows) + +-- test runtime exclusion with LATERAL and generate_series +:PREFIX SELECT g.time FROM generate_series('2000-01-01'::timestamptz, '2000-02-01'::timestamptz, '1d'::interval) g(time) LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m WHERE m.time=g.time LIMIT 1) m ON true; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------- + Nested Loop Left Join (actual rows=32 loops=1) + -> Function Scan on generate_series g (actual rows=32 loops=1) + -> Limit (actual rows=1 loops=32) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m (actual rows=1 loops=32) + Chunks excluded during runtime: 4 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1 (actual rows=1 loops=5) + Index Cond: ("time" = g."time") + Heap Fetches: 5 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5 (actual rows=1 loops=6) + Index Cond: ("time" = g."time") + Heap Fetches: 6 +(20 rows) + +:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time) m ON true; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------- + Merge Join (actual rows=32 loops=1) + Merge Cond: (m."time" = g."time") + -> Merge Append (actual rows=745 loops=1) + Sort Key: m."time" + -> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m (actual rows=112 loops=1) + Heap Fetches: 112 + -> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_1 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_2 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_3 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_4 (actual rows=129 loops=1) + Heap Fetches: 129 + -> Sort (actual rows=32 loops=1) + Sort Key: g."time" + Sort Method: quicksort Memory: 26kB + -> Function Scan on generate_series g (actual rows=32 loops=1) +(18 rows) + +:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m 
WHERE time=g.time ORDER BY time) m ON true; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Nested Loop (actual rows=32 loops=1) + -> Function Scan on generate_series g (actual rows=32 loops=1) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m (actual rows=1 loops=32) + Chunks excluded during runtime: 4 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1 (actual rows=1 loops=5) + Index Cond: ("time" = g."time") + Heap Fetches: 5 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5 (actual rows=1 loops=6) + Index Cond: ("time" = g."time") + Heap Fetches: 6 +(19 rows) + +-- test runtime exclusion with subquery +:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE m1.time=(SELECT max(time) FROM metrics_timestamptz); + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=1 loops=1) + Chunks excluded during runtime: 4 + InitPlan 2 (returns $1) + -> Result (actual rows=1 loops=1) + InitPlan 1 (returns $0) + -> Limit (actual rows=1 loops=1) + -> Custom Scan (ChunkAppend) on metrics_timestamptz (actual rows=1 loops=1) + Order: metrics_timestamptz."time" DESC + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk (actual rows=1 loops=1) + Index Cond: ("time" IS NOT NULL) + Heap Fetches: 1 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk (never executed) + Index Cond: ("time" IS NOT NULL) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk (never executed) + Index Cond: ("time" IS NOT NULL) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk (never executed) + Index Cond: ("time" IS NOT NULL) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk (never executed) + Index Cond: ("time" IS NOT NULL) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (never executed) + Index Cond: ("time" = $1) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (never executed) + Index Cond: ("time" = $1) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (never executed) + Index Cond: ("time" = $1) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4 (never executed) + Index Cond: ("time" = $1) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5 
(actual rows=1 loops=1) + Index Cond: ("time" = $1) + Heap Fetches: 1 +(38 rows) + +-- test runtime exclusion with correlated subquery +:PREFIX SELECT m1.time, (SELECT m2.time FROM metrics_timestamptz m2 WHERE m2.time < m1.time ORDER BY m2.time DESC LIMIT 1) FROM metrics_timestamptz m1 WHERE m1.time < '2000-01-10' ORDER BY m1.time; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=216 loops=1) + Order: m1."time" + -> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (actual rows=112 loops=1) + Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + Heap Fetches: 112 + SubPlan 1 + -> Limit (actual rows=1 loops=216) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m2 (actual rows=1 loops=216) + Order: m2."time" DESC + Chunks excluded during runtime: 3 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_1 (never executed) + Index Cond: ("time" < m1_1."time") + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_2 (never executed) + Index Cond: ("time" < m1_1."time") + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3 (never executed) + Index Cond: ("time" < m1_1."time") + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_4 (actual rows=1 loops=103) + Index Cond: ("time" < m1_1."time") + Heap Fetches: 103 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_5 (actual rows=1 loops=113) + Index Cond: ("time" < m1_1."time") + Heap Fetches: 112 + -> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (actual rows=104 loops=1) + Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + Heap Fetches: 104 +(28 rows) + +-- test EXISTS +:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE EXISTS(SELECT 1 FROM metrics_timestamptz m2 WHERE m1.time < m2.time) ORDER BY m1.time DESC limit 1000; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------- + Limit (actual rows=744 loops=1) + -> Nested Loop Semi Join (actual rows=744 loops=1) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=745 loops=1) + Order: m1."time" DESC + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_1 (actual rows=129 loops=1) + Heap Fetches: 129 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_2 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_4 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_5 (actual rows=112 loops=1) + Heap Fetches: 112 + -> Append (actual rows=1 loops=745) + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2 
(actual rows=0 loops=745) + Index Cond: ("time" > m1."time") + Heap Fetches: 111 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_1 (actual rows=0 loops=634) + Index Cond: ("time" > m1."time") + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_2 (actual rows=0 loops=466) + Index Cond: ("time" > m1."time") + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_3 (actual rows=1 loops=298) + Index Cond: ("time" > m1."time") + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_4 (actual rows=1 loops=130) + Index Cond: ("time" > m1."time") + Heap Fetches: 129 +(30 rows) --generate the results into two different files \set ECHO errors diff --git a/test/expected/append-11.out b/test/expected/append-11.out index 1005c7510..66c4f6a59 100644 --- a/test/expected/append-11.out +++ b/test/expected/append-11.out @@ -151,15 +151,13 @@ psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! - QUERY PLAN ----------------------------------------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1) - Hypertable: append_test - Chunks left after exclusion: 1 - -> Append (actual rows=1 loops=1) - -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) - Index Cond: ("time" > (now_s() - '@ 2 mons'::interval)) -(6 rows) + QUERY PLAN +---------------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on append_test (actual rows=1 loops=1) + Chunks excluded during startup: 2 + -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) + Index Cond: ("time" > (now_s() - '@ 2 mons'::interval)) +(4 rows) -- adding ORDER BY and LIMIT should turn the plan into an optimized -- ordered append plan @@ -263,22 +261,20 @@ psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! 
- QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------- Sort (actual rows=1 loops=1) Sort Key: (date_trunc('year'::text, append_test."time")) DESC Sort Method: quicksort Memory: 25kB -> HashAggregate (actual rows=1 loops=1) Group Key: date_trunc('year'::text, append_test."time") - -> Custom Scan (ConstraintAwareAppend) (actual rows=3 loops=1) - Hypertable: append_test - Chunks left after exclusion: 2 - -> Append (actual rows=3 loops=1) - -> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1) - Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) - -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) - Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) -(13 rows) + -> Custom Scan (ChunkAppend) on append_test (actual rows=3 loops=1) + Chunks excluded during startup: 1 + -> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1) + Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) + -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) + Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) +(11 rows) -- querying outside the time range should return nothing. This tests -- that ConstraintAwareAppend can handle the case when an Append node @@ -345,26 +341,24 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! 
- QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------- Merge Left Join (actual rows=6 loops=1) Merge Cond: (period.btime = data.btime) CTE data -> HashAggregate (actual rows=3 loops=1) Group Key: time_bucket('@ 30 days'::interval, append_test."time") - -> Custom Scan (ConstraintAwareAppend) (actual rows=5 loops=1) - Hypertable: append_test - Chunks left after exclusion: 3 - -> Append (actual rows=5 loops=1) - -> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk (actual rows=2 loops=1) - Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) - Filter: (colorid > 0) - -> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1) - Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) - Filter: (colorid > 0) - -> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) - Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) - Filter: (colorid > 0) + -> Custom Scan (ChunkAppend) on append_test (actual rows=5 loops=1) + Chunks excluded during startup: 0 + -> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk (actual rows=2 loops=1) + Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) + Filter: (colorid > 0) + -> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1) + Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) + Filter: (colorid > 0) + -> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1) + Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) + Filter: (colorid > 0) CTE period -> Function Scan on generate_series "time" (actual rows=6 loops=1) -> Sort (actual rows=6 loops=1) @@ -375,7 +369,7 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! Sort Key: data.btime Sort Method: quicksort Memory: 25kB -> CTE Scan on data (actual rows=3 loops=1) -(28 rows) +(26 rows) WITH data AS ( SELECT time_bucket(INTERVAL '30 day', TIME) AS btime, AVG(temp) AS VALUE @@ -435,23 +429,19 @@ psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! 
- QUERY PLAN --------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------- Nested Loop (actual rows=1 loops=1) Join Filter: (a.colorid = j.colorid) - -> Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1) - Hypertable: append_test - Chunks left after exclusion: 1 - -> Append (actual rows=1 loops=1) - -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1 (actual rows=1 loops=1) - Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) - -> Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1) - Hypertable: join_test - Chunks left after exclusion: 1 - -> Append (actual rows=1 loops=1) - -> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1 (actual rows=1 loops=1) - Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) -(14 rows) + -> Custom Scan (ChunkAppend) on append_test a (actual rows=1 loops=1) + Chunks excluded during startup: 2 + -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1 (actual rows=1 loops=1) + Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) + -> Custom Scan (ChunkAppend) on join_test j (actual rows=1 loops=1) + Chunks excluded during startup: 2 + -> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1 (actual rows=1 loops=1) + Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) +(10 rows) reset enable_hashjoin; reset enable_mergejoin; @@ -980,25 +970,248 @@ psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! 
- QUERY PLAN ---------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------ Sort (actual rows=0 loops=1) Sort Key: append_test."time" DESC Sort Method: quicksort Memory: 25kB - -> Custom Scan (ConstraintAwareAppend) (actual rows=0 loops=1) - Hypertable: append_test - Chunks left after exclusion: 3 - -> Append (actual rows=0 loops=1) - -> Sample Scan on _hyper_1_3_chunk (actual rows=0 loops=1) - Sampling: system ('1'::real) - Filter: ("time" > (now_s() - '@ 400 days'::interval)) - -> Sample Scan on _hyper_1_2_chunk (actual rows=0 loops=1) - Sampling: system ('1'::real) - Filter: ("time" > (now_s() - '@ 400 days'::interval)) - -> Sample Scan on _hyper_1_1_chunk (actual rows=0 loops=1) - Sampling: system ('1'::real) - Filter: ("time" > (now_s() - '@ 400 days'::interval)) -(16 rows) + -> Custom Scan (ChunkAppend) on append_test (actual rows=0 loops=1) + Chunks excluded during startup: 0 + -> Sample Scan on _hyper_1_3_chunk (actual rows=0 loops=1) + Sampling: system ('1'::real) + Filter: ("time" > (now_s() - '@ 400 days'::interval)) + -> Sample Scan on _hyper_1_2_chunk (actual rows=0 loops=1) + Sampling: system ('1'::real) + Filter: ("time" > (now_s() - '@ 400 days'::interval)) + -> Sample Scan on _hyper_1_1_chunk (actual rows=0 loops=1) + Sampling: system ('1'::real) + Filter: ("time" > (now_s() - '@ 400 days'::interval)) +(14 rows) + +-- test runtime exclusion +-- test runtime exclusion with LATERAL and 2 hypertables +:PREFIX SELECT m1.time, m2.time FROM metrics_timestamptz m1 LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m2 WHERE m1.time = m2.time LIMIT 1) m2 ON true ORDER BY m1.time; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- + Nested Loop Left Join (actual rows=745 loops=1) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=745 loops=1) + Order: m1."time" + -> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (actual rows=112 loops=1) + Heap Fetches: 112 + -> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5 (actual rows=129 loops=1) + Heap Fetches: 129 + -> Limit (actual rows=1 loops=745) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m2 (actual rows=1 loops=745) + Chunks excluded during runtime: 4 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_1 (actual rows=1 loops=112) + Index Cond: ("time" = m1."time") + Heap Fetches: 112 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_2 (actual rows=1 loops=168) + Index Cond: ("time" = m1."time") + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3 (actual rows=1 loops=168) + Index Cond: ("time" = m1."time") + Heap Fetches: 168 + -> Index Only Scan using 
_hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_4 (actual rows=1 loops=168) + Index Cond: ("time" = m1."time") + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_5 (actual rows=1 loops=129) + Index Cond: ("time" = m1."time") + Heap Fetches: 129 +(31 rows) + +-- test runtime exclusion with LATERAL and generate_series +:PREFIX SELECT g.time FROM generate_series('2000-01-01'::timestamptz, '2000-02-01'::timestamptz, '1d'::interval) g(time) LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m WHERE m.time=g.time LIMIT 1) m ON true; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------- + Nested Loop Left Join (actual rows=32 loops=1) + -> Function Scan on generate_series g (actual rows=32 loops=1) + -> Limit (actual rows=1 loops=32) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m (actual rows=1 loops=32) + Chunks excluded during runtime: 4 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1 (actual rows=1 loops=5) + Index Cond: ("time" = g."time") + Heap Fetches: 5 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5 (actual rows=1 loops=6) + Index Cond: ("time" = g."time") + Heap Fetches: 6 +(20 rows) + +:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time) m ON true; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------- + Merge Join (actual rows=32 loops=1) + Merge Cond: (m."time" = g."time") + -> Merge Append (actual rows=745 loops=1) + Sort Key: m."time" + -> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m (actual rows=112 loops=1) + Heap Fetches: 112 + -> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_1 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_2 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_3 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_4 (actual rows=129 loops=1) + Heap Fetches: 129 + -> Sort (actual rows=32 loops=1) + Sort Key: g."time" + Sort Method: quicksort Memory: 26kB + -> Function Scan on generate_series g (actual rows=32 loops=1) +(18 rows) + +:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m 
WHERE time=g.time ORDER BY time) m ON true; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Nested Loop (actual rows=32 loops=1) + -> Function Scan on generate_series g (actual rows=32 loops=1) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m (actual rows=1 loops=32) + Chunks excluded during runtime: 4 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1 (actual rows=1 loops=5) + Index Cond: ("time" = g."time") + Heap Fetches: 5 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4 (actual rows=1 loops=7) + Index Cond: ("time" = g."time") + Heap Fetches: 7 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5 (actual rows=1 loops=6) + Index Cond: ("time" = g."time") + Heap Fetches: 6 +(19 rows) + +-- test runtime exclusion with subquery +:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE m1.time=(SELECT max(time) FROM metrics_timestamptz); + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=1 loops=1) + Chunks excluded during runtime: 4 + InitPlan 2 (returns $1) + -> Result (actual rows=1 loops=1) + InitPlan 1 (returns $0) + -> Limit (actual rows=1 loops=1) + -> Custom Scan (ChunkAppend) on metrics_timestamptz (actual rows=1 loops=1) + Order: metrics_timestamptz."time" DESC + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk (actual rows=1 loops=1) + Index Cond: ("time" IS NOT NULL) + Heap Fetches: 1 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk (never executed) + Index Cond: ("time" IS NOT NULL) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk (never executed) + Index Cond: ("time" IS NOT NULL) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk (never executed) + Index Cond: ("time" IS NOT NULL) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk (never executed) + Index Cond: ("time" IS NOT NULL) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (never executed) + Index Cond: ("time" = $1) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (never executed) + Index Cond: ("time" = $1) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (never executed) + Index Cond: ("time" = $1) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4 (never executed) + Index Cond: ("time" = $1) + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5 
(actual rows=1 loops=1) + Index Cond: ("time" = $1) + Heap Fetches: 1 +(38 rows) + +-- test runtime exclusion with correlated subquery +:PREFIX SELECT m1.time, (SELECT m2.time FROM metrics_timestamptz m2 WHERE m2.time < m1.time ORDER BY m2.time DESC LIMIT 1) FROM metrics_timestamptz m1 WHERE m1.time < '2000-01-10' ORDER BY m1.time; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=216 loops=1) + Order: m1."time" + -> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (actual rows=112 loops=1) + Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + Heap Fetches: 112 + -> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (actual rows=104 loops=1) + Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + Heap Fetches: 104 + SubPlan 1 + -> Limit (actual rows=1 loops=216) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m2 (actual rows=1 loops=216) + Order: m2."time" DESC + Chunks excluded during runtime: 3 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_1 (never executed) + Index Cond: ("time" < m1."time") + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_2 (never executed) + Index Cond: ("time" < m1."time") + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3 (never executed) + Index Cond: ("time" < m1."time") + Heap Fetches: 0 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_4 (actual rows=1 loops=103) + Index Cond: ("time" < m1."time") + Heap Fetches: 103 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_5 (actual rows=1 loops=113) + Index Cond: ("time" < m1."time") + Heap Fetches: 112 +(28 rows) + +-- test EXISTS +:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE EXISTS(SELECT 1 FROM metrics_timestamptz m2 WHERE m1.time < m2.time) ORDER BY m1.time DESC limit 1000; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------- + Limit (actual rows=744 loops=1) + -> Nested Loop Semi Join (actual rows=744 loops=1) + -> Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=745 loops=1) + Order: m1."time" DESC + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_1 (actual rows=129 loops=1) + Heap Fetches: 129 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_2 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_4 (actual rows=168 loops=1) + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_5 (actual rows=112 loops=1) + Heap Fetches: 112 + -> Append (actual rows=1 loops=745) + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2 (actual rows=0 
loops=745) + Index Cond: ("time" > m1."time") + Heap Fetches: 111 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_1 (actual rows=0 loops=634) + Index Cond: ("time" > m1."time") + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_2 (actual rows=0 loops=466) + Index Cond: ("time" > m1."time") + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_3 (actual rows=1 loops=298) + Index Cond: ("time" > m1."time") + Heap Fetches: 168 + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_4 (actual rows=1 loops=130) + Index Cond: ("time" > m1."time") + Heap Fetches: 129 +(30 rows) --generate the results into two different files \set ECHO errors diff --git a/test/expected/append-9.6.out b/test/expected/append-9.6.out index a44a45b25..cbae521d6 100644 --- a/test/expected/append-9.6.out +++ b/test/expected/append-9.6.out @@ -150,15 +150,13 @@ psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! psql:include/append_query.sql:20: NOTICE: Stable function now_s() called! - QUERY PLAN ----------------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: append_test - Chunks left after exclusion: 1 - -> Append - -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk - Index Cond: ("time" > (now_s() - '@ 2 mons'::interval)) -(6 rows) + QUERY PLAN +---------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on append_test + Chunks excluded during startup: 2 + -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk + Index Cond: ("time" > (now_s() - '@ 2 mons'::interval)) +(4 rows) -- adding ORDER BY and LIMIT should turn the plan into an optimized -- ordered append plan @@ -251,21 +249,19 @@ psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! psql:include/append_query.sql:58: NOTICE: Stable function now_s() called! 
- QUERY PLAN ----------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------- Sort Sort Key: (date_trunc('year'::text, append_test."time")) DESC -> HashAggregate - Group Key: date_trunc('year'::text, append_test."time") - -> Custom Scan (ConstraintAwareAppend) - Hypertable: append_test - Chunks left after exclusion: 2 - -> Append - -> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk - Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) - -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk - Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) -(12 rows) + Group Key: (date_trunc('year'::text, append_test."time")) + -> Custom Scan (ChunkAppend) on append_test + Chunks excluded during startup: 1 + -> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk + Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) + -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk + Index Cond: ("time" > (now_s() - '@ 4 mons'::interval)) +(10 rows) -- querying outside the time range should return nothing. This tests -- that ConstraintAwareAppend can handle the case when an Append node @@ -328,26 +324,24 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! - QUERY PLAN ---------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------- Merge Left Join Merge Cond: (period.btime = data.btime) CTE data -> HashAggregate - Group Key: time_bucket('@ 30 days'::interval, append_test."time") - -> Custom Scan (ConstraintAwareAppend) - Hypertable: append_test - Chunks left after exclusion: 3 - -> Append - -> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk - Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) - Filter: (colorid > 0) - -> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk - Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) - Filter: (colorid > 0) - -> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk - Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) - Filter: (colorid > 0) + Group Key: (time_bucket('@ 30 days'::interval, append_test."time")) + -> Custom Scan (ChunkAppend) on append_test + Chunks excluded during startup: 0 + -> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk + Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) + Filter: (colorid > 0) + -> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk + Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) + Filter: (colorid > 0) + -> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk + Index Cond: ("time" > (now_s() - '@ 400 days'::interval)) + Filter: (colorid > 0) CTE period -> Function Scan on generate_series "time" -> Sort @@ -356,7 +350,7 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called! 
-> Sort Sort Key: data.btime -> CTE Scan on data -(26 rows) +(24 rows) WITH data AS ( SELECT time_bucket(INTERVAL '30 day', TIME) AS btime, AVG(temp) AS VALUE @@ -414,23 +408,19 @@ psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! psql:include/append_query.sql:126: NOTICE: Stable function now_s() called! - QUERY PLAN --------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------- Nested Loop Join Filter: (a.colorid = j.colorid) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: append_test - Chunks left after exclusion: 1 - -> Append - -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1 - Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: join_test - Chunks left after exclusion: 1 - -> Append - -> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1 - Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) -(14 rows) + -> Custom Scan (ChunkAppend) on append_test a + Chunks excluded during startup: 2 + -> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1 + Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) + -> Custom Scan (ChunkAppend) on join_test j + Chunks excluded during startup: 2 + -> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1 + Index Cond: ("time" > (now_s() - '@ 3 hours'::interval)) +(10 rows) reset enable_hashjoin; reset enable_mergejoin; @@ -887,24 +877,189 @@ psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! psql:include/append_query.sql:208: NOTICE: Stable function now_s() called! 
- QUERY PLAN ---------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------- Sort Sort Key: append_test."time" DESC - -> Custom Scan (ConstraintAwareAppend) - Hypertable: append_test - Chunks left after exclusion: 3 + -> Custom Scan (ChunkAppend) on append_test + Chunks excluded during startup: 0 + -> Sample Scan on _hyper_1_3_chunk + Sampling: system ('1'::real) + Filter: ("time" > (now_s() - '@ 400 days'::interval)) + -> Sample Scan on _hyper_1_2_chunk + Sampling: system ('1'::real) + Filter: ("time" > (now_s() - '@ 400 days'::interval)) + -> Sample Scan on _hyper_1_1_chunk + Sampling: system ('1'::real) + Filter: ("time" > (now_s() - '@ 400 days'::interval)) +(13 rows) + +-- test runtime exclusion +-- test runtime exclusion with LATERAL and 2 hypertables +:PREFIX SELECT m1.time, m2.time FROM metrics_timestamptz m1 LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m2 WHERE m1.time = m2.time LIMIT 1) m2 ON true ORDER BY m1.time; + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------- + Nested Loop Left Join + -> Custom Scan (ChunkAppend) on metrics_timestamptz m1 + Order: m1."time" + -> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 + -> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 + -> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 + -> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4 + -> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5 + -> Limit + -> Custom Scan (ChunkAppend) on metrics_timestamptz m2 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_1 + Index Cond: ("time" = m1."time") + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_2 + Index Cond: ("time" = m1."time") + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3 + Index Cond: ("time" = m1."time") + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_4 + Index Cond: ("time" = m1."time") + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_5 + Index Cond: ("time" = m1."time") +(20 rows) + +-- test runtime exclusion with LATERAL and generate_series +:PREFIX SELECT g.time FROM generate_series('2000-01-01'::timestamptz, '2000-02-01'::timestamptz, '1d'::interval) g(time) LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m WHERE m.time=g.time LIMIT 1) m ON true; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------- + Nested Loop Left Join + -> Function Scan on generate_series g + -> Limit + -> Custom Scan (ChunkAppend) on metrics_timestamptz m + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1 + Index Cond: ("time" = g."time") + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2 + Index Cond: ("time" = g."time") + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3 + Index Cond: ("time" = g."time") + -> Index Only Scan using 
_hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4 + Index Cond: ("time" = g."time") + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5 + Index Cond: ("time" = g."time") +(14 rows) + +:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time) m ON true; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------- + Merge Join + Merge Cond: (m."time" = g."time") + -> Merge Append + Sort Key: m."time" + -> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m + -> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_1 + -> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_2 + -> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_3 + -> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_4 + -> Sort + Sort Key: g."time" + -> Function Scan on generate_series g +(12 rows) + +:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time ORDER BY time) m ON true; + QUERY PLAN +----------------------------------------------------------------------------------------------------------- + Nested Loop + -> Function Scan on generate_series g + -> Custom Scan (ChunkAppend) on metrics_timestamptz m + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1 + Index Cond: ("time" = g."time") + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2 + Index Cond: ("time" = g."time") + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3 + Index Cond: ("time" = g."time") + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4 + Index Cond: ("time" = g."time") + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5 + Index Cond: ("time" = g."time") +(13 rows) + +-- test runtime exclusion with subquery +:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE m1.time=(SELECT max(time) FROM metrics_timestamptz); + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on metrics_timestamptz m1 + InitPlan 2 (returns $1) + -> Result + InitPlan 1 (returns $0) + -> Limit + -> Custom Scan (ChunkAppend) on metrics_timestamptz + Order: metrics_timestamptz."time" DESC + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk + Index Cond: ("time" IS NOT NULL) + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk + Index Cond: ("time" IS NOT NULL) + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk + Index Cond: ("time" IS NOT NULL) + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk + Index Cond: ("time" IS NOT NULL) + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on 
_hyper_5_17_chunk + Index Cond: ("time" IS NOT NULL) + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 + Index Cond: ("time" = $1) + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 + Index Cond: ("time" = $1) + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 + Index Cond: ("time" = $1) + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4 + Index Cond: ("time" = $1) + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5 + Index Cond: ("time" = $1) +(27 rows) + +-- test runtime exclusion with correlated subquery +:PREFIX SELECT m1.time, (SELECT m2.time FROM metrics_timestamptz m2 WHERE m2.time < m1.time ORDER BY m2.time DESC LIMIT 1) FROM metrics_timestamptz m1 WHERE m1.time < '2000-01-10' ORDER BY m1.time; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on metrics_timestamptz m1 + Order: m1."time" + -> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 + Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + SubPlan 1 + -> Limit + -> Custom Scan (ChunkAppend) on metrics_timestamptz m2 + Order: m2."time" DESC + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_1 + Index Cond: ("time" < m1_1."time") + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_2 + Index Cond: ("time" < m1_1."time") + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3 + Index Cond: ("time" < m1_1."time") + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_4 + Index Cond: ("time" < m1_1."time") + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_5 + Index Cond: ("time" < m1_1."time") + -> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 + Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) +(20 rows) + +-- test EXISTS +:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE EXISTS(SELECT 1 FROM metrics_timestamptz m2 WHERE m1.time < m2.time) ORDER BY m1.time DESC limit 1000; + QUERY PLAN +------------------------------------------------------------------------------------------------------------------ + Limit + -> Nested Loop Semi Join + -> Custom Scan (ChunkAppend) on metrics_timestamptz m1 + Order: m1."time" DESC + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_1 + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_2 + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_4 + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_5 -> Append - -> Sample Scan on _hyper_1_3_chunk - Sampling: system ('1'::real) - Filter: ("time" > (now_s() - '@ 400 days'::interval)) - -> Sample Scan on _hyper_1_2_chunk - Sampling: system ('1'::real) - Filter: ("time" > (now_s() - '@ 400 days'::interval)) - -> Sample Scan on 
_hyper_1_1_chunk - Sampling: system ('1'::real) - Filter: ("time" > (now_s() - '@ 400 days'::interval)) -(15 rows) + -> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2 + Index Cond: ("time" > m1."time") + -> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_1 + Index Cond: ("time" > m1."time") + -> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_2 + Index Cond: ("time" > m1."time") + -> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_3 + Index Cond: ("time" > m1."time") + -> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_4 + Index Cond: ("time" > m1."time") +(20 rows) --generate the results into two different files \set ECHO errors diff --git a/test/expected/delete.out b/test/expected/delete.out index a66610a8a..f4284eb5c 100644 --- a/test/expected/delete.out +++ b/test/expected/delete.out @@ -78,38 +78,34 @@ $BODY$; EXPLAIN (costs off) SELECT FROM "two_Partitions" WHERE series_1 IN (SELECT series_1 FROM "two_Partitions" WHERE series_1 > series_val()); - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------ Hash Join Hash Cond: ("two_Partitions".series_1 = "two_Partitions_1".series_1) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: two_Partitions - Chunks left after exclusion: 4 - -> Append - -> Index Only Scan using "_hyper_1_1_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_1_chunk - Index Cond: (series_1 > (series_val())::double precision) - -> Index Only Scan using "_hyper_1_2_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_2_chunk - Index Cond: (series_1 > (series_val())::double precision) - -> Index Only Scan using "_hyper_1_3_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_3_chunk - Index Cond: (series_1 > (series_val())::double precision) - -> Index Only Scan using "_hyper_1_4_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_4_chunk - Index Cond: (series_1 > (series_val())::double precision) + -> Custom Scan (ChunkAppend) on "two_Partitions" + Chunks excluded during startup: 0 + -> Index Only Scan using "_hyper_1_1_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_1_chunk + Index Cond: (series_1 > (series_val())::double precision) + -> Index Only Scan using "_hyper_1_2_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_2_chunk + Index Cond: (series_1 > (series_val())::double precision) + -> Index Only Scan using "_hyper_1_3_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_3_chunk + Index Cond: (series_1 > (series_val())::double precision) + -> Index Only Scan using "_hyper_1_4_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_4_chunk + Index Cond: (series_1 > (series_val())::double precision) -> Hash -> HashAggregate Group Key: "two_Partitions_1".series_1 - -> Custom Scan (ConstraintAwareAppend) - Hypertable: two_Partitions - Chunks left after exclusion: 4 - -> Append - -> Index Only Scan using "_hyper_1_1_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_1_chunk _hyper_1_1_chunk_1 - Index Cond: (series_1 > (series_val())::double precision) - -> Index Only Scan using 
"_hyper_1_2_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_2_chunk _hyper_1_2_chunk_1 - Index Cond: (series_1 > (series_val())::double precision) - -> Index Only Scan using "_hyper_1_3_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_3_chunk _hyper_1_3_chunk_1 - Index Cond: (series_1 > (series_val())::double precision) - -> Index Only Scan using "_hyper_1_4_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_4_chunk _hyper_1_4_chunk_1 - Index Cond: (series_1 > (series_val())::double precision) -(29 rows) + -> Custom Scan (ChunkAppend) on "two_Partitions" "two_Partitions_1" + Chunks excluded during startup: 0 + -> Index Only Scan using "_hyper_1_1_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_1_chunk _hyper_1_1_chunk_1 + Index Cond: (series_1 > (series_val())::double precision) + -> Index Only Scan using "_hyper_1_2_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_2_chunk _hyper_1_2_chunk_1 + Index Cond: (series_1 > (series_val())::double precision) + -> Index Only Scan using "_hyper_1_3_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_3_chunk _hyper_1_3_chunk_1 + Index Cond: (series_1 > (series_val())::double precision) + -> Index Only Scan using "_hyper_1_4_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_4_chunk _hyper_1_4_chunk_1 + Index Cond: (series_1 > (series_val())::double precision) +(25 rows) -- ConstraintAwareAppend NOT applied for DELETE EXPLAIN (costs off) diff --git a/test/expected/parallel-10.out b/test/expected/parallel-10.out index 82dea77f0..526a00ca4 100644 --- a/test/expected/parallel-10.out +++ b/test/expected/parallel-10.out @@ -180,13 +180,15 @@ SELECT histogram(i, 10, 100000, 5) FROM "test"; Single Copy: true -> Result (actual rows=1000000 loops=1) One-Time Filter: (length(version()) > 0) - -> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1) - Hypertable: test - Chunks left after exclusion: 2 - -> Append (actual rows=1000000 loops=1) + -> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1) + Chunks excluded during startup: 0 + -> Result (actual rows=500000 loops=1) + One-Time Filter: (length(version()) > 0) -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) + -> Result (actual rows=500000 loops=1) + One-Time Filter: (length(version()) > 0) -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) -(12 rows) +(14 rows) -- test constraint aware append with parallel aggregation SET max_parallel_workers_per_gather = 1; @@ -199,13 +201,15 @@ EXPLAIN (costs off) SELECT count(*) FROM "test" WHERE length(version()) > 0; -> Partial Aggregate -> Result One-Time Filter: (length(version()) > 0) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: test - Chunks left after exclusion: 2 - -> Append + -> Custom Scan (ChunkAppend) on test + Chunks excluded during startup: 0 + -> Result + One-Time Filter: (length(version()) > 0) -> Parallel Seq Scan on _hyper_1_1_chunk + -> Result + One-Time Filter: (length(version()) > 0) -> Parallel Seq Scan on _hyper_1_2_chunk -(12 rows) +(14 rows) SELECT count(*) FROM "test" WHERE length(version()) > 0; count @@ -218,50 +222,44 @@ SET max_parallel_workers_per_gather = 4; -- in a query will prevent parallelism but CURRENT_TIMESTAMP and -- transaction_timestamp() are marked parallel safe :PREFIX SELECT i FROM "test" WHERE ts < CURRENT_TIMESTAMP; - QUERY PLAN ------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------- Gather (actual 
rows=1000000 loops=1) Workers Planned: 1 Workers Launched: 1 Single Copy: true - -> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1) - Hypertable: test - Chunks left after exclusion: 2 - -> Append (actual rows=1000000 loops=1) - -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) - Filter: (ts < CURRENT_TIMESTAMP) - -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) - Filter: (ts < CURRENT_TIMESTAMP) -(12 rows) + -> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1) + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) + Filter: (ts < CURRENT_TIMESTAMP) + -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) + Filter: (ts < CURRENT_TIMESTAMP) +(10 rows) :PREFIX SELECT i FROM "test" WHERE ts < transaction_timestamp(); - QUERY PLAN ------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------- Gather (actual rows=1000000 loops=1) Workers Planned: 1 Workers Launched: 1 Single Copy: true - -> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1) - Hypertable: test - Chunks left after exclusion: 2 - -> Append (actual rows=1000000 loops=1) - -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) - Filter: (ts < transaction_timestamp()) - -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) - Filter: (ts < transaction_timestamp()) -(12 rows) + -> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1) + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) + Filter: (ts < transaction_timestamp()) + -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) + Filter: (ts < transaction_timestamp()) +(10 rows) -- this won't be parallel query because now() is parallel restricted in PG < 12 :PREFIX SELECT i FROM "test" WHERE ts < now(); - QUERY PLAN ------------------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1) - Hypertable: test - Chunks left after exclusion: 2 - -> Append (actual rows=1000000 loops=1) - -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) - Filter: (ts < now()) - -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) - Filter: (ts < now()) -(8 rows) + QUERY PLAN +----------------------------------------------------------------- + Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1) + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) + Filter: (ts < now()) + -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) + Filter: (ts < now()) +(6 rows) diff --git a/test/expected/parallel-11.out b/test/expected/parallel-11.out index 6f6b656f9..dc78803de 100644 --- a/test/expected/parallel-11.out +++ b/test/expected/parallel-11.out @@ -179,13 +179,15 @@ SELECT histogram(i, 10, 100000, 5) FROM "test"; Single Copy: true -> Result (actual rows=1000000 loops=1) One-Time Filter: (length(version()) > 0) - -> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1) - Hypertable: test - Chunks left after exclusion: 2 - -> Append (actual rows=1000000 loops=1) + -> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1) + Chunks excluded during startup: 0 + -> Result (actual rows=500000 loops=1) + One-Time Filter: (length(version()) > 0) -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) + -> Result (actual rows=500000 loops=1) + One-Time Filter: 
(length(version()) > 0) -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) -(12 rows) +(14 rows) -- test constraint aware append with parallel aggregation SET max_parallel_workers_per_gather = 1; @@ -198,13 +200,15 @@ EXPLAIN (costs off) SELECT count(*) FROM "test" WHERE length(version()) > 0; -> Partial Aggregate -> Result One-Time Filter: (length(version()) > 0) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: test - Chunks left after exclusion: 2 - -> Parallel Append + -> Custom Scan (ChunkAppend) on test + Chunks excluded during startup: 0 + -> Result + One-Time Filter: (length(version()) > 0) -> Parallel Seq Scan on _hyper_1_1_chunk + -> Result + One-Time Filter: (length(version()) > 0) -> Parallel Seq Scan on _hyper_1_2_chunk -(12 rows) +(14 rows) SELECT count(*) FROM "test" WHERE length(version()) > 0; count @@ -217,50 +221,44 @@ SET max_parallel_workers_per_gather = 4; -- in a query will prevent parallelism but CURRENT_TIMESTAMP and -- transaction_timestamp() are marked parallel safe :PREFIX SELECT i FROM "test" WHERE ts < CURRENT_TIMESTAMP; - QUERY PLAN ------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------- Gather (actual rows=1000000 loops=1) Workers Planned: 1 Workers Launched: 1 Single Copy: true - -> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1) - Hypertable: test - Chunks left after exclusion: 2 - -> Append (actual rows=1000000 loops=1) - -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) - Filter: (ts < CURRENT_TIMESTAMP) - -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) - Filter: (ts < CURRENT_TIMESTAMP) -(12 rows) + -> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1) + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) + Filter: (ts < CURRENT_TIMESTAMP) + -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) + Filter: (ts < CURRENT_TIMESTAMP) +(10 rows) :PREFIX SELECT i FROM "test" WHERE ts < transaction_timestamp(); - QUERY PLAN ------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------- Gather (actual rows=1000000 loops=1) Workers Planned: 1 Workers Launched: 1 Single Copy: true - -> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1) - Hypertable: test - Chunks left after exclusion: 2 - -> Append (actual rows=1000000 loops=1) - -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) - Filter: (ts < transaction_timestamp()) - -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) - Filter: (ts < transaction_timestamp()) -(12 rows) + -> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1) + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) + Filter: (ts < transaction_timestamp()) + -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) + Filter: (ts < transaction_timestamp()) +(10 rows) -- this won't be parallel query because now() is parallel restricted in PG < 12 :PREFIX SELECT i FROM "test" WHERE ts < now(); - QUERY PLAN ------------------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1) - Hypertable: test - Chunks left after exclusion: 2 - -> Append (actual rows=1000000 loops=1) - -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) - Filter: (ts < now()) - -> Seq 
Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) - Filter: (ts < now()) -(8 rows) + QUERY PLAN +----------------------------------------------------------------- + Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1) + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1) + Filter: (ts < now()) + -> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1) + Filter: (ts < now()) +(6 rows) diff --git a/test/expected/parallel-9.6.out b/test/expected/parallel-9.6.out index 9351d35d6..d33fa81ed 100644 --- a/test/expected/parallel-9.6.out +++ b/test/expected/parallel-9.6.out @@ -172,20 +172,22 @@ SELECT histogram(i, 10, 100000, 5) FROM "test"; -- test constraint aware append :PREFIX SELECT i FROM "test" WHERE length(version()) > 0; - QUERY PLAN ------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------- Gather Workers Planned: 1 Single Copy: true -> Result One-Time Filter: (length(version()) > 0) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: test - Chunks left after exclusion: 2 - -> Append + -> Custom Scan (ChunkAppend) on test + Chunks excluded during startup: 0 + -> Result + One-Time Filter: (length(version()) > 0) -> Seq Scan on _hyper_1_1_chunk + -> Result + One-Time Filter: (length(version()) > 0) -> Seq Scan on _hyper_1_2_chunk -(11 rows) +(13 rows) -- test constraint aware append with parallel aggregation SET max_parallel_workers_per_gather = 1; @@ -198,13 +200,15 @@ EXPLAIN (costs off) SELECT count(*) FROM "test" WHERE length(version()) > 0; -> Partial Aggregate -> Result One-Time Filter: (length(version()) > 0) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: test - Chunks left after exclusion: 2 - -> Append + -> Custom Scan (ChunkAppend) on test + Chunks excluded during startup: 0 + -> Result + One-Time Filter: (length(version()) > 0) -> Parallel Seq Scan on _hyper_1_1_chunk + -> Result + One-Time Filter: (length(version()) > 0) -> Parallel Seq Scan on _hyper_1_2_chunk -(12 rows) +(14 rows) SELECT count(*) FROM "test" WHERE length(version()) > 0; count @@ -217,45 +221,39 @@ SET max_parallel_workers_per_gather = 4; -- in a query will prevent parallelism but CURRENT_TIMESTAMP and -- transaction_timestamp() are marked parallel safe :PREFIX SELECT i FROM "test" WHERE ts < CURRENT_TIMESTAMP; - QUERY PLAN ------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: test - Chunks left after exclusion: 2 - -> Append - -> Seq Scan on _hyper_1_1_chunk - Filter: (ts < now()) - -> Seq Scan on _hyper_1_2_chunk - Filter: (ts < now()) -(8 rows) + QUERY PLAN +------------------------------------- + Custom Scan (ChunkAppend) on test + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk + Filter: (ts < now()) + -> Seq Scan on _hyper_1_2_chunk + Filter: (ts < now()) +(6 rows) :PREFIX SELECT i FROM "test" WHERE ts < transaction_timestamp(); - QUERY PLAN ------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------ Gather Workers Planned: 1 Single Copy: true - -> Custom Scan (ConstraintAwareAppend) - Hypertable: test - Chunks left after exclusion: 2 - -> Append - -> Seq Scan on _hyper_1_1_chunk - Filter: (ts < transaction_timestamp()) - -> Seq Scan on _hyper_1_2_chunk - Filter: (ts < transaction_timestamp()) -(11 rows) + -> Custom Scan (ChunkAppend) on test + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk + Filter: (ts < 
transaction_timestamp()) + -> Seq Scan on _hyper_1_2_chunk + Filter: (ts < transaction_timestamp()) +(9 rows) -- this won't be parallel query because now() is parallel restricted in PG < 12 :PREFIX SELECT i FROM "test" WHERE ts < now(); - QUERY PLAN ------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: test - Chunks left after exclusion: 2 - -> Append - -> Seq Scan on _hyper_1_1_chunk - Filter: (ts < now()) - -> Seq Scan on _hyper_1_2_chunk - Filter: (ts < now()) -(8 rows) + QUERY PLAN +------------------------------------- + Custom Scan (ChunkAppend) on test + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk + Filter: (ts < now()) + -> Seq Scan on _hyper_1_2_chunk + Filter: (ts < now()) +(6 rows) diff --git a/test/expected/plan_expand_hypertable-10.out b/test/expected/plan_expand_hypertable-10.out index 4a803f092..40c0262af 100644 --- a/test/expected/plan_expand_hypertable-10.out +++ b/test/expected/plan_expand_hypertable-10.out @@ -764,73 +764,38 @@ SELECT * FROM cte ORDER BY value; QUERY PLAN ------------------------------------------------------------------------------------------ Sort - Sort Key: _hyper_3_116_chunk.value - -> Append + Sort Key: hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts + Chunks excluded during startup: 6 -> Seq Scan on _hyper_3_116_chunk Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) -> Seq Scan on _hyper_3_117_chunk Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_118_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_119_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_120_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_121_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_122_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_123_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) -(19 rows) +(8 rows) :PREFIX SELECT * FROM hyper_ts WHERE time < ('Wed Dec 31 16:00:10 1969'::timestamp::timestamptz) ORDER BY value; QUERY PLAN ---------------------------------------------------------------------------------------------------------------------- Sort - Sort Key: _hyper_3_116_chunk.value - -> Append + Sort Key: hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts + Chunks excluded during startup: 6 -> Seq Scan on _hyper_3_116_chunk Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) -> Seq Scan on _hyper_3_117_chunk Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_118_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_119_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_120_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_121_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_122_chunk - Filter: ("time" < ('Wed Dec 31 
16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_123_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) -(19 rows) +(8 rows) :PREFIX SELECT * FROM hyper_ts WHERE NOW() < time ORDER BY value; - QUERY PLAN --------------------------------------------- + QUERY PLAN +--------------------------------------------- Sort - Sort Key: _hyper_3_116_chunk.value - -> Append - -> Seq Scan on _hyper_3_116_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_117_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_118_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_119_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_120_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_121_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_122_chunk - Filter: (now() < "time") + Sort Key: hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts + Chunks excluded during startup: 7 -> Seq Scan on _hyper_3_123_chunk Filter: (now() < "time") -(19 rows) +(6 rows) --joins :PREFIX SELECT * FROM hyper_ts WHERE tag_id IN (SELECT id FROM tag WHERE tag.id=1) and time < to_timestamp(10) and device_id = 'dev1' ORDER BY value; @@ -850,8 +815,8 @@ SELECT * FROM cte ORDER BY value; QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Sort Key: _hyper_3_116_chunk.value - -> Append + Sort Key: hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts -> Seq Scan on _hyper_3_116_chunk Filter: ((hashed SubPlan 1) OR (("time" < 'Wed Dec 31 16:00:10 1969 PST'::timestamp with time zone) AND (device_id = 'dev1'::text))) SubPlan 1 diff --git a/test/expected/plan_expand_hypertable-11.out b/test/expected/plan_expand_hypertable-11.out index 2a8b1b804..8c0cb222b 100644 --- a/test/expected/plan_expand_hypertable-11.out +++ b/test/expected/plan_expand_hypertable-11.out @@ -764,73 +764,38 @@ SELECT * FROM cte ORDER BY value; QUERY PLAN ------------------------------------------------------------------------------------------ Sort - Sort Key: _hyper_3_116_chunk.value - -> Append + Sort Key: hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts + Chunks excluded during startup: 6 -> Seq Scan on _hyper_3_116_chunk Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) -> Seq Scan on _hyper_3_117_chunk Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_118_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_119_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_120_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_121_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_122_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_123_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) -(19 rows) +(8 rows) :PREFIX SELECT * FROM hyper_ts WHERE time < ('Wed Dec 31 16:00:10 1969'::timestamp::timestamptz) ORDER BY value; QUERY PLAN ---------------------------------------------------------------------------------------------------------------------- Sort - Sort Key: _hyper_3_116_chunk.value - -> Append + Sort Key: 
hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts + Chunks excluded during startup: 6 -> Seq Scan on _hyper_3_116_chunk Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) -> Seq Scan on _hyper_3_117_chunk Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_118_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_119_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_120_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_121_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_122_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_123_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) -(19 rows) +(8 rows) :PREFIX SELECT * FROM hyper_ts WHERE NOW() < time ORDER BY value; - QUERY PLAN --------------------------------------------- + QUERY PLAN +--------------------------------------------- Sort - Sort Key: _hyper_3_116_chunk.value - -> Append - -> Seq Scan on _hyper_3_116_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_117_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_118_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_119_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_120_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_121_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_122_chunk - Filter: (now() < "time") + Sort Key: hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts + Chunks excluded during startup: 7 -> Seq Scan on _hyper_3_123_chunk Filter: (now() < "time") -(19 rows) +(6 rows) --joins :PREFIX SELECT * FROM hyper_ts WHERE tag_id IN (SELECT id FROM tag WHERE tag.id=1) and time < to_timestamp(10) and device_id = 'dev1' ORDER BY value; @@ -850,8 +815,8 @@ SELECT * FROM cte ORDER BY value; QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Sort Key: _hyper_3_116_chunk.value - -> Append + Sort Key: hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts -> Seq Scan on _hyper_3_116_chunk Filter: ((hashed SubPlan 1) OR (("time" < 'Wed Dec 31 16:00:10 1969 PST'::timestamp with time zone) AND (device_id = 'dev1'::text))) SubPlan 1 diff --git a/test/expected/plan_expand_hypertable-9.6.out b/test/expected/plan_expand_hypertable-9.6.out index f4bc9d381..6b62a63e2 100644 --- a/test/expected/plan_expand_hypertable-9.6.out +++ b/test/expected/plan_expand_hypertable-9.6.out @@ -764,73 +764,38 @@ SELECT * FROM cte ORDER BY value; QUERY PLAN ------------------------------------------------------------------------------------------ Sort - Sort Key: _hyper_3_116_chunk.value - -> Append + Sort Key: hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts + Chunks excluded during startup: 6 -> Seq Scan on _hyper_3_116_chunk Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) -> Seq Scan on _hyper_3_117_chunk Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - 
-> Seq Scan on _hyper_3_118_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_119_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_120_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_121_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_122_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) - -> Seq Scan on _hyper_3_123_chunk - Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone) -(19 rows) +(8 rows) :PREFIX SELECT * FROM hyper_ts WHERE time < ('Wed Dec 31 16:00:10 1969'::timestamp::timestamptz) ORDER BY value; QUERY PLAN ---------------------------------------------------------------------------------------------------------------------- Sort - Sort Key: _hyper_3_116_chunk.value - -> Append + Sort Key: hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts + Chunks excluded during startup: 6 -> Seq Scan on _hyper_3_116_chunk Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) -> Seq Scan on _hyper_3_117_chunk Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_118_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_119_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_120_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_121_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_122_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) - -> Seq Scan on _hyper_3_123_chunk - Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone) -(19 rows) +(8 rows) :PREFIX SELECT * FROM hyper_ts WHERE NOW() < time ORDER BY value; - QUERY PLAN --------------------------------------------- + QUERY PLAN +--------------------------------------------- Sort - Sort Key: _hyper_3_116_chunk.value - -> Append - -> Seq Scan on _hyper_3_116_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_117_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_118_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_119_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_120_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_121_chunk - Filter: (now() < "time") - -> Seq Scan on _hyper_3_122_chunk - Filter: (now() < "time") + Sort Key: hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts + Chunks excluded during startup: 7 -> Seq Scan on _hyper_3_123_chunk Filter: (now() < "time") -(19 rows) +(6 rows) --joins :PREFIX SELECT * FROM hyper_ts WHERE tag_id IN (SELECT id FROM tag WHERE tag.id=1) and time < to_timestamp(10) and device_id = 'dev1' ORDER BY value; @@ -850,8 +815,8 @@ SELECT * FROM cte ORDER BY value; QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Sort Key: _hyper_3_116_chunk.value - -> Append + Sort Key: 
hyper_ts.value + -> Custom Scan (ChunkAppend) on hyper_ts -> Seq Scan on _hyper_3_116_chunk Filter: ((hashed SubPlan 1) OR (("time" < 'Wed Dec 31 16:00:10 1969 PST'::timestamp with time zone) AND (device_id = 'dev1'::text))) SubPlan 1 diff --git a/test/expected/plan_ordered_append-10.out b/test/expected/plan_ordered_append-10.out index 2b8018908..4d6ed7ac4 100644 --- a/test/expected/plan_ordered_append-10.out +++ b/test/expected/plan_ordered_append-10.out @@ -1241,8 +1241,7 @@ LEFT OUTER JOIN LATERAL( -> Custom Scan (ChunkAppend) on ordered_append o (actual rows=0 loops=3) Order: o."time" DESC Chunks excluded during startup: 3 - Chunks excluded during runtime: 0 -(7 rows) +(6 rows) -- test CTE -- no chunk exclusion for CTE because cte query is not pulled up @@ -1321,13 +1320,14 @@ ORDER BY o1.time; FROM ordered_append WHERE time = (SELECT max(time) FROM ordered_append) ORDER BY time; QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------- - Append (actual rows=3 loops=1) + Custom Scan (ChunkAppend) on ordered_append (actual rows=3 loops=1) + Chunks excluded during runtime: 2 InitPlan 2 (returns $1) -> Result (actual rows=1 loops=1) InitPlan 1 (returns $0) -> Limit (actual rows=1 loops=1) - -> Custom Scan (ChunkAppend) on ordered_append (actual rows=1 loops=1) - Order: ordered_append."time" DESC + -> Custom Scan (ChunkAppend) on ordered_append ordered_append_1 (actual rows=1 loops=1) + Order: ordered_append_1."time" DESC -> Index Only Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk _hyper_1_3_chunk_1 (actual rows=1 loops=1) Index Cond: ("time" IS NOT NULL) Heap Fetches: 1 @@ -1337,13 +1337,13 @@ FROM ordered_append WHERE time = (SELECT max(time) FROM ordered_append) ORDER BY -> Index Only Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk _hyper_1_1_chunk_1 (never executed) Index Cond: ("time" IS NOT NULL) Heap Fetches: 0 - -> Index Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk (actual rows=0 loops=1) + -> Index Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk (never executed) Index Cond: ("time" = $1) - -> Index Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk (actual rows=0 loops=1) + -> Index Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk (never executed) Index Cond: ("time" = $1) -> Index Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk (actual rows=3 loops=1) Index Cond: ("time" = $1) -(22 rows) +(23 rows) -- test join against max query -- not ChunkAppend so no chunk exclusion @@ -1569,6 +1569,92 @@ LEFT OUTER JOIN LATERAL( -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed) (14 rows) +-- test JOIN on time column with USING +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 USING(time) ORDER BY o1.time LIMIT 100; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------- + Limit (actual rows=100 loops=1) + -> Merge Join (actual rows=100 loops=1) + Merge Cond: (o1."time" = o2."time") + -> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=34 loops=1) + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=34 loops=1) + -> Index Scan 
Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed) + -> Materialize (actual rows=100 loops=1) + -> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=34 loops=1) + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed) +(14 rows) + +-- test NATURAL JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 NATURAL INNER JOIN ordered_append o2 ORDER BY o1.time LIMIT 100; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- + Limit (actual rows=100 loops=1) + -> Merge Join (actual rows=100 loops=1) + Merge Cond: (o1."time" = o2."time") + Join Filter: ((o1.device_id = o2.device_id) AND (o1.value = o2.value)) + Rows Removed by Join Filter: 198 + -> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=100 loops=1) + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=100 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed) + -> Materialize (actual rows=298 loops=1) + -> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=100 loops=1) + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=100 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed) +(16 rows) + +-- test LEFT JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 LEFT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o1.time LIMIT 100; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------- + Limit (actual rows=100 loops=1) + -> Merge Left Join (actual rows=100 loops=1) + Merge Cond: (o1."time" = o2."time") + -> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=34 loops=1) + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed) + -> Materialize (actual rows=100 loops=1) + -> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=34 loops=1) + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed) + -> Index Scan Backward using 
_hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed) +(14 rows) + +-- test RIGHT JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 RIGHT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o2.time LIMIT 100; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------- + Limit (actual rows=100 loops=1) + -> Merge Left Join (actual rows=100 loops=1) + Merge Cond: (o2."time" = o1."time") + -> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=34 loops=1) + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed) + -> Materialize (actual rows=100 loops=1) + -> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=34 loops=1) + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed) +(14 rows) + -- test JOIN on time column with ON clause expression order switched -- should use 2 ChunkAppend :PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 ON o2.time = o1.time ORDER BY o1.time LIMIT 100; diff --git a/test/expected/plan_ordered_append-11.out b/test/expected/plan_ordered_append-11.out index 6f900c809..5bc0184f9 100644 --- a/test/expected/plan_ordered_append-11.out +++ b/test/expected/plan_ordered_append-11.out @@ -1241,8 +1241,7 @@ LEFT OUTER JOIN LATERAL( -> Custom Scan (ChunkAppend) on ordered_append o (actual rows=0 loops=3) Order: o."time" DESC Chunks excluded during startup: 3 - Chunks excluded during runtime: 0 -(7 rows) +(6 rows) -- test CTE -- no chunk exclusion for CTE because cte query is not pulled up @@ -1321,13 +1320,14 @@ ORDER BY o1.time; FROM ordered_append WHERE time = (SELECT max(time) FROM ordered_append) ORDER BY time; QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------- - Append (actual rows=3 loops=1) + Custom Scan (ChunkAppend) on ordered_append (actual rows=3 loops=1) + Chunks excluded during runtime: 2 InitPlan 2 (returns $1) -> Result (actual rows=1 loops=1) InitPlan 1 (returns $0) -> Limit (actual rows=1 loops=1) - -> Custom Scan (ChunkAppend) on ordered_append (actual rows=1 loops=1) - Order: ordered_append."time" DESC + -> Custom Scan (ChunkAppend) on ordered_append ordered_append_1 (actual rows=1 loops=1) + Order: ordered_append_1."time" DESC -> Index Only Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk _hyper_1_3_chunk_1 (actual rows=1 loops=1) Index Cond: ("time" IS NOT NULL) Heap Fetches: 1 @@ -1337,13 +1337,13 @@ FROM ordered_append WHERE time = (SELECT max(time) FROM ordered_append) ORDER BY -> Index Only Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk _hyper_1_1_chunk_1 (never executed) Index Cond: ("time" IS NOT NULL) Heap Fetches: 0 - -> Index Scan using _hyper_1_1_chunk_ordered_append_time_idx 
on _hyper_1_1_chunk (actual rows=0 loops=1) + -> Index Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk (never executed) Index Cond: ("time" = $1) - -> Index Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk (actual rows=0 loops=1) + -> Index Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk (never executed) Index Cond: ("time" = $1) -> Index Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk (actual rows=3 loops=1) Index Cond: ("time" = $1) -(22 rows) +(23 rows) -- test join against max query -- not ChunkAppend so no chunk exclusion @@ -1569,6 +1569,92 @@ LEFT OUTER JOIN LATERAL( -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed) (14 rows) +-- test JOIN on time column with USING +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 USING(time) ORDER BY o1.time LIMIT 100; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------- + Limit (actual rows=100 loops=1) + -> Merge Join (actual rows=100 loops=1) + Merge Cond: (o1."time" = o2."time") + -> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=34 loops=1) + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed) + -> Materialize (actual rows=100 loops=1) + -> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=34 loops=1) + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed) +(14 rows) + +-- test NATURAL JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 NATURAL INNER JOIN ordered_append o2 ORDER BY o1.time LIMIT 100; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- + Limit (actual rows=100 loops=1) + -> Merge Join (actual rows=100 loops=1) + Merge Cond: (o1."time" = o2."time") + Join Filter: ((o1.device_id = o2.device_id) AND (o1.value = o2.value)) + Rows Removed by Join Filter: 198 + -> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=100 loops=1) + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=100 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed) + -> Materialize (actual rows=298 loops=1) + -> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=100 loops=1) + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=100 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk 
o2_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed) +(16 rows) + +-- test LEFT JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 LEFT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o1.time LIMIT 100; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------- + Limit (actual rows=100 loops=1) + -> Merge Left Join (actual rows=100 loops=1) + Merge Cond: (o1."time" = o2."time") + -> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=34 loops=1) + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed) + -> Materialize (actual rows=100 loops=1) + -> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=34 loops=1) + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed) +(14 rows) + +-- test RIGHT JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 RIGHT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o2.time LIMIT 100; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------- + Limit (actual rows=100 loops=1) + -> Merge Left Join (actual rows=100 loops=1) + Merge Cond: (o2."time" = o1."time") + -> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=34 loops=1) + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed) + -> Materialize (actual rows=100 loops=1) + -> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=34 loops=1) + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=34 loops=1) + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed) + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed) +(14 rows) + -- test JOIN on time column with ON clause expression order switched -- should use 2 ChunkAppend :PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 ON o2.time = o1.time ORDER BY o1.time LIMIT 100; diff --git a/test/expected/plan_ordered_append-9.6.out b/test/expected/plan_ordered_append-9.6.out index b59bf7366..29da94ab0 100644 --- a/test/expected/plan_ordered_append-9.6.out +++ b/test/expected/plan_ordered_append-9.6.out @@ -1129,14 +1129,13 @@ LEFT OUTER JOIN LATERAL( -> Custom Scan (ChunkAppend) on ordered_append o Order: o."time" DESC Chunks excluded during startup: 0 
- Chunks excluded during runtime: 3 -> Index Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o_1 Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval))) -> Index Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o_2 Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval))) -> Index Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o_3 Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval))) -(13 rows) +(12 rows) -- test LATERAL with correlated query -- only 2nd chunk should be executed @@ -1154,14 +1153,13 @@ LEFT OUTER JOIN LATERAL( -> Custom Scan (ChunkAppend) on ordered_append o Order: o."time" Chunks excluded during startup: 0 - Chunks excluded during runtime: 3 -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o_1 Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval))) -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o_2 Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval))) -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o_3 Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval))) -(13 rows) +(12 rows) -- test startup and runtime exclusion together :PREFIX SELECT * @@ -1178,14 +1176,13 @@ LEFT OUTER JOIN LATERAL( -> Custom Scan (ChunkAppend) on ordered_append o Order: o."time" DESC Chunks excluded during startup: 0 - Chunks excluded during runtime: 3 -> Index Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o_1 Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)) AND ("time" < now())) -> Index Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o_2 Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)) AND ("time" < now())) -> Index Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o_3 Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)) AND ("time" < now())) -(13 rows) +(12 rows) -- test startup and runtime exclusion together -- all chunks should be filtered @@ -1203,8 +1200,7 @@ LEFT OUTER JOIN LATERAL( -> Custom Scan (ChunkAppend) on ordered_append o Order: o."time" DESC Chunks excluded during startup: 3 - Chunks excluded during runtime: 0 -(7 rows) +(6 rows) -- test CTE -- no chunk exclusion for CTE because cte query is not pulled up @@ -1283,13 +1279,13 @@ ORDER BY o1.time; FROM ordered_append WHERE time = (SELECT max(time) FROM ordered_append) ORDER BY time; QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------- - Append + Custom Scan (ChunkAppend) on ordered_append InitPlan 2 (returns $1) -> Result InitPlan 1 (returns $0) -> Limit - -> Custom Scan (ChunkAppend) on ordered_append - Order: ordered_append."time" DESC + -> Custom Scan (ChunkAppend) on ordered_append ordered_append_1 + Order: ordered_append_1."time" DESC -> Index Only Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk _hyper_1_3_chunk_1 Index Cond: ("time" IS NOT NULL) -> Index Only Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk _hyper_1_2_chunk_1 @@ -1524,6 +1520,91 @@ LEFT OUTER JOIN LATERAL( -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (14 
rows) +-- test JOIN on time column with USING +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 USING(time) ORDER BY o1.time LIMIT 100; + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------- + Limit + -> Merge Join + Merge Cond: (o1."time" = o2."time") + -> Custom Scan (ChunkAppend) on ordered_append o1 + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 + -> Materialize + -> Custom Scan (ChunkAppend) on ordered_append o2 + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 +(14 rows) + +-- test NATURAL JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 NATURAL INNER JOIN ordered_append o2 ORDER BY o1.time LIMIT 100; + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------- + Limit + -> Merge Join + Merge Cond: (o1."time" = o2."time") + Join Filter: ((o1.device_id = o2.device_id) AND (o1.value = o2.value)) + -> Custom Scan (ChunkAppend) on ordered_append o1 + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 + -> Materialize + -> Custom Scan (ChunkAppend) on ordered_append o2 + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 +(15 rows) + +-- test LEFT JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 LEFT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o1.time LIMIT 100; + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------- + Limit + -> Merge Left Join + Merge Cond: (o1."time" = o2."time") + -> Custom Scan (ChunkAppend) on ordered_append o1 + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 + -> Materialize + -> Custom Scan (ChunkAppend) on ordered_append o2 + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 +(14 rows) + +-- test RIGHT JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM 
ordered_append o1 RIGHT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o2.time LIMIT 100; + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------- + Limit + -> Merge Left Join + Merge Cond: (o2."time" = o1."time") + -> Custom Scan (ChunkAppend) on ordered_append o2 + Order: o2."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 + -> Materialize + -> Custom Scan (ChunkAppend) on ordered_append o1 + Order: o1."time" + -> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 + -> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 + -> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 +(14 rows) + -- test JOIN on time column with ON clause expression order switched -- should use 2 ChunkAppend :PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 ON o2.time = o1.time ORDER BY o1.time LIMIT 100; diff --git a/test/expected/rowsecurity-10.out b/test/expected/rowsecurity-10.out index 1099b72d1..39da224bd 100644 --- a/test/expected/rowsecurity-10.out +++ b/test/expected/rowsecurity-10.out @@ -262,15 +262,37 @@ SELECT * FROM document TABLESAMPLE BERNOULLI(50) REPEATABLE(0) (0 rows) EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); - QUERY PLAN ------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 6 + QUERY PLAN +----------------------------------------------------- + Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append + -> Seq Scan on _hyper_1_1_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_2_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_3_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_4_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_5_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_6_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) +(17 rows) + +EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); + QUERY PLAN +----------------------------------------------------------- + Hash Join + Hash Cond: (document.cid = category.cid) + InitPlan 1 (returns $0) + -> Index Scan using uaccount_pkey on uaccount + Index Cond: (pguser = CURRENT_USER) + -> Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 -> Seq Scan on _hyper_1_1_chunk Filter: ((dlevel <= $0) AND f_leak(dtitle)) -> Seq Scan on _hyper_1_2_chunk @@ -283,35 +305,9 @@ EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); Filter: ((dlevel <= $0) AND f_leak(dtitle)) -> Seq Scan on _hyper_1_6_chunk Filter: ((dlevel <= $0) AND f_leak(dtitle)) -(19 rows) - -EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); - QUERY PLAN ------------------------------------------------------------------ - Hash Join - Hash Cond: (document.cid = category.cid) - InitPlan 1 (returns $0) - -> Index Scan using 
uaccount_pkey on uaccount - Index Cond: (pguser = CURRENT_USER) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 6 - -> Append - -> Seq Scan on _hyper_1_1_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_2_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_3_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_4_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_5_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_6_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) -> Hash -> Seq Scan on category -(23 rows) +(21 rows) -- viewpoint from regress_rls_dave SET SESSION AUTHORIZATION regress_rls_dave; @@ -354,32 +350,30 @@ NOTICE: f_leak => awesome technology book (7 rows) EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); - QUERY PLAN ----------------------------------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 6 + QUERY PLAN +---------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on _hyper_1_1_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_2_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_3_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_4_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_5_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_6_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) -(19 rows) + -> Seq Scan on _hyper_1_1_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_2_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_3_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_4_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_5_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_6_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) +(17 rows) EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------- Hash Join Hash Cond: (category.cid = document.cid) InitPlan 1 (returns $0) @@ -387,23 +381,21 @@ EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dt Index Cond: (pguser = CURRENT_USER) 
-> Seq Scan on category -> Hash - -> Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 6 - -> Append - -> Seq Scan on _hyper_1_1_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_2_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_3_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_4_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_5_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_6_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) -(23 rows) + -> Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_2_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_3_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_4_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_5_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_6_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) +(21 rows) -- 44 would technically fail for both p2r and p1r, but we should get an error -- back from p1r for this because it sorts first @@ -476,12 +468,30 @@ NOTICE: f_leak => great manga (3 rows) EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); + QUERY PLAN +--------------------------------------------------------------- + Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_2_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_3_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_4_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_5_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_6_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) +(14 rows) + +EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); QUERY PLAN --------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 6 - -> Append + Nested Loop + -> Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 -> Seq Scan on _hyper_1_1_chunk Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) -> Seq Scan on _hyper_1_2_chunk @@ -494,31 +504,9 @@ EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) -> Seq Scan on _hyper_1_6_chunk Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) -(16 rows) - -EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); - QUERY PLAN 
---------------------------------------------------------------------------- - Nested Loop - -> Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 6 - -> Append - -> Seq Scan on _hyper_1_1_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_2_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_3_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_4_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_5_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_6_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) -> Index Scan using category_pkey on category Index Cond: (cid = document.cid) -(19 rows) +(17 rows) -- interaction of FK/PK constraints SET SESSION AUTHORIZATION regress_rls_alice; @@ -1074,28 +1062,26 @@ NOTICE: f_leak => awesome science fiction (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 6 + QUERY PLAN +----------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on _hyper_2_9_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) -(19 rows) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) +(17 rows) -- viewpoint from regress_rls_carol SET SESSION AUTHORIZATION regress_rls_carol; @@ -1125,28 +1111,26 @@ NOTICE: f_leak => awesome technology book (10 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 6 + QUERY PLAN +----------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on _hyper_2_9_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((dlevel <= 
$0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) -(19 rows) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) +(17 rows) -- viewpoint from regress_rls_dave SET SESSION AUTHORIZATION regress_rls_dave; @@ -1164,28 +1148,26 @@ NOTICE: f_leak => awesome science fiction (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN --------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 6 + QUERY PLAN +-------------------------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on _hyper_2_9_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) -(19 rows) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) +(17 rows) -- pp1 ERROR INSERT INTO hyper_document VALUES (1, 11, 5, 'regress_rls_dave', 'testing pp1'); -- fail @@ -1254,28 +1236,26 @@ NOTICE: f_leak => awesome science fiction (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN --------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 6 + QUERY PLAN +-------------------------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on _hyper_2_9_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((cid < 55) AND 
(dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) -(19 rows) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) +(17 rows) -- viewpoint from regress_rls_carol SET SESSION AUTHORIZATION regress_rls_carol; @@ -1307,28 +1287,26 @@ NOTICE: f_leak => awesome technology book (11 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 6 + QUERY PLAN +----------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on _hyper_2_9_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) -(19 rows) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) +(17 rows) -- only owner can change policies ALTER POLICY pp1 ON hyper_document USING (true); --fail @@ -1368,25 +1346,23 @@ NOTICE: f_leak => great satire (3 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ---------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 6 - -> Append - -> Seq Scan on _hyper_2_9_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) -(16 rows) + QUERY PLAN +--------------------------------------------------------------- + Custom Scan 
(ChunkAppend) on hyper_document + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_2_9_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) +(14 rows) -- database superuser does bypass RLS policy when enabled RESET SESSION AUTHORIZATION; @@ -1663,51 +1639,49 @@ NOTICE: f_leak => 1679091c5a880faf6fb5e6087eb1b2dc (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM s1 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: s1 - Chunks left after exclusion: 11 - -> Append - -> Seq Scan on _hyper_6_16_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - SubPlan 1 - -> Append - -> Seq Scan on _hyper_7_27_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_28_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_29_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_30_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_31_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_32_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_33_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_6_17_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_18_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_19_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_20_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_21_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_22_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_23_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_24_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_25_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_26_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) -(42 rows) + QUERY PLAN +----------------------------------------------------------------------- + Custom Scan (ChunkAppend) on s1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_6_16_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + SubPlan 1 + -> Append + -> Seq Scan on _hyper_7_27_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_28_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_29_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_30_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_31_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_32_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_33_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_6_17_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_18_chunk + Filter: ((hashed SubPlan 1) 
AND f_leak(b)) + -> Seq Scan on _hyper_6_19_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_20_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_21_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_22_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_23_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_24_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_25_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_26_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) +(40 rows) SELECT (SELECT x FROM s1 LIMIT 1) xx, * FROM s2 WHERE y like '%28%'; xx | x | y @@ -1718,8 +1692,8 @@ SELECT (SELECT x FROM s1 LIMIT 1) xx, * FROM s2 WHERE y like '%28%'; (3 rows) EXPLAIN (COSTS OFF) SELECT (SELECT x FROM s1 LIMIT 1) xx, * FROM s2 WHERE y like '%28%'; - QUERY PLAN -------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------- Result -> Append -> Seq Scan on _hyper_7_27_chunk @@ -1738,47 +1712,46 @@ EXPLAIN (COSTS OFF) SELECT (SELECT x FROM s1 LIMIT 1) xx, * FROM s2 WHERE y like Filter: (((x % 2) = 0) AND (y ~~ '%28%'::text)) SubPlan 2 -> Limit - -> Result - -> Append - -> Seq Scan on _hyper_6_16_chunk - Filter: (hashed SubPlan 1) - SubPlan 1 - -> Append - -> Seq Scan on _hyper_7_27_chunk _hyper_7_27_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_28_chunk _hyper_7_28_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_29_chunk _hyper_7_29_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_30_chunk _hyper_7_30_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_31_chunk _hyper_7_31_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_32_chunk _hyper_7_32_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_33_chunk _hyper_7_33_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_6_17_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_18_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_19_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_20_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_21_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_22_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_23_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_24_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_25_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_26_chunk - Filter: (hashed SubPlan 1) -(58 rows) + -> Custom Scan (ChunkAppend) on s1 + -> Seq Scan on _hyper_6_16_chunk + Filter: (hashed SubPlan 1) + SubPlan 1 + -> Append + -> Seq Scan on _hyper_7_27_chunk _hyper_7_27_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_28_chunk _hyper_7_28_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_29_chunk _hyper_7_29_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_30_chunk _hyper_7_30_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_31_chunk _hyper_7_31_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_32_chunk 
_hyper_7_32_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_33_chunk _hyper_7_33_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_6_17_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_18_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_19_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_20_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_21_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_22_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_23_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_24_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_25_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_26_chunk + Filter: (hashed SubPlan 1) +(57 rows) SET SESSION AUTHORIZATION regress_rls_alice; ALTER POLICY p2 ON s2 USING (x in (select a from s1 where b like '%d2%')); @@ -2504,75 +2477,67 @@ NOTICE: f_leak => dad (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM z1 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(8 rows) PREPARE plancache_test AS SELECT * FROM z1 WHERE f_leak(b); EXPLAIN (COSTS OFF) EXECUTE plancache_test; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(8 rows) PREPARE plancache_test2 AS WITH q AS (SELECT * FROM z1 WHERE f_leak(b)) SELECT * FROM q,z2; EXPLAIN (COSTS OFF) EXECUTE plancache_test2; - QUERY PLAN -------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------- Result One-Time Filter: false CTE q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(13 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: 
(((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(11 rows) PREPARE plancache_test3 AS WITH q AS (SELECT * FROM z2) SELECT * FROM q,z1 WHERE f_leak(z1.b); EXPLAIN (COSTS OFF) EXECUTE plancache_test3; - QUERY PLAN ------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------- Nested Loop CTE q -> Result One-Time Filter: false -> CTE Scan on q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(15 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(13 rows) SET ROLE regress_rls_group1; SELECT * FROM z1 WHERE f_leak(b); @@ -2585,72 +2550,64 @@ NOTICE: f_leak => dad (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM z1 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(8 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(8 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test2; - QUERY PLAN -------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------- Result One-Time Filter: false CTE q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(13 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + 
Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(11 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test3; - QUERY PLAN ------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------- Nested Loop CTE q -> Result One-Time Filter: false -> CTE Scan on q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(15 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(13 rows) SET SESSION AUTHORIZATION regress_rls_carol; SELECT * FROM z1 WHERE f_leak(b); @@ -2663,72 +2620,64 @@ NOTICE: f_leak => ccc (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM z1 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(8 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(8 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test2; - QUERY PLAN -------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------- Result One-Time Filter: false CTE q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(13 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + 
Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(11 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test3; - QUERY PLAN ------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------- Nested Loop CTE q -> Result One-Time Filter: false -> CTE Scan on q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(15 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(13 rows) SET ROLE regress_rls_group2; SELECT * FROM z1 WHERE f_leak(b); @@ -2741,72 +2690,64 @@ NOTICE: f_leak => ccc (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM z1 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(8 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(8 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test2; - QUERY PLAN -------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------- Result One-Time Filter: false CTE q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(13 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) 
= 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(11 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test3; - QUERY PLAN ------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------- Nested Loop CTE q -> Result One-Time Filter: false -> CTE Scan on q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(15 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(13 rows) -- -- Views should follow policy for view owner. @@ -2831,19 +2772,17 @@ NOTICE: f_leak => dad (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN -------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_50_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_51_chunk - Filter: f_leak(b) -(10 rows) + QUERY PLAN +------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_50_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_51_chunk + Filter: f_leak(b) +(8 rows) -- Query as view/table owner. Should return all records. SET SESSION AUTHORIZATION regress_rls_alice; @@ -2861,19 +2800,17 @@ NOTICE: f_leak => dad (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN -------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_50_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_51_chunk - Filter: f_leak(b) -(10 rows) + QUERY PLAN +------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_50_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_51_chunk + Filter: f_leak(b) +(8 rows) DROP VIEW rls_view; -- View and Table owners are different. 
@@ -2893,19 +2830,17 @@ NOTICE: f_leak => dad (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(8 rows) -- Query as role that is not owner of table but is owner of view. -- Should return records based on view owner policies. @@ -2920,19 +2855,17 @@ NOTICE: f_leak => dad (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(8 rows) -- Query as role that is not the owner of the table or view without permissions. 
SET SESSION AUTHORIZATION regress_rls_carol; @@ -2953,19 +2886,17 @@ NOTICE: f_leak => dad (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(10 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(8 rows) SET SESSION AUTHORIZATION regress_rls_bob; DROP VIEW rls_view; @@ -3124,16 +3055,14 @@ SET SESSION AUTHORIZATION regress_rls_alice; CREATE VIEW rls_sbv WITH (security_barrier) AS SELECT * FROM y1 WHERE f_leak(b); EXPLAIN (COSTS OFF) SELECT * FROM rls_sbv WHERE (a = 1); - QUERY PLAN --------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: y1 - Chunks left after exclusion: 1 - -> Append - -> Index Scan using _hyper_12_57_chunk_y1_a_idx on _hyper_12_57_chunk - Index Cond: (a = 1) - Filter: f_leak(b) -(7 rows) + QUERY PLAN +-------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on y1 + Chunks excluded during startup: 0 + -> Index Scan using _hyper_12_57_chunk_y1_a_idx on _hyper_12_57_chunk + Index Cond: (a = 1) + Filter: f_leak(b) +(5 rows) DROP VIEW rls_sbv; -- Create view as role that does not own table. RLS should be applied. 
@@ -3141,16 +3070,14 @@ SET SESSION AUTHORIZATION regress_rls_bob; CREATE VIEW rls_sbv WITH (security_barrier) AS SELECT * FROM y1 WHERE f_leak(b); EXPLAIN (COSTS OFF) SELECT * FROM rls_sbv WHERE (a = 1); - QUERY PLAN --------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: y1 - Chunks left after exclusion: 1 - -> Append - -> Index Scan using _hyper_12_57_chunk_y1_a_idx on _hyper_12_57_chunk - Index Cond: (a = 1) - Filter: (((a > 2) OR ((a % 2) = 0)) AND f_leak(b)) -(7 rows) + QUERY PLAN +-------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on y1 + Chunks excluded during startup: 0 + -> Index Scan using _hyper_12_57_chunk_y1_a_idx on _hyper_12_57_chunk + Index Cond: (a = 1) + Filter: (((a > 2) OR ((a % 2) = 0)) AND f_leak(b)) +(5 rows) DROP VIEW rls_sbv; -- @@ -3195,35 +3122,33 @@ NOTICE: f_leak => 98f13708210194c475687be6106a3b84 (14 rows) EXPLAIN (COSTS OFF) SELECT * FROM y2 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: y2 - Chunks left after exclusion: 11 - -> Append - -> Seq Scan on _hyper_13_58_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_59_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_60_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_61_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_62_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_63_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_64_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_65_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_66_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_67_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_68_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) -(26 rows) + QUERY PLAN +----------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on y2 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_13_58_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_59_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_60_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_61_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_62_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_63_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_64_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_65_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq 
Scan on _hyper_13_66_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_67_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_68_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) +(24 rows) -- -- Qual push-down of leaky functions, when not referring to table @@ -3269,35 +3194,33 @@ NOTICE: f_leak => abc (14 rows) EXPLAIN (COSTS OFF) SELECT * FROM y2 WHERE f_leak('abc'); - QUERY PLAN ---------------------------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: y2 - Chunks left after exclusion: 11 - -> Append - -> Seq Scan on _hyper_13_58_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_59_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_60_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_61_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_62_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_63_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_64_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_65_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_66_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_67_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_68_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) -(26 rows) + QUERY PLAN +--------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on y2 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_13_58_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_59_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_60_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_61_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_62_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_63_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_64_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_65_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_66_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_67_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_68_chunk + Filter: (f_leak('abc'::text) 
AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) +(24 rows) CREATE TABLE test_qual_pushdown ( abc text @@ -3363,39 +3286,37 @@ NOTICE: f_leak => 98f13708210194c475687be6106a3b84 (0 rows) EXPLAIN (COSTS OFF) SELECT * FROM y2 JOIN test_qual_pushdown ON (b = abc) WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------- Hash Join Hash Cond: (test_qual_pushdown.abc = y2.b) -> Seq Scan on test_qual_pushdown -> Hash - -> Custom Scan (ConstraintAwareAppend) - Hypertable: y2 - Chunks left after exclusion: 11 - -> Append - -> Seq Scan on _hyper_13_58_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_59_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_60_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_61_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_62_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_63_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_64_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_65_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_66_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_67_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_68_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) -(30 rows) + -> Custom Scan (ChunkAppend) on y2 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_13_58_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_59_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_60_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_61_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_62_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_63_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_64_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_65_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_66_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_67_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_68_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) +(28 rows) DROP TABLE test_qual_pushdown; -- @@ -3497,37 +3418,35 @@ NOTICE: f_leak => 98f13708210194c475687be6106a3b84 (11 rows) EXPLAIN (COSTS OFF) WITH cte1 AS (SELECT * FROM t1 WHERE f_leak(b)) SELECT * FROM cte1; - QUERY PLAN -------------------------------------------------------------- + 
QUERY PLAN +------------------------------------------------------- CTE Scan on cte1 CTE cte1 - -> Custom Scan (ConstraintAwareAppend) - Hypertable: t1 - Chunks left after exclusion: 11 - -> Append - -> Seq Scan on _hyper_15_69_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_70_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_71_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_72_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_73_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_74_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_75_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_76_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_77_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_78_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_79_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(28 rows) + -> Custom Scan (ChunkAppend) on t1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_15_69_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_70_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_71_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_72_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_73_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_74_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_75_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_76_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_77_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_78_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_79_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(26 rows) WITH cte1 AS (UPDATE t1 SET a = a + 1 RETURNING *) SELECT * FROM cte1; --fail ERROR: new row violates row-level security policy for table "t1" @@ -4073,6 +3992,9 @@ DROP TABLE copy_t; DROP TABLE copy_rel_to CASCADE; -- Check WHERE CURRENT OF SET SESSION AUTHORIZATION regress_rls_alice; +-- WHERE CURRENT OF does not work with custom scan nodes +-- so we have to disable chunk append here +SET timescaledb.disable_optimizations TO true; CREATE TABLE current_check (currentid int, payload text, rlsuser text); SELECT public.create_hypertable('current_check', 'currentid', chunk_time_interval=>10); NOTICE: adding not-null constraint to column "currentid" @@ -4119,28 +4041,76 @@ FETCH ABSOLUTE 1 FROM current_check_cursor; -- Still cannot UPDATE row 2 through cursor UPDATE current_check SET payload = payload || '_new' WHERE CURRENT OF current_check_cursor RETURNING *; -ERROR: cursor "current_check_cursor" is not a simply updatable scan of table "current_check" + currentid | payload | rlsuser +-----------+---------+--------- +(0 rows) + -- Can update row 4 through cursor, which is the next visible row FETCH RELATIVE 1 FROM current_check_cursor; -ERROR: current transaction is aborted, commands ignored until end of transaction block + currentid | payload | rlsuser +-----------+---------+----------------- + 4 | def | regress_rls_bob +(1 row) + UPDATE current_check SET payload = payload || '_new' WHERE CURRENT OF current_check_cursor RETURNING *; -ERROR: current transaction is aborted, commands ignored until end of transaction block + currentid | payload | rlsuser 
+-----------+---------+----------------- + 4 | def_new | regress_rls_bob +(1 row) + SELECT * FROM current_check; -ERROR: current transaction is aborted, commands ignored until end of transaction block + currentid | payload | rlsuser +-----------+---------+----------------- + 2 | bcd | regress_rls_bob + 4 | def_new | regress_rls_bob +(2 rows) + -- Plan should be a subquery TID scan EXPLAIN (COSTS OFF) UPDATE current_check SET payload = payload WHERE CURRENT OF current_check_cursor; -ERROR: current transaction is aborted, commands ignored until end of transaction block + QUERY PLAN +------------------------------------------------------------- + Update on current_check + Update on current_check + Update on _hyper_21_104_chunk + -> Tid Scan on current_check + TID Cond: CURRENT OF current_check_cursor + Filter: ((currentid = 4) AND ((currentid % 2) = 0)) + -> Tid Scan on _hyper_21_104_chunk + TID Cond: CURRENT OF current_check_cursor + Filter: ((currentid = 4) AND ((currentid % 2) = 0)) +(9 rows) + -- Similarly can only delete row 4 FETCH ABSOLUTE 1 FROM current_check_cursor; -ERROR: current transaction is aborted, commands ignored until end of transaction block + currentid | payload | rlsuser +-----------+---------+----------------- + 2 | bcd | regress_rls_bob +(1 row) + DELETE FROM current_check WHERE CURRENT OF current_check_cursor RETURNING *; -ERROR: current transaction is aborted, commands ignored until end of transaction block + currentid | payload | rlsuser +-----------+---------+--------- +(0 rows) + FETCH RELATIVE 1 FROM current_check_cursor; -ERROR: current transaction is aborted, commands ignored until end of transaction block + currentid | payload | rlsuser +-----------+---------+----------------- + 4 | def | regress_rls_bob +(1 row) + DELETE FROM current_check WHERE CURRENT OF current_check_cursor RETURNING *; -ERROR: current transaction is aborted, commands ignored until end of transaction block + currentid | payload | rlsuser +-----------+---------+----------------- + 4 | def_new | regress_rls_bob +(1 row) + SELECT * FROM current_check; -ERROR: current transaction is aborted, commands ignored until end of transaction block + currentid | payload | rlsuser +-----------+---------+----------------- + 2 | bcd | regress_rls_bob +(1 row) + +RESET timescaledb.disable_optimizations; COMMIT; -- -- check pg_stats view filtering diff --git a/test/expected/rowsecurity-11.out b/test/expected/rowsecurity-11.out index 861bd937e..c21cb35f4 100644 --- a/test/expected/rowsecurity-11.out +++ b/test/expected/rowsecurity-11.out @@ -263,15 +263,39 @@ SELECT * FROM document TABLESAMPLE BERNOULLI(50) REPEATABLE(0) (0 rows) EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); - QUERY PLAN ------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 7 + QUERY PLAN +----------------------------------------------------- + Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append + -> Seq Scan on document document_1 + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_1_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_2_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_3_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_4_chunk + Filter: ((dlevel <= $0) AND 
f_leak(dtitle)) + -> Seq Scan on _hyper_1_5_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_6_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) +(19 rows) + +EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); + QUERY PLAN +----------------------------------------------------------- + Hash Join + Hash Cond: (document.cid = category.cid) + InitPlan 1 (returns $0) + -> Index Scan using uaccount_pkey on uaccount + Index Cond: (pguser = CURRENT_USER) + -> Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 -> Seq Scan on document document_1 Filter: ((dlevel <= $0) AND f_leak(dtitle)) -> Seq Scan on _hyper_1_1_chunk @@ -286,37 +310,9 @@ EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); Filter: ((dlevel <= $0) AND f_leak(dtitle)) -> Seq Scan on _hyper_1_6_chunk Filter: ((dlevel <= $0) AND f_leak(dtitle)) -(21 rows) - -EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); - QUERY PLAN ------------------------------------------------------------------ - Hash Join - Hash Cond: (document.cid = category.cid) - InitPlan 1 (returns $0) - -> Index Scan using uaccount_pkey on uaccount - Index Cond: (pguser = CURRENT_USER) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 7 - -> Append - -> Seq Scan on document document_1 - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_1_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_2_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_3_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_4_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_5_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_6_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) -> Hash -> Seq Scan on category -(25 rows) +(23 rows) -- viewpoint from regress_rls_dave SET SESSION AUTHORIZATION regress_rls_dave; @@ -359,34 +355,32 @@ NOTICE: f_leak => awesome technology book (7 rows) EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); - QUERY PLAN ----------------------------------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 7 + QUERY PLAN +---------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on document document_1 - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_1_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_2_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_3_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_4_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_5_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_6_chunk - Filter: ((cid <> 44) AND 
(cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) -(21 rows) + -> Seq Scan on document document_1 + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_1_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_2_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_3_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_4_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_5_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_6_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) +(19 rows) EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------- Hash Join Hash Cond: (category.cid = document.cid) InitPlan 1 (returns $0) @@ -394,25 +388,23 @@ EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dt Index Cond: (pguser = CURRENT_USER) -> Seq Scan on category -> Hash - -> Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 7 - -> Append - -> Seq Scan on document document_1 - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_1_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_2_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_3_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_4_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_5_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_6_chunk - Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) -(25 rows) + -> Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 + -> Seq Scan on document document_1 + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_1_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_2_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_3_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_4_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_5_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_6_chunk + Filter: ((cid <> 44) AND (cid <> 44) AND (cid < 50) AND (dlevel <= $0) AND 
f_leak(dtitle)) +(23 rows) -- 44 would technically fail for both p2r and p1r, but we should get an error -- back from p1r for this because it sorts first @@ -485,12 +477,32 @@ NOTICE: f_leak => great manga (3 rows) EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); + QUERY PLAN +--------------------------------------------------------------- + Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 + -> Seq Scan on document document_1 + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_1_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_2_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_3_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_4_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_5_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_1_6_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) +(16 rows) + +EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); QUERY PLAN --------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 7 - -> Append + Nested Loop + -> Custom Scan (ChunkAppend) on document + Chunks excluded during startup: 0 -> Seq Scan on document document_1 Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) -> Seq Scan on _hyper_1_1_chunk @@ -505,33 +517,9 @@ EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) -> Seq Scan on _hyper_1_6_chunk Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) -(18 rows) - -EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); - QUERY PLAN ---------------------------------------------------------------------------- - Nested Loop - -> Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 7 - -> Append - -> Seq Scan on document document_1 - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_1_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_2_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_3_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_4_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_5_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_1_6_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) -> Index Scan using category_pkey on category Index Cond: (cid = document.cid) -(21 rows) +(19 rows) -- interaction of FK/PK constraints SET SESSION AUTHORIZATION regress_rls_alice; @@ -1087,30 +1075,28 @@ NOTICE: f_leak => awesome science fiction (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 7 + QUERY PLAN +----------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on hyper_document 
hyper_document_1 - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_9_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) -(21 rows) + -> Seq Scan on hyper_document hyper_document_1 + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) +(19 rows) -- viewpoint from regress_rls_carol SET SESSION AUTHORIZATION regress_rls_carol; @@ -1140,30 +1126,28 @@ NOTICE: f_leak => awesome technology book (10 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 7 + QUERY PLAN +----------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on hyper_document hyper_document_1 - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_9_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) -(21 rows) + -> Seq Scan on hyper_document hyper_document_1 + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) +(19 rows) -- viewpoint from regress_rls_dave SET SESSION AUTHORIZATION regress_rls_dave; @@ -1181,30 +1165,28 @@ NOTICE: f_leak => awesome science fiction (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN --------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 7 + QUERY PLAN +-------------------------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks 
excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on hyper_document hyper_document_1 - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_9_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) -(21 rows) + -> Seq Scan on hyper_document hyper_document_1 + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) +(19 rows) -- pp1 ERROR INSERT INTO hyper_document VALUES (1, 11, 5, 'regress_rls_dave', 'testing pp1'); -- fail @@ -1273,30 +1255,28 @@ NOTICE: f_leak => awesome science fiction (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN --------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 7 + QUERY PLAN +-------------------------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on hyper_document hyper_document_1 - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_9_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) -(21 rows) + -> Seq Scan on hyper_document hyper_document_1 + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) + -> Seq 
Scan on _hyper_2_14_chunk + Filter: ((cid < 55) AND (dlevel <= $0) AND f_leak(dtitle)) +(19 rows) -- viewpoint from regress_rls_carol SET SESSION AUTHORIZATION regress_rls_carol; @@ -1328,30 +1308,28 @@ NOTICE: f_leak => awesome technology book (11 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 7 + QUERY PLAN +----------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks excluded during startup: 0 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = CURRENT_USER) - -> Append - -> Seq Scan on hyper_document hyper_document_1 - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_9_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((dlevel <= $0) AND f_leak(dtitle)) -(21 rows) + -> Seq Scan on hyper_document hyper_document_1 + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((dlevel <= $0) AND f_leak(dtitle)) +(19 rows) -- only owner can change policies ALTER POLICY pp1 ON hyper_document USING (true); --fail @@ -1391,27 +1369,25 @@ NOTICE: f_leak => great satire (3 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ---------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 7 - -> Append - -> Seq Scan on hyper_document hyper_document_1 - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_9_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_10_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_11_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_12_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_13_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) - -> Seq Scan on _hyper_2_14_chunk - Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) -(18 rows) + QUERY PLAN +--------------------------------------------------------------- + Custom Scan (ChunkAppend) on hyper_document + Chunks excluded during startup: 0 + -> Seq Scan on hyper_document hyper_document_1 + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_9_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_10_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_11_chunk + Filter: ((dauthor = CURRENT_USER) AND 
f_leak(dtitle)) + -> Seq Scan on _hyper_2_12_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_13_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) + -> Seq Scan on _hyper_2_14_chunk + Filter: ((dauthor = CURRENT_USER) AND f_leak(dtitle)) +(16 rows) -- database superuser does bypass RLS policy when enabled RESET SESSION AUTHORIZATION; @@ -1689,55 +1665,53 @@ NOTICE: f_leak => 1679091c5a880faf6fb5e6087eb1b2dc (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM s1 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: s1 - Chunks left after exclusion: 12 - -> Append - -> Seq Scan on s1 s1_1 - Filter: ((hashed SubPlan 1) AND f_leak(b)) - SubPlan 1 - -> Append - -> Seq Scan on s2 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_27_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_28_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_29_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_30_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_31_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_32_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_33_chunk - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_6_16_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_17_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_18_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_19_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_20_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_21_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_22_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_23_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_24_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_25_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) - -> Seq Scan on _hyper_6_26_chunk - Filter: ((hashed SubPlan 1) AND f_leak(b)) -(46 rows) + QUERY PLAN +----------------------------------------------------------------------- + Custom Scan (ChunkAppend) on s1 + Chunks excluded during startup: 0 + -> Seq Scan on s1 s1_1 + Filter: ((hashed SubPlan 1) AND f_leak(b)) + SubPlan 1 + -> Append + -> Seq Scan on s2 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_27_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_28_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_29_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_30_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_31_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_32_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_33_chunk + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_6_16_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_17_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_18_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on 
_hyper_6_19_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_20_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_21_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_22_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_23_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_24_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_25_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) + -> Seq Scan on _hyper_6_26_chunk + Filter: ((hashed SubPlan 1) AND f_leak(b)) +(44 rows) SELECT (SELECT x FROM s1 LIMIT 1) xx, * FROM s2 WHERE y like '%28%'; xx | x | y @@ -1748,8 +1722,8 @@ SELECT (SELECT x FROM s1 LIMIT 1) xx, * FROM s2 WHERE y like '%28%'; (3 rows) EXPLAIN (COSTS OFF) SELECT (SELECT x FROM s1 LIMIT 1) xx, * FROM s2 WHERE y like '%28%'; - QUERY PLAN -------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------- Result -> Append -> Seq Scan on s2 @@ -1770,51 +1744,50 @@ EXPLAIN (COSTS OFF) SELECT (SELECT x FROM s1 LIMIT 1) xx, * FROM s2 WHERE y like Filter: (((x % 2) = 0) AND (y ~~ '%28%'::text)) SubPlan 2 -> Limit - -> Result - -> Append - -> Seq Scan on s1 - Filter: (hashed SubPlan 1) - SubPlan 1 - -> Append - -> Seq Scan on s2 s2_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_27_chunk _hyper_7_27_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_28_chunk _hyper_7_28_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_29_chunk _hyper_7_29_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_30_chunk _hyper_7_30_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_31_chunk _hyper_7_31_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_32_chunk _hyper_7_32_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_7_33_chunk _hyper_7_33_chunk_1 - Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) - -> Seq Scan on _hyper_6_16_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_17_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_18_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_19_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_20_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_21_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_22_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_23_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_24_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_25_chunk - Filter: (hashed SubPlan 1) - -> Seq Scan on _hyper_6_26_chunk - Filter: (hashed SubPlan 1) -(64 rows) + -> Custom Scan (ChunkAppend) on s1 + -> Seq Scan on s1 s1_1 + Filter: (hashed SubPlan 1) + SubPlan 1 + -> Append + -> Seq Scan on s2 s2_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_27_chunk _hyper_7_27_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_28_chunk _hyper_7_28_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_29_chunk _hyper_7_29_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_30_chunk _hyper_7_30_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> 
Seq Scan on _hyper_7_31_chunk _hyper_7_31_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_32_chunk _hyper_7_32_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_7_33_chunk _hyper_7_33_chunk_1 + Filter: (((x % 2) = 0) AND (y ~~ '%af%'::text)) + -> Seq Scan on _hyper_6_16_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_17_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_18_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_19_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_20_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_21_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_22_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_23_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_24_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_25_chunk + Filter: (hashed SubPlan 1) + -> Seq Scan on _hyper_6_26_chunk + Filter: (hashed SubPlan 1) +(63 rows) SET SESSION AUTHORIZATION regress_rls_alice; ALTER POLICY p2 ON s2 USING (x in (select a from s1 where b like '%d2%')); @@ -2542,85 +2515,77 @@ NOTICE: f_leak => dad (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM z1 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(10 rows) PREPARE plancache_test AS SELECT * FROM z1 WHERE f_leak(b); EXPLAIN (COSTS OFF) EXECUTE plancache_test; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(10 rows) PREPARE plancache_test2 AS WITH q AS (SELECT * FROM z1 WHERE f_leak(b)) SELECT * FROM q,z2; EXPLAIN (COSTS OFF) EXECUTE plancache_test2; - QUERY PLAN -------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------- Nested Loop CTE q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 
- -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) -> CTE Scan on q -> Materialize -> Seq Scan on z2 -(17 rows) +(15 rows) PREPARE plancache_test3 AS WITH q AS (SELECT * FROM z2) SELECT * FROM q,z1 WHERE f_leak(z1.b); EXPLAIN (COSTS OFF) EXECUTE plancache_test3; - QUERY PLAN ------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------- Nested Loop CTE q -> Seq Scan on z2 -> CTE Scan on q -> Materialize - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(17 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(15 rows) SET ROLE regress_rls_group1; SELECT * FROM z1 WHERE f_leak(b); @@ -2633,82 +2598,74 @@ NOTICE: f_leak => dad (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM z1 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(10 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) 
on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(10 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test2; - QUERY PLAN -------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------- Nested Loop CTE q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) -> CTE Scan on q -> Materialize -> Seq Scan on z2 -(17 rows) +(15 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test3; - QUERY PLAN ------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------- Nested Loop CTE q -> Seq Scan on z2 -> CTE Scan on q -> Materialize - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(17 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(15 rows) SET SESSION AUTHORIZATION regress_rls_carol; SELECT * FROM z1 WHERE f_leak(b); @@ -2721,82 +2678,74 @@ NOTICE: f_leak => ccc (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM z1 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(10 rows) EXPLAIN (COSTS OFF) EXECUTE 
plancache_test; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(10 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test2; - QUERY PLAN -------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------- Nested Loop CTE q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) -> CTE Scan on q -> Materialize -> Seq Scan on z2 -(17 rows) +(15 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test3; - QUERY PLAN ------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------- Nested Loop CTE q -> Seq Scan on z2 -> CTE Scan on q -> Materialize - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(17 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(15 rows) SET ROLE regress_rls_group2; SELECT * FROM z1 WHERE f_leak(b); @@ -2809,82 +2758,74 @@ NOTICE: f_leak => ccc (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM z1 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) 
- -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(10 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(10 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test2; - QUERY PLAN -------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------- Nested Loop CTE q - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) -> CTE Scan on q -> Materialize -> Seq Scan on z2 -(17 rows) +(15 rows) EXPLAIN (COSTS OFF) EXECUTE plancache_test3; - QUERY PLAN ------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------- Nested Loop CTE q -> Seq Scan on z2 -> CTE Scan on q -> Materialize - -> Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 1) AND f_leak(b)) -(17 rows) + -> Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 1) AND f_leak(b)) +(15 rows) -- -- 
Views should follow policy for view owner. @@ -2909,21 +2850,19 @@ NOTICE: f_leak => dad (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN -------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: f_leak(b) - -> Seq Scan on _hyper_9_49_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_50_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_51_chunk - Filter: f_leak(b) -(12 rows) + QUERY PLAN +------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: f_leak(b) + -> Seq Scan on _hyper_9_49_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_50_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_51_chunk + Filter: f_leak(b) +(10 rows) -- Query as view/table owner. Should return all records. SET SESSION AUTHORIZATION regress_rls_alice; @@ -2941,21 +2880,19 @@ NOTICE: f_leak => dad (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN -------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: f_leak(b) - -> Seq Scan on _hyper_9_49_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_50_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_51_chunk - Filter: f_leak(b) -(12 rows) + QUERY PLAN +------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: f_leak(b) + -> Seq Scan on _hyper_9_49_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_50_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_51_chunk + Filter: f_leak(b) +(10 rows) DROP VIEW rls_view; -- View and Table owners are different. @@ -2975,21 +2912,19 @@ NOTICE: f_leak => dad (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(10 rows) -- Query as role that is not owner of table but is owner of view. -- Should return records based on view owner policies. 
@@ -3004,21 +2939,19 @@ NOTICE: f_leak => dad (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(10 rows) -- Query as role that is not the owner of the table or view without permissions. SET SESSION AUTHORIZATION regress_rls_carol; @@ -3039,21 +2972,19 @@ NOTICE: f_leak => dad (2 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN ------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 4 - -> Append - -> Seq Scan on z1 z1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_49_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_50_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_9_51_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(12 rows) + QUERY PLAN +----------------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on z1 z1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_49_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_50_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_9_51_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(10 rows) SET SESSION AUTHORIZATION regress_rls_bob; DROP VIEW rls_view; @@ -3212,18 +3143,16 @@ SET SESSION AUTHORIZATION regress_rls_alice; CREATE VIEW rls_sbv WITH (security_barrier) AS SELECT * FROM y1 WHERE f_leak(b); EXPLAIN (COSTS OFF) SELECT * FROM rls_sbv WHERE (a = 1); - QUERY PLAN --------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: y1 - Chunks left after exclusion: 2 - -> Append - -> Seq Scan on y1 y1_1 - Filter: (f_leak(b) AND (a = 1)) - -> Index Scan using _hyper_12_57_chunk_y1_a_idx on _hyper_12_57_chunk - Index Cond: (a = 1) - Filter: f_leak(b) -(9 rows) + QUERY PLAN +-------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on y1 + Chunks excluded during startup: 0 + -> Seq Scan on y1 y1_1 + Filter: (f_leak(b) AND (a = 1)) + -> Index Scan using _hyper_12_57_chunk_y1_a_idx on _hyper_12_57_chunk + Index Cond: (a = 1) + Filter: f_leak(b) +(7 rows) DROP VIEW rls_sbv; -- Create view as role that does not own table. RLS should be applied. 
@@ -3231,18 +3160,16 @@ SET SESSION AUTHORIZATION regress_rls_bob; CREATE VIEW rls_sbv WITH (security_barrier) AS SELECT * FROM y1 WHERE f_leak(b); EXPLAIN (COSTS OFF) SELECT * FROM rls_sbv WHERE (a = 1); - QUERY PLAN --------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: y1 - Chunks left after exclusion: 2 - -> Append - -> Seq Scan on y1 y1_1 - Filter: ((a = 1) AND ((a > 2) OR ((a % 2) = 0)) AND f_leak(b)) - -> Index Scan using _hyper_12_57_chunk_y1_a_idx on _hyper_12_57_chunk - Index Cond: (a = 1) - Filter: (((a > 2) OR ((a % 2) = 0)) AND f_leak(b)) -(9 rows) + QUERY PLAN +-------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on y1 + Chunks excluded during startup: 0 + -> Seq Scan on y1 y1_1 + Filter: ((a = 1) AND ((a > 2) OR ((a % 2) = 0)) AND f_leak(b)) + -> Index Scan using _hyper_12_57_chunk_y1_a_idx on _hyper_12_57_chunk + Index Cond: (a = 1) + Filter: (((a > 2) OR ((a % 2) = 0)) AND f_leak(b)) +(7 rows) DROP VIEW rls_sbv; -- @@ -3287,37 +3214,35 @@ NOTICE: f_leak => 98f13708210194c475687be6106a3b84 (14 rows) EXPLAIN (COSTS OFF) SELECT * FROM y2 WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------------------------------------------ - Custom Scan (ConstraintAwareAppend) - Hypertable: y2 - Chunks left after exclusion: 12 - -> Append - -> Seq Scan on y2 y2_1 - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_58_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_59_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_60_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_61_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_62_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_63_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_64_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_65_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_66_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_67_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_68_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) -(28 rows) + QUERY PLAN +----------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on y2 + Chunks excluded during startup: 0 + -> Seq Scan on y2 y2_1 + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_58_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_59_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_60_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_61_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_62_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) 
= 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_63_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_64_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_65_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_66_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_67_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_68_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) +(26 rows) -- -- Qual push-down of leaky functions, when not referring to table @@ -3363,37 +3288,35 @@ NOTICE: f_leak => abc (14 rows) EXPLAIN (COSTS OFF) SELECT * FROM y2 WHERE f_leak('abc'); - QUERY PLAN ---------------------------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: y2 - Chunks left after exclusion: 12 - -> Append - -> Seq Scan on y2 y2_1 - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_58_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_59_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_60_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_61_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_62_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_63_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_64_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_65_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_66_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_67_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_68_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) -(28 rows) + QUERY PLAN +--------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on y2 + Chunks excluded during startup: 0 + -> Seq Scan on y2 y2_1 + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_58_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_59_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_60_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_61_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_62_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_63_chunk + Filter: (f_leak('abc'::text) AND 
(((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_64_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_65_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_66_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_67_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_68_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) +(26 rows) CREATE TABLE test_qual_pushdown ( abc text @@ -3461,41 +3384,39 @@ NOTICE: f_leak => 98f13708210194c475687be6106a3b84 (0 rows) EXPLAIN (COSTS OFF) SELECT * FROM y2 JOIN test_qual_pushdown ON (b = abc) WHERE f_leak(b); - QUERY PLAN ------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------- Hash Join Hash Cond: (test_qual_pushdown.abc = y2.b) -> Seq Scan on test_qual_pushdown -> Hash - -> Custom Scan (ConstraintAwareAppend) - Hypertable: y2 - Chunks left after exclusion: 12 - -> Append - -> Seq Scan on y2 y2_1 - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_58_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_59_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_60_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_61_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_62_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_63_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_64_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_65_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_66_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_67_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) - -> Seq Scan on _hyper_13_68_chunk - Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) -(32 rows) + -> Custom Scan (ChunkAppend) on y2 + Chunks excluded during startup: 0 + -> Seq Scan on y2 y2_1 + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_58_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_59_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_60_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_61_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_62_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_63_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_64_chunk + Filter: 
((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_65_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_66_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_67_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) + -> Seq Scan on _hyper_13_68_chunk + Filter: ((((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)) AND f_leak(b)) +(30 rows) DROP TABLE test_qual_pushdown; -- @@ -3597,39 +3518,37 @@ NOTICE: f_leak => 98f13708210194c475687be6106a3b84 (11 rows) EXPLAIN (COSTS OFF) WITH cte1 AS (SELECT * FROM t1 WHERE f_leak(b)) SELECT * FROM cte1; - QUERY PLAN -------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------- CTE Scan on cte1 CTE cte1 - -> Custom Scan (ConstraintAwareAppend) - Hypertable: t1 - Chunks left after exclusion: 12 - -> Append - -> Seq Scan on t1 t1_1 - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_69_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_70_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_71_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_72_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_73_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_74_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_75_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_76_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_77_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_78_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) - -> Seq Scan on _hyper_15_79_chunk - Filter: (((a % 2) = 0) AND f_leak(b)) -(30 rows) + -> Custom Scan (ChunkAppend) on t1 + Chunks excluded during startup: 0 + -> Seq Scan on t1 t1_1 + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_69_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_70_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_71_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_72_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_73_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_74_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_75_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_76_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_77_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_78_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) + -> Seq Scan on _hyper_15_79_chunk + Filter: (((a % 2) = 0) AND f_leak(b)) +(28 rows) WITH cte1 AS (UPDATE t1 SET a = a + 1 RETURNING *) SELECT * FROM cte1; --fail ERROR: new row violates row-level security policy for table "t1" @@ -4215,6 +4134,9 @@ UPDATE current_check SET payload = payload || '_new' WHERE currentid = 2 RETURNI (0 rows) BEGIN; +-- WHERE CURRENT OF does not work with custom scan nodes +-- so we have to disable chunk append here +SET timescaledb.enable_chunk_append TO false; DECLARE current_check_cursor SCROLL CURSOR FOR SELECT * FROM current_check; -- Returns rows that can be seen according to SELECT policy, like plain SELECT -- above (even rows) @@ -4295,6 +4217,7 @@ SELECT * FROM current_check; 2 | bcd | 
regress_rls_bob (1 row) +RESET timescaledb.enable_chunk_append; COMMIT; -- -- check pg_stats view filtering diff --git a/test/expected/rowsecurity-9.6.out b/test/expected/rowsecurity-9.6.out index c441045d3..de8b74ac8 100644 --- a/test/expected/rowsecurity-9.6.out +++ b/test/expected/rowsecurity-9.6.out @@ -190,7 +190,7 @@ EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); ---------------------------------------------------------- Subquery Scan on document Filter: f_leak(document.dtitle) - -> Append + -> Custom Scan (ChunkAppend) on document document_1 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = "current_user"()) @@ -215,7 +215,7 @@ EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dt Hash Cond: (document.cid = category.cid) -> Subquery Scan on document Filter: f_leak(document.dtitle) - -> Append + -> Custom Scan (ChunkAppend) on document document_1 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = "current_user"()) @@ -299,14 +299,34 @@ NOTICE: f_leak => great manga (3 rows) EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); - QUERY PLAN ----------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------- Subquery Scan on document Filter: f_leak(document.dtitle) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 6 - -> Append + -> Custom Scan (ChunkAppend) on document document_1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_1_1_chunk + Filter: (dauthor = "current_user"()) + -> Seq Scan on _hyper_1_2_chunk + Filter: (dauthor = "current_user"()) + -> Seq Scan on _hyper_1_3_chunk + Filter: (dauthor = "current_user"()) + -> Seq Scan on _hyper_1_4_chunk + Filter: (dauthor = "current_user"()) + -> Seq Scan on _hyper_1_5_chunk + Filter: (dauthor = "current_user"()) + -> Seq Scan on _hyper_1_6_chunk + Filter: (dauthor = "current_user"()) +(16 rows) + +EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); + QUERY PLAN +-------------------------------------------------------------- + Nested Loop + -> Subquery Scan on document + Filter: f_leak(document.dtitle) + -> Custom Scan (ChunkAppend) on document document_1 + Chunks excluded during startup: 0 -> Seq Scan on _hyper_1_1_chunk Filter: (dauthor = "current_user"()) -> Seq Scan on _hyper_1_2_chunk @@ -319,33 +339,9 @@ EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle); Filter: (dauthor = "current_user"()) -> Seq Scan on _hyper_1_6_chunk Filter: (dauthor = "current_user"()) -(18 rows) - -EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle); - QUERY PLAN ----------------------------------------------------------------- - Nested Loop - -> Subquery Scan on document - Filter: f_leak(document.dtitle) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: document - Chunks left after exclusion: 6 - -> Append - -> Seq Scan on _hyper_1_1_chunk - Filter: (dauthor = "current_user"()) - -> Seq Scan on _hyper_1_2_chunk - Filter: (dauthor = "current_user"()) - -> Seq Scan on _hyper_1_3_chunk - Filter: (dauthor = "current_user"()) - -> Seq Scan on _hyper_1_4_chunk - Filter: (dauthor = "current_user"()) - -> Seq Scan on _hyper_1_5_chunk - Filter: (dauthor = "current_user"()) - -> Seq Scan on _hyper_1_6_chunk - Filter: (dauthor = "current_user"()) -> Index Scan using category_pkey on category Index Cond: (cid = document.cid) 
-(21 rows) +(19 rows) -- interaction of FK/PK constraints SET SESSION AUTHORIZATION regress_rls_alice; @@ -903,11 +899,11 @@ NOTICE: f_leak => awesome science fiction (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ----------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------- Subquery Scan on hyper_document Filter: f_leak(hyper_document.dtitle) - -> Append + -> Custom Scan (ChunkAppend) on hyper_document hyper_document_1 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = "current_user"()) @@ -953,11 +949,11 @@ NOTICE: f_leak => awesome technology book (10 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ----------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------- Subquery Scan on hyper_document Filter: f_leak(hyper_document.dtitle) - -> Append + -> Custom Scan (ChunkAppend) on hyper_document hyper_document_1 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = "current_user"()) @@ -1066,11 +1062,11 @@ NOTICE: f_leak => awesome technology book (10 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ----------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------- Subquery Scan on hyper_document Filter: f_leak(hyper_document.dtitle) - -> Append + -> Custom Scan (ChunkAppend) on hyper_document hyper_document_1 InitPlan 1 (returns $0) -> Index Scan using uaccount_pkey on uaccount Index Cond: (pguser = "current_user"()) @@ -1128,27 +1124,25 @@ NOTICE: f_leak => great satire (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle); - QUERY PLAN ----------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------- Subquery Scan on hyper_document Filter: f_leak(hyper_document.dtitle) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: hyper_document - Chunks left after exclusion: 6 - -> Append - -> Seq Scan on _hyper_2_7_chunk - Filter: (dauthor = "current_user"()) - -> Seq Scan on _hyper_2_8_chunk - Filter: (dauthor = "current_user"()) - -> Seq Scan on _hyper_2_9_chunk - Filter: (dauthor = "current_user"()) - -> Seq Scan on _hyper_2_10_chunk - Filter: (dauthor = "current_user"()) - -> Seq Scan on _hyper_2_11_chunk - Filter: (dauthor = "current_user"()) - -> Seq Scan on _hyper_2_12_chunk - Filter: (dauthor = "current_user"()) -(18 rows) + -> Custom Scan (ChunkAppend) on hyper_document hyper_document_1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_2_7_chunk + Filter: (dauthor = "current_user"()) + -> Seq Scan on _hyper_2_8_chunk + Filter: (dauthor = "current_user"()) + -> Seq Scan on _hyper_2_9_chunk + Filter: (dauthor = "current_user"()) + -> Seq Scan on _hyper_2_10_chunk + Filter: (dauthor = "current_user"()) + -> Seq Scan on _hyper_2_11_chunk + Filter: (dauthor = "current_user"()) + -> Seq Scan on _hyper_2_12_chunk + Filter: (dauthor = "current_user"()) +(16 rows) -- database superuser does bypass RLS policy when enabled RESET SESSION AUTHORIZATION; @@ -2835,19 +2829,17 @@ NOTICE: f_leak => dad (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN -------------------------------------------- - Custom Scan (ConstraintAwareAppend) - 
Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_46_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_47_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_48_chunk - Filter: f_leak(b) -(10 rows) + QUERY PLAN +------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_46_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_47_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_48_chunk + Filter: f_leak(b) +(8 rows) -- Query as view/table owner. Should return all records. SET SESSION AUTHORIZATION regress_rls_alice; @@ -2865,19 +2857,17 @@ NOTICE: f_leak => dad (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM rls_view; - QUERY PLAN -------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: z1 - Chunks left after exclusion: 3 - -> Append - -> Seq Scan on _hyper_9_46_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_47_chunk - Filter: f_leak(b) - -> Seq Scan on _hyper_9_48_chunk - Filter: f_leak(b) -(10 rows) + QUERY PLAN +------------------------------------- + Custom Scan (ChunkAppend) on z1 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_9_46_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_47_chunk + Filter: f_leak(b) + -> Seq Scan on _hyper_9_48_chunk + Filter: f_leak(b) +(8 rows) DROP VIEW rls_view; -- View and Table owners are different. @@ -3125,16 +3115,14 @@ SET SESSION AUTHORIZATION regress_rls_alice; CREATE VIEW rls_sbv WITH (security_barrier) AS SELECT * FROM y1 WHERE f_leak(b); EXPLAIN (COSTS OFF) SELECT * FROM rls_sbv WHERE (a = 1); - QUERY PLAN --------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: y1 - Chunks left after exclusion: 1 - -> Append - -> Index Scan using _hyper_12_54_chunk_y1_a_idx on _hyper_12_54_chunk - Index Cond: (a = 1) - Filter: f_leak(b) -(7 rows) + QUERY PLAN +-------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on y1 + Chunks excluded during startup: 0 + -> Index Scan using _hyper_12_54_chunk_y1_a_idx on _hyper_12_54_chunk + Index Cond: (a = 1) + Filter: f_leak(b) +(5 rows) DROP VIEW rls_sbv; -- Create view as role that does not own table. RLS should be applied. 
@@ -3268,35 +3256,33 @@ NOTICE: f_leak => abc (14 rows) EXPLAIN (COSTS OFF) SELECT * FROM y2 WHERE f_leak('abc'); - QUERY PLAN ---------------------------------------------------------------------------------------------------- - Custom Scan (ConstraintAwareAppend) - Hypertable: y2 - Chunks left after exclusion: 11 - -> Append - -> Seq Scan on _hyper_13_55_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_56_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_57_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_58_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_59_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_60_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_61_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_62_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_63_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_64_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) - -> Seq Scan on _hyper_13_65_chunk - Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) -(26 rows) + QUERY PLAN +--------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on y2 + Chunks excluded during startup: 0 + -> Seq Scan on _hyper_13_55_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_56_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_57_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_58_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_59_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_60_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_61_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_62_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_63_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_64_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) + -> Seq Scan on _hyper_13_65_chunk + Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0))) +(24 rows) CREATE TABLE test_qual_pushdown ( abc text diff --git a/test/expected/update.out b/test/expected/update.out index 74d7cd3e9..404c02e8d 100644 --- a/test/expected/update.out +++ b/test/expected/update.out @@ -51,34 +51,30 @@ $BODY$; EXPLAIN (costs off) SELECT FROM "one_Partition" WHERE series_1 IN (SELECT series_1 FROM "one_Partition" WHERE 
series_1 > series_val()); - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- Hash Join Hash Cond: ("one_Partition".series_1 = "one_Partition_1".series_1) - -> Custom Scan (ConstraintAwareAppend) - Hypertable: one_Partition - Chunks left after exclusion: 3 - -> Append - -> Index Only Scan using "_hyper_1_1_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_1_chunk - Index Cond: (series_1 > (series_val())::double precision) - -> Index Only Scan using "_hyper_1_2_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_2_chunk - Index Cond: (series_1 > (series_val())::double precision) - -> Index Only Scan using "_hyper_1_3_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_3_chunk - Index Cond: (series_1 > (series_val())::double precision) + -> Custom Scan (ChunkAppend) on "one_Partition" + Chunks excluded during startup: 0 + -> Index Only Scan using "_hyper_1_1_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_1_chunk + Index Cond: (series_1 > (series_val())::double precision) + -> Index Only Scan using "_hyper_1_2_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_2_chunk + Index Cond: (series_1 > (series_val())::double precision) + -> Index Only Scan using "_hyper_1_3_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_3_chunk + Index Cond: (series_1 > (series_val())::double precision) -> Hash -> HashAggregate Group Key: "one_Partition_1".series_1 - -> Custom Scan (ConstraintAwareAppend) - Hypertable: one_Partition - Chunks left after exclusion: 3 - -> Append - -> Index Only Scan using "_hyper_1_1_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_1_chunk _hyper_1_1_chunk_1 - Index Cond: (series_1 > (series_val())::double precision) - -> Index Only Scan using "_hyper_1_2_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_2_chunk _hyper_1_2_chunk_1 - Index Cond: (series_1 > (series_val())::double precision) - -> Index Only Scan using "_hyper_1_3_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_3_chunk _hyper_1_3_chunk_1 - Index Cond: (series_1 > (series_val())::double precision) -(25 rows) + -> Custom Scan (ChunkAppend) on "one_Partition" "one_Partition_1" + Chunks excluded during startup: 0 + -> Index Only Scan using "_hyper_1_1_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_1_chunk _hyper_1_1_chunk_1 + Index Cond: (series_1 > (series_val())::double precision) + -> Index Only Scan using "_hyper_1_2_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_2_chunk _hyper_1_2_chunk_1 + Index Cond: (series_1 > (series_val())::double precision) + -> Index Only Scan using "_hyper_1_3_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_3_chunk _hyper_1_3_chunk_1 + Index Cond: (series_1 > (series_val())::double precision) +(21 rows) -- ConstraintAwareAppend NOT applied for UPDATE EXPLAIN (costs off) diff --git a/test/sql/include/append_query.sql b/test/sql/include/append_query.sql index 45129cd58..3de289e3d 100644 --- a/test/sql/include/append_query.sql +++ b/test/sql/include/append_query.sql @@ -207,3 +207,23 @@ SELECT * FROM append_test TABLESAMPLE SYSTEM(1) WHERE TIME > now_s() - INTERVAL '400 day' ORDER BY time DESC; +-- test runtime exclusion + +-- test runtime exclusion with LATERAL and 2 hypertables +:PREFIX SELECT m1.time, m2.time FROM metrics_timestamptz m1 
LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m2 WHERE m1.time = m2.time LIMIT 1) m2 ON true ORDER BY m1.time; + +-- test runtime exclusion with LATERAL and generate_series +:PREFIX SELECT g.time FROM generate_series('2000-01-01'::timestamptz, '2000-02-01'::timestamptz, '1d'::interval) g(time) LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m WHERE m.time=g.time LIMIT 1) m ON true; +:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time) m ON true; +:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time ORDER BY time) m ON true; + +-- test runtime exclusion with subquery +:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE m1.time=(SELECT max(time) FROM metrics_timestamptz); + +-- test runtime exclusion with correlated subquery +:PREFIX SELECT m1.time, (SELECT m2.time FROM metrics_timestamptz m2 WHERE m2.time < m1.time ORDER BY m2.time DESC LIMIT 1) FROM metrics_timestamptz m1 WHERE m1.time < '2000-01-10' ORDER BY m1.time; + +-- test EXISTS +:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE EXISTS(SELECT 1 FROM metrics_timestamptz m2 WHERE m1.time < m2.time) ORDER BY m1.time DESC limit 1000; + + diff --git a/test/sql/include/plan_ordered_append_query.sql b/test/sql/include/plan_ordered_append_query.sql index ed4c00e86..1820833c4 100644 --- a/test/sql/include/plan_ordered_append_query.sql +++ b/test/sql/include/plan_ordered_append_query.sql @@ -379,6 +379,22 @@ LEFT OUTER JOIN LATERAL( -- should use 2 ChunkAppend :PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 ON o1.time = o2.time ORDER BY o1.time LIMIT 100; +-- test JOIN on time column with USING +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 USING(time) ORDER BY o1.time LIMIT 100; + +-- test NATURAL JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 NATURAL INNER JOIN ordered_append o2 ORDER BY o1.time LIMIT 100; + +-- test LEFT JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 LEFT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o1.time LIMIT 100; + +-- test RIGHT JOIN on time column +-- should use 2 ChunkAppend +:PREFIX SELECT * FROM ordered_append o1 RIGHT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o2.time LIMIT 100; + -- test JOIN on time column with ON clause expression order switched -- should use 2 ChunkAppend :PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 ON o2.time = o1.time ORDER BY o1.time LIMIT 100; diff --git a/test/sql/rowsecurity-10.sql b/test/sql/rowsecurity-10.sql index 730740723..e8c34595f 100644 --- a/test/sql/rowsecurity-10.sql +++ b/test/sql/rowsecurity-10.sql @@ -1344,6 +1344,9 @@ DROP TABLE copy_rel_to CASCADE; -- Check WHERE CURRENT OF SET SESSION AUTHORIZATION regress_rls_alice; +-- WHERE CURRENT OF does not work with custom scan nodes +-- so we have to disable chunk append here +SET timescaledb.disable_optimizations TO true; CREATE TABLE current_check (currentid int, payload text, rlsuser text); SELECT public.create_hypertable('current_check', 'currentid', chunk_time_interval=>10); @@ -1390,6 +1393,8 @@ FETCH RELATIVE 1 FROM current_check_cursor; DELETE FROM current_check WHERE CURRENT OF current_check_cursor RETURNING *; SELECT * 
FROM current_check; +RESET timescaledb.disable_optimizations; + COMMIT; -- diff --git a/test/sql/rowsecurity-11.sql b/test/sql/rowsecurity-11.sql index 31b72ed3b..864c796c3 100644 --- a/test/sql/rowsecurity-11.sql +++ b/test/sql/rowsecurity-11.sql @@ -1373,6 +1373,10 @@ UPDATE current_check SET payload = payload || '_new' WHERE currentid = 2 RETURNI BEGIN; +-- WHERE CURRENT OF does not work with custom scan nodes +-- so we have to disable chunk append here +SET timescaledb.enable_chunk_append TO false; + DECLARE current_check_cursor SCROLL CURSOR FOR SELECT * FROM current_check; -- Returns rows that can be seen according to SELECT policy, like plain SELECT -- above (even rows) @@ -1392,6 +1396,8 @@ FETCH RELATIVE 1 FROM current_check_cursor; DELETE FROM current_check WHERE CURRENT OF current_check_cursor RETURNING *; SELECT * FROM current_check; +RESET timescaledb.enable_chunk_append; + COMMIT; --
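
Editor's note (not part of the patch): the append_query.sql additions above exercise the new runtime exclusion path through LATERAL joins, subqueries, and EXISTS. The following is a minimal sketch of the kind of query those tests verify, using a hypothetical hypertable named "metrics" and illustrative intervals; it assumes a TimescaleDB build with this change applied. The parameterized qual on the partitioning column under the LATERAL join is what lets ChunkAppend skip chunks per outer row, where a plain Append would scan every chunk.

-- hypothetical hypertable used only for illustration
CREATE TABLE metrics(time timestamptz NOT NULL, device int, value float);
SELECT create_hypertable('metrics', 'time', chunk_time_interval => interval '1 day');
INSERT INTO metrics
SELECT t, 1, random()
FROM generate_series('2000-01-01'::timestamptz, '2000-01-10'::timestamptz, '1 hour') t;

-- With ChunkAppend replacing Append under the LATERAL join, the chunk scans are
-- parameterized by g.day and can be excluded at runtime; the plan should show a
-- Custom Scan (ChunkAppend) node reporting excluded chunks, in addition to the
-- "Chunks excluded during startup" line visible in the expected output above.
EXPLAIN (ANALYZE, COSTS OFF, TIMING OFF)
SELECT g.day, m.time
FROM generate_series('2000-01-01'::timestamptz, '2000-01-03'::timestamptz, '1 day') g(day)
LEFT JOIN LATERAL (
  SELECT time FROM metrics m WHERE m.time = g.day LIMIT 1
) m ON true;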