Use ChunkAppend to replace Append nodes
This patch makes TimescaleDB use ChunkAppend in places where it previously used ConstraintAwareAppend. ConstraintAwareAppend will still be used for MergeAppend nodes that cannot be changed to Ordered Append, and when ChunkAppend is disabled. When a query on a hypertable is identified as benefiting from execution exclusion, Append nodes will be replaced by ChunkAppend nodes. This enables runtime exclusion for joins, lateral joins, subqueries, and correlated subqueries.
This commit is contained in:
parent 0406ba9a1b
commit c5c13415f0
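A minimal sketch of what runtime exclusion enables, taken from this commit's regression tests (they assume a metrics_timestamptz hypertable partitioned on time): the inner qual compares against m1.time, which is only known per outer row at execution time, so planning-time exclusion cannot help. ChunkAppend now skips non-matching chunks per row and reports this as "Chunks excluded during runtime" in EXPLAIN ANALYZE.

-- Runtime exclusion with a LATERAL join: for every outer row, the inner
-- ChunkAppend re-evaluates which chunks can contain m1.time and executes
-- only those subplans.
SELECT m1.time, m2.time
FROM metrics_timestamptz m1
LEFT JOIN LATERAL (
    SELECT time
    FROM metrics_timestamptz m2
    WHERE m1.time = m2.time
    LIMIT 1
) m2 ON true
ORDER BY m1.time;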
@ -6,6 +6,9 @@ accidentally triggering the load of a previous DB version.**

## 1.4.0 (unreleased)

**Major features**
* #1270 Use ChunkAppend to replace Append nodes

**Minor features**
* #1273 Propagate quals to joined hypertables
@ -7,6 +7,7 @@
#include <nodes/nodeFuncs.h>
#include <optimizer/clauses.h>
#include <optimizer/pathnode.h>
#include <optimizer/paths.h>
#include <optimizer/tlist.h>
#include <utils/builtins.h>
#include <utils/typcache.h>
@ -81,8 +82,16 @@ ts_chunk_append_path_create(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht,
children = castNode(AppendPath, subpath)->subpaths;
break;
case T_MergeAppendPath:
if (!ordered)
return subpath;
/*
 * check if ordered append is applicable, only assert ordered here
 * checked properly in ts_ordered_append_should_optimize
 */
Assert(ordered);

/*
 * we only push down LIMIT for ordered append
 */
path->pushdown_limit = true;
children = castNode(MergeAppendPath, subpath)->subpaths;
path->cpath.path.pathkeys = subpath->pathkeys;
break;
@ -184,7 +193,7 @@ ts_chunk_append_path_create(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht,
 * We do this to prevent planner choosing parallel plan which might
 * otherwise look preferable cost wise.
 */
if (root->limit_tuples == -1.0 || rows < root->limit_tuples)
if (!path->pushdown_limit || root->limit_tuples == -1.0 || rows < root->limit_tuples)
{
total_cost += child->total_cost;
rows += child->rows;
@ -205,7 +214,7 @@ ts_chunk_append_path_create(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht,
 */
bool
ts_ordered_append_should_optimize(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht,
List *join_conditions, bool *reverse)
List *join_conditions, int *order_attno, bool *reverse)
{
SortGroupClause *sort = linitial(root->parse->sortClause);
TargetEntry *tle = get_sortgroupref_tle(sort->tleSortGroupRef, root->parse->targetList);
@ -270,8 +279,9 @@ ts_ordered_append_should_optimize(PlannerInfo *root, RelOptInfo *rel, Hypertable
if (namestrcmp(&ht->space->dimensions[0].fd.column_name, column) != 0)
return false;

if (reverse != NULL)
*reverse = sort->sortop == tce->lt_opr ? false : true;
Assert(order_attno != NULL && reverse != NULL);
*order_attno = ht_var->varattno;
*reverse = sort->sortop == tce->lt_opr ? false : true;

return true;
}
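The pushdown_limit flag set above pays off once an ordered append carries a LIMIT; the max(time) subquery in the regression tests below is a sketch of the shape. With chunks appended in time-descending order, the scan stops after the first chunk produces a row, and EXPLAIN shows the remaining chunks as "never executed".

-- Ordered append plus pushed-down LIMIT: planned as ORDER BY time DESC
-- LIMIT 1 over a ChunkAppend, so only the newest chunk is scanned.
SELECT max(time) FROM metrics_timestamptz;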
@ -17,12 +17,14 @@ typedef struct ChunkAppendPath
CustomPath cpath;
bool startup_exclusion;
bool runtime_exclusion;
bool pushdown_limit;
} ChunkAppendPath;

extern Path *ts_chunk_append_path_create(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht,
Path *subpath, bool ordered, List *nested_oids);

extern bool ts_ordered_append_should_optimize(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht,
List *join_conditions, bool *reverse);
List *join_conditions, int *order_attno,
bool *reverse);

#endif /* TIMESCALEDB_CHUNK_APPEND_H */
@ -6,16 +6,22 @@

#include <postgres.h>
#include <miscadmin.h>
#include <executor/executor.h>
#include <executor/nodeSubplan.h>
#include <nodes/bitmapset.h>
#include <nodes/makefuncs.h>
#include <nodes/nodeFuncs.h>
#include <nodes/relation.h>
#include <optimizer/clauses.h>
#include <optimizer/cost.h>
#include <optimizer/plancat.h>
#include <optimizer/predtest.h>
#include <optimizer/prep.h>
#include <optimizer/restrictinfo.h>
#include <parser/parsetree.h>
#include <rewrite/rewriteManip.h>
#include <utils/typcache.h>
#include <utils/memutils.h>
#include <utils/typcache.h>

#include "chunk_append/chunk_append.h"
#include "chunk_append/exec.h"
@ -37,13 +43,12 @@ static CustomExecMethods chunk_append_state_methods = {
};

static List *constify_restrictinfos(PlannerInfo *root, List *restrictinfos);
static bool can_exclude_chunk(PlannerInfo *root, EState *estate, Index rt_index,
List *restrictinfos);
static bool can_exclude_chunk(List *constraints, List *restrictinfos);
static void do_startup_exclusion(ChunkAppendState *state);
static Node *constify_param_mutator(Node *node, void *context);
static List *constify_restrictinfo_params(PlannerInfo *root, EState *state, List *restrictinfos);

static void adjust_ri_clauses(ChunkAppendState *state, List *initial_rt_indexes);
static void initialize_constraints(ChunkAppendState *state, List *initial_rt_indexes);

Node *
chunk_append_state_create(CustomScan *cscan)
@ -54,20 +59,14 @@ chunk_append_state_create(CustomScan *cscan)

state->csstate.methods = &chunk_append_state_methods;

state->num_subplans = 0;
state->subplanstates = NULL;

state->initial_subplans = cscan->custom_plans;
state->initial_ri_clauses = lsecond(cscan->custom_private);
adjust_ri_clauses(state, lthird(cscan->custom_private));
state->sort_options = lfourth(cscan->custom_private);

state->startup_exclusion = (bool) linitial_oid(linitial(cscan->custom_private));
state->runtime_exclusion = (bool) lsecond_oid(linitial(cscan->custom_private));
state->limit = lthird_oid(linitial(cscan->custom_private));

state->current = 0;
state->runtime_initialized = false;
state->filtered_subplans = state->initial_subplans;
state->filtered_ri_clauses = state->initial_ri_clauses;
@ -79,30 +78,33 @@ do_startup_exclusion(ChunkAppendState *state)
{
List *filtered_children = NIL;
List *filtered_ri_clauses = NIL;
List *filtered_constraints = NIL;
ListCell *lc_plan;
ListCell *lc_clauses;
ListCell *lc_constraints;

/*
 * create skeleton plannerinfo to reuse some PostgreSQL planner functions
 * create skeleton plannerinfo for estimate_expression_value
 */
Query parse = {
.resultRelation = InvalidOid,
};
PlannerGlobal glob = {
.boundParams = NULL,
};
PlannerInfo root = {
.glob = &glob,
.parse = &parse,
};

/*
 * clauses should always have the same length as appendplans because
 * the list of clauses is built from the list of appendplans
 * clauses and constraints should always have the same length as initial_subplans
 */
Assert(list_length(state->initial_subplans) == list_length(state->initial_ri_clauses));
Assert(list_length(state->initial_subplans) == list_length(state->initial_constraints));

forboth (lc_plan, state->initial_subplans, lc_clauses, state->initial_ri_clauses)
forthree (lc_plan,
state->initial_subplans,
lc_constraints,
state->initial_constraints,
lc_clauses,
state->initial_ri_clauses)
{
List *restrictinfos = NIL;
List *ri_clauses = lfirst(lc_clauses);
@ -123,18 +125,18 @@ do_startup_exclusion(ChunkAppendState *state)
}
restrictinfos = constify_restrictinfos(&root, restrictinfos);

if (can_exclude_chunk(&root,
state->csstate.ss.ps.state,
scan->scanrelid,
restrictinfos))
if (can_exclude_chunk(lfirst(lc_constraints), restrictinfos))
continue;
}

filtered_children = lappend(filtered_children, lfirst(lc_plan));
filtered_ri_clauses = lappend(filtered_ri_clauses, ri_clauses);
filtered_constraints = lappend(filtered_constraints, lfirst(lc_constraints));
}

state->filtered_subplans = filtered_children;
state->filtered_ri_clauses = filtered_ri_clauses;
state->filtered_constraints = filtered_constraints;
}

static void
@ -145,6 +147,9 @@ chunk_append_begin(CustomScanState *node, EState *estate, int eflags)
ListCell *lc;
int i;

Assert(list_length(cscan->custom_plans) == list_length(state->initial_subplans));
initialize_constraints(state, lthird(cscan->custom_private));

if (state->startup_exclusion)
do_startup_exclusion(state);

@ -175,29 +180,42 @@ chunk_append_begin(CustomScanState *node, EState *estate, int eflags)

i++;
}

if (state->runtime_exclusion)
{
state->params = state->subplanstates[0]->plan->allParam;
/*
 * make sure all params are initialized for runtime exclusion
 */
node->ss.ps.chgParam = state->subplanstates[0]->plan->allParam;
}
}

/*
 * build bitmap of valid subplans for runtime exclusion
 */
static void
initialize_runtime_exclusion(ChunkAppendState *state)
{
ListCell *lc_clauses;
ListCell *lc_clauses, *lc_constraints;
int i = 0;

Query parse = {
.resultRelation = InvalidOid,
};
PlannerGlobal glob = {
.boundParams = NULL,
};
PlannerInfo root = {
.glob = &glob,
.parse = &parse,
};

Assert(state->num_subplans == list_length(state->filtered_ri_clauses));

lc_clauses = list_head(state->filtered_ri_clauses);
lc_constraints = list_head(state->filtered_constraints);

state->runtime_number_loops++;
/*
 * mark subplans as active/inactive in valid_subplans
 */
for (i = 0; i < state->num_subplans; i++)
{
PlanState *ps = state->subplanstates[i];
@ -219,11 +237,14 @@ initialize_runtime_exclusion(ChunkAppendState *state)
}
restrictinfos = constify_restrictinfo_params(&root, ps->state, restrictinfos);

if (!can_exclude_chunk(&root, ps->state, scan->scanrelid, restrictinfos))
if (!can_exclude_chunk(lfirst(lc_constraints), restrictinfos))
state->valid_subplans = bms_add_member(state->valid_subplans, i);
else
state->runtime_number_exclusions++;
}

lc_clauses = lnext(lc_clauses);
lc_constraints = lnext(lc_constraints);
}

state->runtime_initialized = true;
@ -251,7 +272,7 @@ chunk_append_exec(CustomScanState *node)
{
initialize_runtime_exclusion(state);

if (!state->valid_subplans || bms_num_members(state->valid_subplans) == 0)
if (bms_is_empty(state->valid_subplans))
return ExecClearTuple(node->ss.ps.ps_ResultTupleSlot);

state->current = bms_next_member(state->valid_subplans, -1);
@ -288,9 +309,8 @@ chunk_append_exec(CustomScanState *node)
if (!TupIsNull(subslot))
{
/*
 * If the subplan gave us something then return it as-is. We do
 * NOT make use of the result slot that was set up in
 * chunk_append_begin there's no need for it.
 * If the subplan gave us something check if we need
 * to do projection otherwise return as is.
 */
if (node->ss.ps.ps_ProjInfo == NULL)
return subslot;
@ -348,11 +368,17 @@ chunk_append_rescan(CustomScanState *node)

for (i = 0; i < state->num_subplans; i++)
{
if (node->ss.ps.chgParam != NULL)
UpdateChangedParamSet(state->subplanstates[i], node->ss.ps.chgParam);

ExecReScan(state->subplanstates[i]);
}
state->current = 0;

if (state->runtime_exclusion)
/*
 * detect changed params and reset runtime exclusion state
 */
if (state->runtime_exclusion && bms_overlap(node->ss.ps.chgParam, state->params))
{
bms_free(state->valid_subplans);
state->valid_subplans = NULL;
@ -411,20 +437,26 @@ constify_param_mutator(Node *node, void *context)
if (IsA(node, Param))
{
Param *param = castNode(Param, node);
EState *state = (EState *) context;
EState *estate = (EState *) context;

if (param->paramkind == PARAM_EXEC)
{
TypeCacheEntry *tce = lookup_type_cache(param->paramtype, 0);
ParamExecData value = state->es_param_exec_vals[param->paramid];
ParamExecData prm = estate->es_param_exec_vals[param->paramid];

if (!value.execPlan)
if (prm.execPlan != NULL)
{
ExprContext *econtext = GetPerTupleExprContext(estate);
ExecSetParamPlan(prm.execPlan, econtext);
}

if (prm.execPlan == NULL)
return (Node *) makeConst(param->paramtype,
param->paramtypmod,
param->paramcollid,
tce->typlen,
value.value,
value.isnull,
prm.value,
prm.isnull,
tce->typbyval);
}
return node;
@ -434,37 +466,166 @@ constify_param_mutator(Node *node, void *context)
}

/*
 * Exclude child relations (chunks) at execution time based on constraints.
 *
 * This functions tries to reuse as much functionality as possible from standard
 * constraint exclusion in PostgreSQL that normally happens at planning
 * time. Therefore, we need to fake a number of planning-related data
 * structures.
 * stripped down version of postgres get_relation_constraints
 */
static bool
can_exclude_chunk(PlannerInfo *root, EState *estate, Index rt_index, List *restrictinfos)
static List *
ca_get_relation_constraints(Oid relationObjectId, Index varno, bool include_notnull)
{
RangeTblEntry *rte = rt_fetch(rt_index, estate->es_range_table);
RelOptInfo rel = {
.type = T_RelOptInfo,
.relid = rt_index,
.reloptkind = RELOPT_OTHER_MEMBER_REL,
.baserestrictinfo = restrictinfos,
};
List *result = NIL;
Relation relation;
TupleConstr *constr;

return rte->rtekind == RTE_RELATION && rte->relkind == RELKIND_RELATION && !rte->inh &&
relation_excluded_by_constraints(root, &rel, rte);
/*
 * We assume the relation has already been safely locked.
 */
relation = heap_open(relationObjectId, NoLock);

constr = relation->rd_att->constr;
if (constr != NULL)
{
int num_check = constr->num_check;
int i;

for (i = 0; i < num_check; i++)
{
Node *cexpr;

/*
 * If this constraint hasn't been fully validated yet, we must
 * ignore it here.
 */
if (!constr->check[i].ccvalid)
continue;

cexpr = stringToNode(constr->check[i].ccbin);

/*
 * Run each expression through const-simplification and
 * canonicalization. This is not just an optimization, but is
 * necessary, because we will be comparing it to
 * similarly-processed qual clauses, and may fail to detect valid
 * matches without this. This must match the processing done to
 * qual clauses in preprocess_expression()! (We can skip the
 * stuff involving subqueries, however, since we don't allow any
 * in check constraints.)
 */
cexpr = eval_const_expressions(NULL, cexpr);

#if (PG96 && PG_VERSION_NUM < 90609) || (PG10 && PG_VERSION_NUM < 100004)
cexpr = (Node *) canonicalize_qual((Expr *) cexpr);
#elif PG96 || PG10
cexpr = (Node *) canonicalize_qual_ext((Expr *) cexpr, true);
#else
cexpr = (Node *) canonicalize_qual((Expr *) cexpr, true);
#endif

/* Fix Vars to have the desired varno */
if (varno != 1)
ChangeVarNodes(cexpr, 1, varno, 0);

/*
 * Finally, convert to implicit-AND format (that is, a List) and
 * append the resulting item(s) to our output list.
 */
result = list_concat(result, make_ands_implicit((Expr *) cexpr));
}

/* Add NOT NULL constraints in expression form, if requested */
if (include_notnull && constr->has_not_null)
{
int natts = relation->rd_att->natts;

for (i = 1; i <= natts; i++)
{
Form_pg_attribute att = TupleDescAttr(relation->rd_att, i - 1);

if (att->attnotnull && !att->attisdropped)
{
NullTest *ntest = makeNode(NullTest);

ntest->arg = (Expr *)
makeVar(varno, i, att->atttypid, att->atttypmod, att->attcollation, 0);
ntest->nulltesttype = IS_NOT_NULL;

/*
 * argisrow=false is correct even for a composite column,
 * because attnotnull does not represent a SQL-spec IS NOT
 * NULL test in such a case, just IS DISTINCT FROM NULL.
 */
ntest->argisrow = false;
ntest->location = -1;
result = lappend(result, ntest);
}
}
}
}

heap_close(relation, NoLock);

return result;
}

/*
 * Adjust the RangeTableEntry indexes in the restrictinfo
 * clauses because during planning subquery indexes will be
 * different from the final index after flattening.
 * Exclude child relations (chunks) at execution time based on constraints.
 *
 * constraints is the list of constraint expressions of the relation
 * baserestrictinfo is the list of RestrictInfos
 */
static bool
can_exclude_chunk(List *constraints, List *baserestrictinfo)
{
/*
 * Regardless of the setting of constraint_exclusion, detect
 * constant-FALSE-or-NULL restriction clauses. Because const-folding will
 * reduce "anything AND FALSE" to just "FALSE", any such case should
 * result in exactly one baserestrictinfo entry. This doesn't fire very
 * often, but it seems cheap enough to be worth doing anyway. (Without
 * this, we'd miss some optimizations that 9.5 and earlier found via much
 * more roundabout methods.)
 */
if (list_length(baserestrictinfo) == 1)
{
RestrictInfo *rinfo = (RestrictInfo *) linitial(baserestrictinfo);
Expr *clause = rinfo->clause;

if (clause && IsA(clause, Const) &&
(((Const *) clause)->constisnull || !DatumGetBool(((Const *) clause)->constvalue)))
return true;
}

/*
 * The constraints are effectively ANDed together, so we can just try to
 * refute the entire collection at once. This may allow us to make proofs
 * that would fail if we took them individually.
 *
 * Note: we use rel->baserestrictinfo, not safe_restrictions as might seem
 * an obvious optimization. Some of the clauses might be OR clauses that
 * have volatile and nonvolatile subclauses, and it's OK to make
 * deductions with the nonvolatile parts.
 *
 * We need strong refutation because we have to prove that the constraints
 * would yield false, not just NULL.
 */
#if PG96
if (predicate_refuted_by(constraints, baserestrictinfo))
#else
if (predicate_refuted_by(constraints, baserestrictinfo, false))
#endif
return true;

return false;
}

/*
 * Fetch the constraints for a relation and adjust range table indexes
 * if necessary.
 */
static void
adjust_ri_clauses(ChunkAppendState *state, List *initial_rt_indexes)
initialize_constraints(ChunkAppendState *state, List *initial_rt_indexes)
{
ListCell *lc_clauses, *lc_plan, *lc_relid;
List *constraints = NIL;
EState *estate = state->csstate.ss.ps.state;

if (initial_rt_indexes == NIL)
return;
@ -481,10 +642,24 @@ adjust_ri_clauses(ChunkAppendState *state, List *initial_rt_indexes)
{
Scan *scan = chunk_append_get_scan_plan(lfirst(lc_plan));
Index initial_index = lfirst_oid(lc_relid);
List *relation_constraints = NIL;

if (scan != NULL && scan->scanrelid > 0 && scan->scanrelid != initial_index)
if (scan != NULL && scan->scanrelid > 0)
{
ChangeVarNodes(lfirst(lc_clauses), initial_index, scan->scanrelid, 0);
Index rt_index = scan->scanrelid;
RangeTblEntry *rte = rt_fetch(rt_index, estate->es_range_table);
relation_constraints = ca_get_relation_constraints(rte->relid, rt_index, true);

/*
 * Adjust the RangeTableEntry indexes in the restrictinfo
 * clauses because during planning subquery indexes may be
 * different from the final index after flattening.
 */
if (rt_index != initial_index)
ChangeVarNodes(lfirst(lc_clauses), initial_index, scan->scanrelid, 0);
}
constraints = lappend(constraints, relation_constraints);
}
state->initial_constraints = constraints;
state->filtered_constraints = constraints;
}
@ -26,20 +26,29 @@ typedef struct ChunkAppendState

/* list of subplans after planning */
List *initial_subplans;
/* list of restrictinfo clauses indexed similar to initial_subplans */
/* list of constraints indexed like initial_subplans */
List *initial_constraints;
/* list of restrictinfo clauses indexed like initial_subplans */
List *initial_ri_clauses;

/* list of subplans after startup exclusion */
List *filtered_subplans;
/* list of relation constraints after startup exclusion */
List *filtered_constraints;
/* list of restrictinfo clauses after startup exclusion */
List *filtered_ri_clauses;

/* valid subplans for runtime exclusion */
Bitmapset *valid_subplans;
Bitmapset *params;

/* sort options if this append is ordered */
/* sort options if this append is ordered, only used for EXPLAIN */
List *sort_options;

/* number of loops and exclusions for EXPLAIN */
int runtime_number_loops;
int runtime_number_exclusions;

} ChunkAppendState;

extern Node *chunk_append_state_create(CustomScan *cscan);
@ -45,12 +45,11 @@ chunk_append_explain(CustomScanState *node, List *ancestors, ExplainState *es)
list_length(node->custom_ps),
es);

if (state->runtime_exclusion)
ExplainPropertyIntegerCompat("Chunks excluded during runtime",
NULL,
list_length(state->filtered_subplans) -
bms_num_members(state->valid_subplans),
es);
if (state->runtime_exclusion && state->runtime_number_loops > 0)
{
int avg_excluded = state->runtime_number_exclusions / state->runtime_number_loops;
ExplainPropertyIntegerCompat("Chunks excluded during runtime", NULL, avg_excluded, es);
}
}

/*
@ -101,7 +101,21 @@ chunk_append_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPath *path, L
cscan->custom_scan_tlist = tlist;
cscan->scan.plan.targetlist = tlist;

if (path->path.pathkeys != NIL)
if (path->path.pathkeys == NIL)
{
ListCell *lc_plan, *lc_path;
forboth (lc_path, path->custom_paths, lc_plan, custom_plans)
{
Plan *child_plan = lfirst(lc_plan);
Path *child_path = lfirst(lc_path);
AppendRelInfo *appinfo = get_appendrelinfo(root, child_path->parent->relid);

/* push down targetlist to children */
child_plan->targetlist =
(List *) adjust_appendrel_attrs_compat(root, (Node *) tlist, appinfo);
}
}
else
{
/*
 * If this is an ordered append node we need to ensure the columns
@ -226,7 +240,7 @@ chunk_append_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPath *path, L
Assert(list_length(chunk_ri_clauses) == list_length(chunk_rt_indexes));
}

if (root->limit_tuples > 0 && root->limit_tuples <= PG_UINT32_MAX)
if (capath->pushdown_limit && root->limit_tuples > 0 && root->limit_tuples <= PG_UINT32_MAX)
limit = root->limit_tuples;

custom_private = list_make1(
@ -570,7 +570,7 @@ find_children_oids(HypertableRestrictInfo *hri, Hypertable *ht, LOCKMODE lockmod

static bool
should_order_append(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht, List *join_conditions,
bool *reverse)
int *order_attno, bool *reverse)
{
/* check if optimizations are enabled */
if (ts_guc_disable_optimizations || !ts_guc_enable_ordered_append ||
@ -584,7 +584,7 @@ should_order_append(PlannerInfo *root, RelOptInfo *rel, Hypertable *ht, List *jo
if (root->parse->sortClause == NIL)
return false;

return ts_ordered_append_should_optimize(root, rel, ht, join_conditions, reverse);
return ts_ordered_append_should_optimize(root, rel, ht, join_conditions, order_attno, reverse);
}

bool
@ -659,6 +659,8 @@ get_explicit_chunk_oids(CollectQualCtx *ctx, Hypertable *ht)
 * takes precedence.
 *
 * If appends are returned in order appends_ordered on rel->fdw_private is set to true.
 * To make verifying pathkeys easier in set_rel_pathlist the attno of the column ordered by
 * is
 * If the hypertable uses space partitioning the nested oids are stored in nested_oids
 * on rel->fdw_private when appends are ordered.
 */
@ -666,6 +668,7 @@ static List *
get_chunk_oids(CollectQualCtx *ctx, PlannerInfo *root, RelOptInfo *rel, Hypertable *ht)
{
bool reverse;
int order_attno;

if (ctx->chunk_exclusion_func == NULL)
{
@ -678,21 +681,29 @@ get_chunk_oids(CollectQualCtx *ctx, PlannerInfo *root, RelOptInfo *rel, Hypertab
 */
ts_hypertable_restrict_info_add(hri, root, ctx->restrictions);

if (should_order_append(root, rel, ht, ctx->join_conditions, &reverse))
/*
 * If fdw_private has not been setup by caller there is no point checking
 * for ordered append as we can't pass the required metadata in fdw_private
 * to signal that this is safe to transform in ordered append plan in
 * set_rel_pathlist.
 */
if (rel->fdw_private != NULL &&
should_order_append(root, rel, ht, ctx->join_conditions, &order_attno, &reverse))
{
TimescaleDBPrivate *private = (TimescaleDBPrivate *) rel->fdw_private;
List **nested_oids = NULL;

if (rel->fdw_private != NULL)
{
((TimescaleDBPrivate *) rel->fdw_private)->appends_ordered = true;
private
->appends_ordered = true;
private
->order_attno = order_attno;

/*
 * for space partitioning we need extra information about the
 * time slices of the chunks
 */
if (ht->space->num_dimensions > 1)
nested_oids = &((TimescaleDBPrivate *) rel->fdw_private)->nested_oids;
}
/*
 * for space partitioning we need extra information about the
 * time slices of the chunks
 */
if (ht->space->num_dimensions > 1)
nested_oids = &private->nested_oids;

return ts_hypertable_restrict_info_get_chunk_oids_ordered(hri,
ht,
@ -63,6 +63,7 @@ static planner_hook_type prev_planner_hook;
static set_rel_pathlist_hook_type prev_set_rel_pathlist_hook;
static get_relation_info_hook_type prev_get_relation_info_hook;
static create_upper_paths_hook_type prev_create_upper_paths_hook;
static bool contain_param(Node *node);

#define CTE_NAME_HYPERTABLES "hypertable_parent"
@ -198,6 +199,65 @@ should_optimize_append(const Path *path)
return false;
}

static inline bool
should_chunk_append(PlannerInfo *root, RelOptInfo *rel, Path *path, bool ordered, int order_attno)
{
if (root->parse->commandType != CMD_SELECT || !ts_guc_enable_chunk_append)
return false;

switch (nodeTag(path))
{
case T_AppendPath:
/*
 * If there are clauses that have mutable functions, or clauses that reference
 * Params this Path might benefit from startup or runtime exclusion
 */
{
ListCell *lc;

foreach (lc, rel->baserestrictinfo)
{
RestrictInfo *rinfo = (RestrictInfo *) lfirst(lc);

if (contain_mutable_functions((Node *) rinfo->clause) ||
contain_param((Node *) rinfo->clause))
return true;
}
return false;
break;
}
case T_MergeAppendPath:
/*
 * Can we do ordered append
 */
{
PathKey *pk;
ListCell *lc;

if (!ordered || path->pathkeys == NIL)
return false;

pk = linitial_node(PathKey, path->pathkeys);

/*
 * check pathkey is compatible with ordered append ordering
 * we created when expanding hypertable
 */
foreach (lc, pk->pk_eclass->ec_members)
{
EquivalenceMember *em = lfirst(lc);
if (!em->em_is_child && IsA(em->em_expr, Var) &&
castNode(Var, em->em_expr)->varattno == order_attno)
return true;
}
return false;
break;
}
default:
return false;
}
}

static inline bool
is_append_child(RelOptInfo *rel, RangeTblEntry *rte)
{
@ -283,6 +343,7 @@ timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, Rang
{
ListCell *lc;
bool ordered = false;
int order_attno = 0;
List *nested_oids = NIL;

if (rel->fdw_private != NULL)
@ -290,6 +351,7 @@ timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, Rang
TimescaleDBPrivate *private = (TimescaleDBPrivate *) rel->fdw_private;

ordered = private->appends_ordered;
order_attno = private->order_attno;
nested_oids = private->nested_oids;
}

@ -300,11 +362,8 @@ timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, Rang
switch (nodeTag(*pathptr))
{
case T_AppendPath:
if (should_optimize_append(*pathptr))
*pathptr = ts_constraint_aware_append_path_create(root, ht, *pathptr);
break;
case T_MergeAppendPath:
if (ordered)
if (should_chunk_append(root, rel, *pathptr, ordered, order_attno))
*pathptr = ts_chunk_append_path_create(root,
rel,
ht,
@ -327,7 +386,14 @@ timescaledb_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, Rang
{
case T_AppendPath:
case T_MergeAppendPath:
if (should_optimize_append(*pathptr))
if (should_chunk_append(root, rel, *pathptr, ordered, order_attno))
*pathptr = ts_chunk_append_path_create(root,
rel,
ht,
*pathptr,
ordered,
nested_oids);
else if (should_optimize_append(*pathptr))
*pathptr = ts_constraint_aware_append_path_create(root, ht, *pathptr);
break;
default:
@ -565,6 +631,24 @@ timescale_create_upper_paths_hook(PlannerInfo *root, UpperRelationKind stage, Re
}
}

static bool
contain_param_exec_walker(Node *node, void *context)
{
if (node == NULL)
return false;

if (IsA(node, Param))
return true;

return expression_tree_walker(node, contain_param_exec_walker, context);
}

static bool
contain_param(Node *node)
{
return contain_param_exec_walker(node, NULL);
}

void
_planner_init(void)
{
@ -9,6 +9,8 @@
typedef struct TimescaleDBPrivate
{
bool appends_ordered;
/* attno of the time dimension in the parent table if appends are ordered */
int order_attno;
List *nested_oids;
} TimescaleDBPrivate;
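should_chunk_append above keys off restriction clauses that contain mutable functions or Params. A sketch of the startup-exclusion shape exercised by the regression tests below (now_s() is the suite's stable helper): the comparison value cannot be folded at plan time, but it is fixed for one execution, so ChunkAppend evaluates it once at executor startup and drops the chunks it can refute ("Chunks excluded during startup").

-- Startup exclusion: now_s() is stable, so its value is only known once
-- execution begins; chunks outside the window are skipped before any
-- subplan runs.
SELECT * FROM append_test
WHERE time > now_s() - interval '2 months';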
@ -151,15 +151,13 @@ psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
QUERY PLAN
----------------------------------------------------------------------------------------------------------------
Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1)
Hypertable: append_test
Chunks left after exclusion: 1
-> Append (actual rows=1 loops=1)
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 2 mons'::interval))
(6 rows)
QUERY PLAN
----------------------------------------------------------------------------------------------------------
Custom Scan (ChunkAppend) on append_test (actual rows=1 loops=1)
Chunks excluded during startup: 2
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 2 mons'::interval))
(4 rows)

-- adding ORDER BY and LIMIT should turn the plan into an optimized
-- ordered append plan
@ -263,22 +261,20 @@ psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------
Sort (actual rows=1 loops=1)
Sort Key: (date_trunc('year'::text, append_test."time")) DESC
Sort Method: quicksort Memory: 25kB
-> HashAggregate (actual rows=1 loops=1)
Group Key: date_trunc('year'::text, append_test."time")
-> Custom Scan (ConstraintAwareAppend) (actual rows=3 loops=1)
Hypertable: append_test
Chunks left after exclusion: 2
-> Append (actual rows=3 loops=1)
-> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
(13 rows)
Group Key: (date_trunc('year'::text, append_test."time"))
-> Custom Scan (ChunkAppend) on append_test (actual rows=3 loops=1)
Chunks excluded during startup: 1
-> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
(11 rows)

-- querying outside the time range should return nothing. This tests
-- that ConstraintAwareAppend can handle the case when an Append node
@ -345,26 +341,24 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------------------------
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------------------
Merge Left Join (actual rows=6 loops=1)
Merge Cond: (period.btime = data.btime)
CTE data
-> HashAggregate (actual rows=3 loops=1)
Group Key: time_bucket('@ 30 days'::interval, append_test."time")
-> Custom Scan (ConstraintAwareAppend) (actual rows=5 loops=1)
Hypertable: append_test
Chunks left after exclusion: 3
-> Append (actual rows=5 loops=1)
-> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
Group Key: (time_bucket('@ 30 days'::interval, append_test."time"))
-> Custom Scan (ChunkAppend) on append_test (actual rows=5 loops=1)
Chunks excluded during startup: 0
-> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
CTE period
-> Function Scan on generate_series "time" (actual rows=6 loops=1)
-> Sort (actual rows=6 loops=1)
@ -375,7 +369,7 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
Sort Key: data.btime
Sort Method: quicksort Memory: 25kB
-> CTE Scan on data (actual rows=3 loops=1)
(28 rows)
(26 rows)

WITH data AS (
SELECT time_bucket(INTERVAL '30 day', TIME) AS btime, AVG(temp) AS VALUE
@ -435,23 +429,19 @@ psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
QUERY PLAN
--------------------------------------------------------------------------------------------------------------------------
QUERY PLAN
--------------------------------------------------------------------------------------------------------------------
Nested Loop (actual rows=1 loops=1)
Join Filter: (a.colorid = j.colorid)
-> Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1)
Hypertable: append_test
Chunks left after exclusion: 1
-> Append (actual rows=1 loops=1)
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1 (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
-> Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1)
Hypertable: join_test
Chunks left after exclusion: 1
-> Append (actual rows=1 loops=1)
-> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1 (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
(14 rows)
-> Custom Scan (ChunkAppend) on append_test a (actual rows=1 loops=1)
Chunks excluded during startup: 2
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1 (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
-> Custom Scan (ChunkAppend) on join_test j (actual rows=1 loops=1)
Chunks excluded during startup: 2
-> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1 (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
(10 rows)

reset enable_hashjoin;
reset enable_mergejoin;
@ -980,25 +970,248 @@ psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
|
||||
psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
|
||||
psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
|
||||
psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
|
||||
QUERY PLAN
|
||||
---------------------------------------------------------------------------
|
||||
QUERY PLAN
|
||||
------------------------------------------------------------------------
|
||||
Sort (actual rows=0 loops=1)
|
||||
Sort Key: append_test."time" DESC
|
||||
Sort Method: quicksort Memory: 25kB
|
||||
-> Custom Scan (ConstraintAwareAppend) (actual rows=0 loops=1)
|
||||
Hypertable: append_test
|
||||
Chunks left after exclusion: 3
|
||||
-> Append (actual rows=0 loops=1)
|
||||
-> Sample Scan on _hyper_1_3_chunk (actual rows=0 loops=1)
|
||||
Sampling: system ('1'::real)
|
||||
Filter: ("time" > (now_s() - '@ 400 days'::interval))
|
||||
-> Sample Scan on _hyper_1_2_chunk (actual rows=0 loops=1)
|
||||
Sampling: system ('1'::real)
|
||||
Filter: ("time" > (now_s() - '@ 400 days'::interval))
|
||||
-> Sample Scan on _hyper_1_1_chunk (actual rows=0 loops=1)
|
||||
Sampling: system ('1'::real)
|
||||
Filter: ("time" > (now_s() - '@ 400 days'::interval))
|
||||
(16 rows)
|
||||
-> Custom Scan (ChunkAppend) on append_test (actual rows=0 loops=1)
|
||||
Chunks excluded during startup: 0
|
||||
-> Sample Scan on _hyper_1_3_chunk (actual rows=0 loops=1)
|
||||
Sampling: system ('1'::real)
|
||||
Filter: ("time" > (now_s() - '@ 400 days'::interval))
|
||||
-> Sample Scan on _hyper_1_2_chunk (actual rows=0 loops=1)
|
||||
Sampling: system ('1'::real)
|
||||
Filter: ("time" > (now_s() - '@ 400 days'::interval))
|
||||
-> Sample Scan on _hyper_1_1_chunk (actual rows=0 loops=1)
|
||||
Sampling: system ('1'::real)
|
||||
Filter: ("time" > (now_s() - '@ 400 days'::interval))
|
||||
(14 rows)
|
||||
|
||||
-- test runtime exclusion
|
||||
-- test runtime exclusion with LATERAL and 2 hypertables
|
||||
:PREFIX SELECT m1.time, m2.time FROM metrics_timestamptz m1 LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m2 WHERE m1.time = m2.time LIMIT 1) m2 ON true ORDER BY m1.time;
|
||||
QUERY PLAN
|
||||
-----------------------------------------------------------------------------------------------------------------------------------------------
|
||||
Nested Loop Left Join (actual rows=745 loops=1)
|
||||
-> Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=745 loops=1)
|
||||
Order: m1."time"
|
||||
-> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (actual rows=112 loops=1)
|
||||
Heap Fetches: 112
|
||||
-> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (actual rows=168 loops=1)
|
||||
Heap Fetches: 168
|
||||
-> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (actual rows=168 loops=1)
|
||||
Heap Fetches: 168
|
||||
-> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4 (actual rows=168 loops=1)
|
||||
Heap Fetches: 168
|
||||
-> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5 (actual rows=129 loops=1)
|
||||
Heap Fetches: 129
|
||||
-> Limit (actual rows=1 loops=745)
|
||||
-> Custom Scan (ChunkAppend) on metrics_timestamptz m2 (actual rows=1 loops=745)
|
||||
Chunks excluded during runtime: 4
|
||||
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_1 (actual rows=1 loops=112)
|
||||
Index Cond: ("time" = m1."time")
|
||||
Heap Fetches: 112
|
||||
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_2 (actual rows=1 loops=168)
|
||||
Index Cond: ("time" = m1."time")
|
||||
Heap Fetches: 168
|
||||
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3 (actual rows=1 loops=168)
|
||||
Index Cond: ("time" = m1."time")
|
||||
Heap Fetches: 168
|
||||
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_4 (actual rows=1 loops=168)
|
||||
Index Cond: ("time" = m1."time")
|
||||
Heap Fetches: 168
|
||||
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_5 (actual rows=1 loops=129)
|
||||
Index Cond: ("time" = m1."time")
|
||||
Heap Fetches: 129
|
||||
(31 rows)
|
||||
|
||||
-- test runtime exclusion with LATERAL and generate_series
|
||||
:PREFIX SELECT g.time FROM generate_series('2000-01-01'::timestamptz, '2000-02-01'::timestamptz, '1d'::interval) g(time) LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m WHERE m.time=g.time LIMIT 1) m ON true;
|
||||
QUERY PLAN
|
||||
-----------------------------------------------------------------------------------------------------------------------------------------
|
||||
Nested Loop Left Join (actual rows=32 loops=1)
|
||||
-> Function Scan on generate_series g (actual rows=32 loops=1)
|
||||
-> Limit (actual rows=1 loops=32)
|
||||
-> Custom Scan (ChunkAppend) on metrics_timestamptz m (actual rows=1 loops=32)
|
||||
Chunks excluded during runtime: 4
|
||||
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1 (actual rows=1 loops=5)
|
||||
Index Cond: ("time" = g."time")
|
||||
Heap Fetches: 5
|
||||
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2 (actual rows=1 loops=7)
|
||||
Index Cond: ("time" = g."time")
|
||||
Heap Fetches: 7
|
||||
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3 (actual rows=1 loops=7)
|
||||
Index Cond: ("time" = g."time")
|
||||
Heap Fetches: 7
|
||||
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4 (actual rows=1 loops=7)
|
||||
Index Cond: ("time" = g."time")
|
||||
Heap Fetches: 7
|
||||
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5 (actual rows=1 loops=6)
|
||||
Index Cond: ("time" = g."time")
|
||||
Heap Fetches: 6
|
||||
(20 rows)
|
||||
|
||||
:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time) m ON true;
|
||||
QUERY PLAN
|
||||
----------------------------------------------------------------------------------------------------------------------------------------------
|
||||
Merge Join (actual rows=32 loops=1)
|
||||
Merge Cond: (m."time" = g."time")
|
||||
-> Merge Append (actual rows=745 loops=1)
|
||||
Sort Key: m."time"
|
||||
-> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m (actual rows=112 loops=1)
|
||||
Heap Fetches: 112
|
||||
-> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_1 (actual rows=168 loops=1)
|
||||
Heap Fetches: 168
|
||||
-> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_2 (actual rows=168 loops=1)
|
||||
Heap Fetches: 168
|
||||
-> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_3 (actual rows=168 loops=1)
|
||||
Heap Fetches: 168
|
||||
-> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_4 (actual rows=129 loops=1)
|
||||
Heap Fetches: 129
|
||||
-> Sort (actual rows=32 loops=1)
|
||||
Sort Key: g."time"
|
||||
Sort Method: quicksort Memory: 26kB
|
||||
-> Function Scan on generate_series g (actual rows=32 loops=1)
|
||||
(18 rows)
|
||||
|
||||
:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time ORDER BY time) m ON true;
|
||||
QUERY PLAN
|
||||
-----------------------------------------------------------------------------------------------------------------------------------
|
||||
Nested Loop (actual rows=32 loops=1)
|
||||
-> Function Scan on generate_series g (actual rows=32 loops=1)
|
||||
-> Custom Scan (ChunkAppend) on metrics_timestamptz m (actual rows=1 loops=32)
|
||||
Chunks excluded during runtime: 4
|
||||
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1 (actual rows=1 loops=5)
|
||||
Index Cond: ("time" = g."time")
|
||||
Heap Fetches: 5
|
||||
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2 (actual rows=1 loops=7)
|
||||
Index Cond: ("time" = g."time")
|
||||
Heap Fetches: 7
|
||||
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3 (actual rows=1 loops=7)
|
||||
Index Cond: ("time" = g."time")
|
||||
Heap Fetches: 7
|
||||
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4 (actual rows=1 loops=7)
|
||||
Index Cond: ("time" = g."time")
|
||||
Heap Fetches: 7
|
||||
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5 (actual rows=1 loops=6)
|
||||
Index Cond: ("time" = g."time")
|
||||
Heap Fetches: 6
|
||||
(19 rows)
|
||||
|
||||
-- test runtime exclusion with subquery
|
||||
:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE m1.time=(SELECT max(time) FROM metrics_timestamptz);
|
||||
QUERY PLAN
|
||||
-----------------------------------------------------------------------------------------------------------------------------------------------
|
||||
Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=1 loops=1)
|
||||
Chunks excluded during runtime: 4
|
||||
InitPlan 2 (returns $1)
|
||||
-> Result (actual rows=1 loops=1)
|
||||
InitPlan 1 (returns $0)
|
||||
-> Limit (actual rows=1 loops=1)
|
||||
-> Custom Scan (ChunkAppend) on metrics_timestamptz (actual rows=1 loops=1)
|
||||
Order: metrics_timestamptz."time" DESC
|
||||
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk (actual rows=1 loops=1)
|
||||
Index Cond: ("time" IS NOT NULL)
|
||||
Heap Fetches: 1
|
||||
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk (never executed)
|
||||
Index Cond: ("time" IS NOT NULL)
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk (never executed)
|
||||
Index Cond: ("time" IS NOT NULL)
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk (never executed)
|
||||
Index Cond: ("time" IS NOT NULL)
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk (never executed)
|
||||
Index Cond: ("time" IS NOT NULL)
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (never executed)
|
||||
Index Cond: ("time" = $1)
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (never executed)
|
||||
Index Cond: ("time" = $1)
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (never executed)
|
||||
Index Cond: ("time" = $1)
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4 (never executed)
|
||||
Index Cond: ("time" = $1)
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5 (actual rows=1 loops=1)
|
||||
Index Cond: ("time" = $1)
|
||||
Heap Fetches: 1
|
||||
(38 rows)
|
||||
|
||||
-- test runtime exclusion with correlated subquery
|
||||
:PREFIX SELECT m1.time, (SELECT m2.time FROM metrics_timestamptz m2 WHERE m2.time < m1.time ORDER BY m2.time DESC LIMIT 1) FROM metrics_timestamptz m1 WHERE m1.time < '2000-01-10' ORDER BY m1.time;
|
||||
QUERY PLAN
|
||||
----------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=216 loops=1)
|
||||
Order: m1."time"
|
||||
-> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (actual rows=112 loops=1)
|
||||
Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)
|
||||
Heap Fetches: 112
|
||||
SubPlan 1
|
||||
-> Limit (actual rows=1 loops=216)
|
||||
-> Custom Scan (ChunkAppend) on metrics_timestamptz m2 (actual rows=1 loops=216)
|
||||
Order: m2."time" DESC
|
||||
Chunks excluded during runtime: 3
|
||||
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_1 (never executed)
|
||||
Index Cond: ("time" < m1_1."time")
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_2 (never executed)
|
||||
Index Cond: ("time" < m1_1."time")
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3 (never executed)
|
||||
Index Cond: ("time" < m1_1."time")
|
||||
Heap Fetches: 0
|
||||
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_4 (actual rows=1 loops=103)
|
||||
Index Cond: ("time" < m1_1."time")
|
||||
Heap Fetches: 103
|
||||
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_5 (actual rows=1 loops=113)
|
||||
Index Cond: ("time" < m1_1."time")
|
||||
Heap Fetches: 112
|
||||
-> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (actual rows=104 loops=1)
|
||||
Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)
|
||||
Heap Fetches: 104
|
||||
(28 rows)
|
||||
|
-- test EXISTS
:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE EXISTS(SELECT 1 FROM metrics_timestamptz m2 WHERE m1.time < m2.time) ORDER BY m1.time DESC limit 1000;
QUERY PLAN
--------------------------------------------------------------------------------------------------------------------------------------------
Limit (actual rows=744 loops=1)
-> Nested Loop Semi Join (actual rows=744 loops=1)
-> Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=745 loops=1)
Order: m1."time" DESC
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_1 (actual rows=129 loops=1)
Heap Fetches: 129
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_2 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_4 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_5 (actual rows=112 loops=1)
Heap Fetches: 112
-> Append (actual rows=1 loops=745)
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2 (actual rows=0 loops=745)
Index Cond: ("time" > m1."time")
Heap Fetches: 111
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_1 (actual rows=0 loops=634)
Index Cond: ("time" > m1."time")
Heap Fetches: 168
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_2 (actual rows=0 loops=466)
Index Cond: ("time" > m1."time")
Heap Fetches: 168
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_3 (actual rows=1 loops=298)
Index Cond: ("time" > m1."time")
Heap Fetches: 168
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_4 (actual rows=1 loops=130)
Index Cond: ("time" > m1."time")
Heap Fetches: 129
(30 rows)

--generate the results into two different files
\set ECHO errors

@@ -151,15 +151,13 @@ psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
QUERY PLAN
----------------------------------------------------------------------------------------------------------------
Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1)
Hypertable: append_test
Chunks left after exclusion: 1
-> Append (actual rows=1 loops=1)
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 2 mons'::interval))
(6 rows)
QUERY PLAN
----------------------------------------------------------------------------------------------------------
Custom Scan (ChunkAppend) on append_test (actual rows=1 loops=1)
Chunks excluded during startup: 2
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 2 mons'::interval))
(4 rows)

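The two plans above show the same startup-exclusion query before and after the change; "Chunks left after exclusion: 1" and "Chunks excluded during startup: 2" are the same outcome counted from opposite directions. A minimal sketch for reproducing the comparison by hand, assuming the timescaledb.enable_chunk_append GUC that ships alongside this node (append_test and now_s() come from this test suite):

-- Sketch: compare ChunkAppend against the ConstraintAwareAppend fallback.
EXPLAIN (analyze, costs off)
SELECT * FROM append_test WHERE "time" > now_s() - interval '2 months';
SET timescaledb.enable_chunk_append = false;  -- assumed GUC; falls back to ConstraintAwareAppend
EXPLAIN (analyze, costs off)
SELECT * FROM append_test WHERE "time" > now_s() - interval '2 months';
RESET timescaledb.enable_chunk_append;
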
-- adding ORDER BY and LIMIT should turn the plan into an optimized
-- ordered append plan
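A sketch of the query shape this comment refers to -- ORDER BY on the hypertable's time dimension plus a LIMIT -- using the append_test table from this suite (the temp column is assumed from the surrounding tests):

SELECT "time", temp
FROM append_test
ORDER BY "time" DESC
LIMIT 3;
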
@@ -263,22 +261,20 @@ psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------
Sort (actual rows=1 loops=1)
Sort Key: (date_trunc('year'::text, append_test."time")) DESC
Sort Method: quicksort Memory: 25kB
-> HashAggregate (actual rows=1 loops=1)
Group Key: date_trunc('year'::text, append_test."time")
-> Custom Scan (ConstraintAwareAppend) (actual rows=3 loops=1)
Hypertable: append_test
Chunks left after exclusion: 2
-> Append (actual rows=3 loops=1)
-> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
(13 rows)
-> Custom Scan (ChunkAppend) on append_test (actual rows=3 loops=1)
Chunks excluded during startup: 1
-> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
(11 rows)

-- querying outside the time range should return nothing. This tests
-- that ConstraintAwareAppend can handle the case when an Append node
@@ -345,26 +341,24 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------------------------
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------------------
Merge Left Join (actual rows=6 loops=1)
Merge Cond: (period.btime = data.btime)
CTE data
-> HashAggregate (actual rows=3 loops=1)
Group Key: time_bucket('@ 30 days'::interval, append_test."time")
-> Custom Scan (ConstraintAwareAppend) (actual rows=5 loops=1)
Hypertable: append_test
Chunks left after exclusion: 3
-> Append (actual rows=5 loops=1)
-> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Custom Scan (ChunkAppend) on append_test (actual rows=5 loops=1)
Chunks excluded during startup: 0
-> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk (actual rows=2 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
CTE period
-> Function Scan on generate_series "time" (actual rows=6 loops=1)
-> Sort (actual rows=6 loops=1)
@@ -375,7 +369,7 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
Sort Key: data.btime
Sort Method: quicksort Memory: 25kB
-> CTE Scan on data (actual rows=3 loops=1)
(28 rows)
(26 rows)

WITH data AS (
SELECT time_bucket(INTERVAL '30 day', TIME) AS btime, AVG(temp) AS VALUE
@@ -435,23 +429,19 @@ psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
QUERY PLAN
--------------------------------------------------------------------------------------------------------------------------
QUERY PLAN
--------------------------------------------------------------------------------------------------------------------
Nested Loop (actual rows=1 loops=1)
Join Filter: (a.colorid = j.colorid)
-> Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1)
Hypertable: append_test
Chunks left after exclusion: 1
-> Append (actual rows=1 loops=1)
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1 (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
-> Custom Scan (ConstraintAwareAppend) (actual rows=1 loops=1)
Hypertable: join_test
Chunks left after exclusion: 1
-> Append (actual rows=1 loops=1)
-> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1 (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
(14 rows)
-> Custom Scan (ChunkAppend) on append_test a (actual rows=1 loops=1)
Chunks excluded during startup: 2
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1 (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
-> Custom Scan (ChunkAppend) on join_test j (actual rows=1 loops=1)
Chunks excluded during startup: 2
-> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1 (actual rows=1 loops=1)
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
(10 rows)

reset enable_hashjoin;
reset enable_mergejoin;
@@ -980,25 +970,248 @@ psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
QUERY PLAN
---------------------------------------------------------------------------
QUERY PLAN
------------------------------------------------------------------------
Sort (actual rows=0 loops=1)
Sort Key: append_test."time" DESC
Sort Method: quicksort Memory: 25kB
-> Custom Scan (ConstraintAwareAppend) (actual rows=0 loops=1)
Hypertable: append_test
Chunks left after exclusion: 3
-> Append (actual rows=0 loops=1)
-> Sample Scan on _hyper_1_3_chunk (actual rows=0 loops=1)
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
-> Sample Scan on _hyper_1_2_chunk (actual rows=0 loops=1)
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
-> Sample Scan on _hyper_1_1_chunk (actual rows=0 loops=1)
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
(16 rows)
-> Custom Scan (ChunkAppend) on append_test (actual rows=0 loops=1)
Chunks excluded during startup: 0
-> Sample Scan on _hyper_1_3_chunk (actual rows=0 loops=1)
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
-> Sample Scan on _hyper_1_2_chunk (actual rows=0 loops=1)
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
-> Sample Scan on _hyper_1_1_chunk (actual rows=0 loops=1)
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
(14 rows)

-- test runtime exclusion
-- test runtime exclusion with LATERAL and 2 hypertables
:PREFIX SELECT m1.time, m2.time FROM metrics_timestamptz m1 LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m2 WHERE m1.time = m2.time LIMIT 1) m2 ON true ORDER BY m1.time;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------------
Nested Loop Left Join (actual rows=745 loops=1)
-> Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=745 loops=1)
Order: m1."time"
-> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (actual rows=112 loops=1)
Heap Fetches: 112
-> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5 (actual rows=129 loops=1)
Heap Fetches: 129
-> Limit (actual rows=1 loops=745)
-> Custom Scan (ChunkAppend) on metrics_timestamptz m2 (actual rows=1 loops=745)
Chunks excluded during runtime: 4
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_1 (actual rows=1 loops=112)
Index Cond: ("time" = m1."time")
Heap Fetches: 112
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_2 (actual rows=1 loops=168)
Index Cond: ("time" = m1."time")
Heap Fetches: 168
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3 (actual rows=1 loops=168)
Index Cond: ("time" = m1."time")
Heap Fetches: 168
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_4 (actual rows=1 loops=168)
Index Cond: ("time" = m1."time")
Heap Fetches: 168
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_5 (actual rows=1 loops=129)
Index Cond: ("time" = m1."time")
Heap Fetches: 129
(31 rows)

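The plan above is the canonical runtime-exclusion shape: the qual on the partitioning column compares against a value (m1."time") that is only available while the outer plan executes, so the executor skips non-matching chunks per outer row. Skipped chunks report "(never executed)" and are tallied in "Chunks excluded during runtime". A sketch for inspecting this yourself ("timing off" only keeps the EXPLAIN ANALYZE output stable):

EXPLAIN (analyze, costs off, timing off)
SELECT m1.time, m2.time
FROM metrics_timestamptz m1
LEFT JOIN LATERAL (SELECT time FROM metrics_timestamptz m2
                   WHERE m1.time = m2.time LIMIT 1) m2 ON true
ORDER BY m1.time;
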
-- test runtime exclusion with LATERAL and generate_series
:PREFIX SELECT g.time FROM generate_series('2000-01-01'::timestamptz, '2000-02-01'::timestamptz, '1d'::interval) g(time) LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m WHERE m.time=g.time LIMIT 1) m ON true;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------
Nested Loop Left Join (actual rows=32 loops=1)
-> Function Scan on generate_series g (actual rows=32 loops=1)
-> Limit (actual rows=1 loops=32)
-> Custom Scan (ChunkAppend) on metrics_timestamptz m (actual rows=1 loops=32)
Chunks excluded during runtime: 4
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1 (actual rows=1 loops=5)
Index Cond: ("time" = g."time")
Heap Fetches: 5
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2 (actual rows=1 loops=7)
Index Cond: ("time" = g."time")
Heap Fetches: 7
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3 (actual rows=1 loops=7)
Index Cond: ("time" = g."time")
Heap Fetches: 7
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4 (actual rows=1 loops=7)
Index Cond: ("time" = g."time")
Heap Fetches: 7
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5 (actual rows=1 loops=6)
Index Cond: ("time" = g."time")
Heap Fetches: 6
(20 rows)

:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time) m ON true;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------------------------
Merge Join (actual rows=32 loops=1)
Merge Cond: (m."time" = g."time")
-> Merge Append (actual rows=745 loops=1)
Sort Key: m."time"
-> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m (actual rows=112 loops=1)
Heap Fetches: 112
-> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_1 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_2 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_3 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_4 (actual rows=129 loops=1)
Heap Fetches: 129
-> Sort (actual rows=32 loops=1)
Sort Key: g."time"
Sort Method: quicksort Memory: 26kB
-> Function Scan on generate_series g (actual rows=32 loops=1)
(18 rows)

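Note that the Merge Join above reads every chunk: without ORDER BY or LIMIT in the lateral subquery the planner preferred a full MergeAppend over a parameterized nested loop, so runtime exclusion never got a chance to apply. A diagnostic sketch (not part of the test suite) that steers the planner toward the excludable nested-loop shape by disabling merge joins:

SET enable_mergejoin = false;
EXPLAIN (costs off)
SELECT * FROM generate_series('2000-01-01'::timestamptz,
                              '2000-02-01'::timestamptz,
                              '1d'::interval) AS g(time)
INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m
                    WHERE time = g.time) m ON true;
RESET enable_mergejoin;
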
:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time ORDER BY time) m ON true;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------
Nested Loop (actual rows=32 loops=1)
-> Function Scan on generate_series g (actual rows=32 loops=1)
-> Custom Scan (ChunkAppend) on metrics_timestamptz m (actual rows=1 loops=32)
Chunks excluded during runtime: 4
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1 (actual rows=1 loops=5)
Index Cond: ("time" = g."time")
Heap Fetches: 5
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2 (actual rows=1 loops=7)
Index Cond: ("time" = g."time")
Heap Fetches: 7
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3 (actual rows=1 loops=7)
Index Cond: ("time" = g."time")
Heap Fetches: 7
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4 (actual rows=1 loops=7)
Index Cond: ("time" = g."time")
Heap Fetches: 7
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5 (actual rows=1 loops=6)
Index Cond: ("time" = g."time")
Heap Fetches: 6
(19 rows)

-- test runtime exclusion with subquery
:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE m1.time=(SELECT max(time) FROM metrics_timestamptz);
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------------
Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=1 loops=1)
Chunks excluded during runtime: 4
InitPlan 2 (returns $1)
-> Result (actual rows=1 loops=1)
InitPlan 1 (returns $0)
-> Limit (actual rows=1 loops=1)
-> Custom Scan (ChunkAppend) on metrics_timestamptz (actual rows=1 loops=1)
Order: metrics_timestamptz."time" DESC
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk (actual rows=1 loops=1)
Index Cond: ("time" IS NOT NULL)
Heap Fetches: 1
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk (never executed)
Index Cond: ("time" IS NOT NULL)
Heap Fetches: 0
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk (never executed)
Index Cond: ("time" IS NOT NULL)
Heap Fetches: 0
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk (never executed)
Index Cond: ("time" IS NOT NULL)
Heap Fetches: 0
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk (never executed)
Index Cond: ("time" IS NOT NULL)
Heap Fetches: 0
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (never executed)
Index Cond: ("time" = $1)
Heap Fetches: 0
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (never executed)
Index Cond: ("time" = $1)
Heap Fetches: 0
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (never executed)
Index Cond: ("time" = $1)
Heap Fetches: 0
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4 (never executed)
Index Cond: ("time" = $1)
Heap Fetches: 0
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5 (actual rows=1 loops=1)
Index Cond: ("time" = $1)
Heap Fetches: 1
(38 rows)

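The same mechanism in its simplest form: the InitPlan result $1 is unknown at plan time, so all five chunk scans stay in the plan, but at execution only the chunk that can contain max(time) runs and the other four report "(never executed)". A sketch for replaying it:

EXPLAIN (analyze, costs off, timing off)
SELECT m1.time
FROM metrics_timestamptz m1
WHERE m1.time = (SELECT max(time) FROM metrics_timestamptz);
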
-- test runtime exclusion with correlated subquery
:PREFIX SELECT m1.time, (SELECT m2.time FROM metrics_timestamptz m2 WHERE m2.time < m1.time ORDER BY m2.time DESC LIMIT 1) FROM metrics_timestamptz m1 WHERE m1.time < '2000-01-10' ORDER BY m1.time;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------------------------
Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=216 loops=1)
Order: m1."time"
-> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1 (actual rows=112 loops=1)
Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)
Heap Fetches: 112
-> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2 (actual rows=104 loops=1)
Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)
Heap Fetches: 104
SubPlan 1
-> Limit (actual rows=1 loops=216)
-> Custom Scan (ChunkAppend) on metrics_timestamptz m2 (actual rows=1 loops=216)
Order: m2."time" DESC
Chunks excluded during runtime: 3
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_1 (never executed)
Index Cond: ("time" < m1."time")
Heap Fetches: 0
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_2 (never executed)
Index Cond: ("time" < m1."time")
Heap Fetches: 0
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3 (never executed)
Index Cond: ("time" < m1."time")
Heap Fetches: 0
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_4 (actual rows=1 loops=103)
Index Cond: ("time" < m1."time")
Heap Fetches: 103
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_5 (actual rows=1 loops=113)
Index Cond: ("time" < m1."time")
Heap Fetches: 112
(28 rows)

-- test EXISTS
:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE EXISTS(SELECT 1 FROM metrics_timestamptz m2 WHERE m1.time < m2.time) ORDER BY m1.time DESC limit 1000;
QUERY PLAN
--------------------------------------------------------------------------------------------------------------------------------------------
Limit (actual rows=744 loops=1)
-> Nested Loop Semi Join (actual rows=744 loops=1)
-> Custom Scan (ChunkAppend) on metrics_timestamptz m1 (actual rows=745 loops=1)
Order: m1."time" DESC
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_1 (actual rows=129 loops=1)
Heap Fetches: 129
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_2 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_4 (actual rows=168 loops=1)
Heap Fetches: 168
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_5 (actual rows=112 loops=1)
Heap Fetches: 112
-> Append (actual rows=1 loops=745)
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2 (actual rows=0 loops=745)
Index Cond: ("time" > m1."time")
Heap Fetches: 111
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_1 (actual rows=0 loops=634)
Index Cond: ("time" > m1."time")
Heap Fetches: 168
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_2 (actual rows=0 loops=466)
Index Cond: ("time" > m1."time")
Heap Fetches: 168
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_3 (actual rows=1 loops=298)
Index Cond: ("time" > m1."time")
Heap Fetches: 168
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_4 (actual rows=1 loops=130)
Index Cond: ("time" > m1."time")
Heap Fetches: 129
(30 rows)

--generate the results into two different files
\set ECHO errors

@@ -150,15 +150,13 @@ psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:20: NOTICE: Stable function now_s() called!
QUERY PLAN
----------------------------------------------------------------------------------------
Custom Scan (ConstraintAwareAppend)
Hypertable: append_test
Chunks left after exclusion: 1
-> Append
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk
Index Cond: ("time" > (now_s() - '@ 2 mons'::interval))
(6 rows)
QUERY PLAN
----------------------------------------------------------------------------------
Custom Scan (ChunkAppend) on append_test
Chunks excluded during startup: 2
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk
Index Cond: ("time" > (now_s() - '@ 2 mons'::interval))
(4 rows)

-- adding ORDER BY and LIMIT should turn the plan into an optimized
-- ordered append plan
@@ -251,21 +249,19 @@ psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:58: NOTICE: Stable function now_s() called!
QUERY PLAN
----------------------------------------------------------------------------------------------------
QUERY PLAN
----------------------------------------------------------------------------------------------
Sort
Sort Key: (date_trunc('year'::text, append_test."time")) DESC
-> HashAggregate
Group Key: date_trunc('year'::text, append_test."time")
-> Custom Scan (ConstraintAwareAppend)
Hypertable: append_test
Chunks left after exclusion: 2
-> Append
-> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
(12 rows)
Group Key: (date_trunc('year'::text, append_test."time"))
-> Custom Scan (ChunkAppend) on append_test
Chunks excluded during startup: 1
-> Index Scan using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk
Index Cond: ("time" > (now_s() - '@ 4 mons'::interval))
(10 rows)

-- querying outside the time range should return nothing. This tests
-- that ConstraintAwareAppend can handle the case when an Append node
@@ -328,26 +324,24 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
QUERY PLAN
---------------------------------------------------------------------------------------------------------------
QUERY PLAN
---------------------------------------------------------------------------------------------------------
Merge Left Join
Merge Cond: (period.btime = data.btime)
CTE data
-> HashAggregate
Group Key: time_bucket('@ 30 days'::interval, append_test."time")
-> Custom Scan (ConstraintAwareAppend)
Hypertable: append_test
Chunks left after exclusion: 3
-> Append
-> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
Group Key: (time_bucket('@ 30 days'::interval, append_test."time"))
-> Custom Scan (ChunkAppend) on append_test
Chunks excluded during startup: 0
-> Index Scan Backward using _hyper_1_1_chunk_append_test_time_idx on _hyper_1_1_chunk
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_2_chunk_append_test_time_idx on _hyper_1_2_chunk
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
-> Index Scan Backward using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk
Index Cond: ("time" > (now_s() - '@ 400 days'::interval))
Filter: (colorid > 0)
CTE period
-> Function Scan on generate_series "time"
-> Sort
@@ -356,7 +350,7 @@ psql:include/append_query.sql:98: NOTICE: Stable function now_s() called!
-> Sort
Sort Key: data.btime
-> CTE Scan on data
(26 rows)
(24 rows)

WITH data AS (
SELECT time_bucket(INTERVAL '30 day', TIME) AS btime, AVG(temp) AS VALUE
@@ -414,23 +408,19 @@ psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:126: NOTICE: Stable function now_s() called!
QUERY PLAN
--------------------------------------------------------------------------------------------------
QUERY PLAN
--------------------------------------------------------------------------------------------
Nested Loop
Join Filter: (a.colorid = j.colorid)
-> Custom Scan (ConstraintAwareAppend)
Hypertable: append_test
Chunks left after exclusion: 1
-> Append
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
-> Custom Scan (ConstraintAwareAppend)
Hypertable: join_test
Chunks left after exclusion: 1
-> Append
-> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
(14 rows)
-> Custom Scan (ChunkAppend) on append_test a
Chunks excluded during startup: 2
-> Index Scan using _hyper_1_3_chunk_append_test_time_idx on _hyper_1_3_chunk a_1
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
-> Custom Scan (ChunkAppend) on join_test j
Chunks excluded during startup: 2
-> Index Scan using _hyper_2_6_chunk_join_test_time_idx on _hyper_2_6_chunk j_1
Index Cond: ("time" > (now_s() - '@ 3 hours'::interval))
(10 rows)

reset enable_hashjoin;
reset enable_mergejoin;
@@ -887,24 +877,189 @@ psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
psql:include/append_query.sql:208: NOTICE: Stable function now_s() called!
QUERY PLAN
---------------------------------------------------------------------------
QUERY PLAN
---------------------------------------------------------------------
Sort
Sort Key: append_test."time" DESC
-> Custom Scan (ConstraintAwareAppend)
Hypertable: append_test
Chunks left after exclusion: 3
-> Append
-> Sample Scan on _hyper_1_3_chunk
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
-> Sample Scan on _hyper_1_2_chunk
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
-> Sample Scan on _hyper_1_1_chunk
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
(15 rows)
-> Custom Scan (ChunkAppend) on append_test
Chunks excluded during startup: 0
-> Sample Scan on _hyper_1_3_chunk
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
-> Sample Scan on _hyper_1_2_chunk
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
-> Sample Scan on _hyper_1_1_chunk
Sampling: system ('1'::real)
Filter: ("time" > (now_s() - '@ 400 days'::interval))
(13 rows)

-- test runtime exclusion
-- test runtime exclusion with LATERAL and 2 hypertables
:PREFIX SELECT m1.time, m2.time FROM metrics_timestamptz m1 LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m2 WHERE m1.time = m2.time LIMIT 1) m2 ON true ORDER BY m1.time;
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------
Nested Loop Left Join
-> Custom Scan (ChunkAppend) on metrics_timestamptz m1
Order: m1."time"
-> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1
-> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2
-> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3
-> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4
-> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5
-> Limit
-> Custom Scan (ChunkAppend) on metrics_timestamptz m2
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_1
Index Cond: ("time" = m1."time")
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_2
Index Cond: ("time" = m1."time")
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3
Index Cond: ("time" = m1."time")
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_4
Index Cond: ("time" = m1."time")
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_5
Index Cond: ("time" = m1."time")
(20 rows)

-- test runtime exclusion with LATERAL and generate_series
:PREFIX SELECT g.time FROM generate_series('2000-01-01'::timestamptz, '2000-02-01'::timestamptz, '1d'::interval) g(time) LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m WHERE m.time=g.time LIMIT 1) m ON true;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------
Nested Loop Left Join
-> Function Scan on generate_series g
-> Limit
-> Custom Scan (ChunkAppend) on metrics_timestamptz m
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1
Index Cond: ("time" = g."time")
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2
Index Cond: ("time" = g."time")
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3
Index Cond: ("time" = g."time")
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4
Index Cond: ("time" = g."time")
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5
Index Cond: ("time" = g."time")
(14 rows)

:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time) m ON true;
QUERY PLAN
--------------------------------------------------------------------------------------------------------------------
Merge Join
Merge Cond: (m."time" = g."time")
-> Merge Append
Sort Key: m."time"
-> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m
-> Index Only Scan Backward using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_1
-> Index Only Scan Backward using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_2
-> Index Only Scan Backward using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_3
-> Index Only Scan Backward using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_4
-> Sort
Sort Key: g."time"
-> Function Scan on generate_series g
(12 rows)

:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time ORDER BY time) m ON true;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------
Nested Loop
-> Function Scan on generate_series g
-> Custom Scan (ChunkAppend) on metrics_timestamptz m
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m_1
Index Cond: ("time" = g."time")
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m_2
Index Cond: ("time" = g."time")
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m_3
Index Cond: ("time" = g."time")
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m_4
Index Cond: ("time" = g."time")
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m_5
Index Cond: ("time" = g."time")
(13 rows)

-- test runtime exclusion with subquery
:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE m1.time=(SELECT max(time) FROM metrics_timestamptz);
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------
Custom Scan (ChunkAppend) on metrics_timestamptz m1
InitPlan 2 (returns $1)
-> Result
InitPlan 1 (returns $0)
-> Limit
-> Custom Scan (ChunkAppend) on metrics_timestamptz
Order: metrics_timestamptz."time" DESC
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk
Index Cond: ("time" IS NOT NULL)
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk
Index Cond: ("time" IS NOT NULL)
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk
Index Cond: ("time" IS NOT NULL)
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk
Index Cond: ("time" IS NOT NULL)
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk
Index Cond: ("time" IS NOT NULL)
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1
Index Cond: ("time" = $1)
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_2
Index Cond: ("time" = $1)
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3
Index Cond: ("time" = $1)
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_4
Index Cond: ("time" = $1)
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_5
Index Cond: ("time" = $1)
(27 rows)

-- test runtime exclusion with correlated subquery
:PREFIX SELECT m1.time, (SELECT m2.time FROM metrics_timestamptz m2 WHERE m2.time < m1.time ORDER BY m2.time DESC LIMIT 1) FROM metrics_timestamptz m1 WHERE m1.time < '2000-01-10' ORDER BY m1.time;
QUERY PLAN
--------------------------------------------------------------------------------------------------------------------------
Custom Scan (ChunkAppend) on metrics_timestamptz m1
Order: m1."time"
-> Index Only Scan Backward using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_1
Index Cond: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)
SubPlan 1
-> Limit
-> Custom Scan (ChunkAppend) on metrics_timestamptz m2
Order: m2."time" DESC
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_1
Index Cond: ("time" < m1_1."time")
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_2
Index Cond: ("time" < m1_1."time")
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_3
Index Cond: ("time" < m1_1."time")
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_4
Index Cond: ("time" < m1_1."time")
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2_5
Index Cond: ("time" < m1_1."time")
(20 rows)

-- test EXISTS
:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE EXISTS(SELECT 1 FROM metrics_timestamptz m2 WHERE m1.time < m2.time) ORDER BY m1.time DESC limit 1000;
QUERY PLAN
------------------------------------------------------------------------------------------------------------------
Limit
-> Nested Loop Semi Join
-> Custom Scan (ChunkAppend) on metrics_timestamptz m1
Order: m1."time" DESC
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m1_1
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m1_2
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m1_3
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m1_4
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m1_5
-> Append
-> Index Only Scan using _hyper_5_17_chunk_metrics_timestamptz_time_idx on _hyper_5_17_chunk m2
Index Cond: ("time" > m1."time")
-> Index Only Scan using _hyper_5_18_chunk_metrics_timestamptz_time_idx on _hyper_5_18_chunk m2_1
Index Cond: ("time" > m1."time")
-> Index Only Scan using _hyper_5_19_chunk_metrics_timestamptz_time_idx on _hyper_5_19_chunk m2_2
Index Cond: ("time" > m1."time")
-> Index Only Scan using _hyper_5_20_chunk_metrics_timestamptz_time_idx on _hyper_5_20_chunk m2_3
Index Cond: ("time" > m1."time")
-> Index Only Scan using _hyper_5_21_chunk_metrics_timestamptz_time_idx on _hyper_5_21_chunk m2_4
Index Cond: ("time" > m1."time")
(20 rows)

--generate the results into two different files
\set ECHO errors

@@ -78,38 +78,34 @@ $BODY$;
EXPLAIN (costs off)
SELECT FROM "two_Partitions"
WHERE series_1 IN (SELECT series_1 FROM "two_Partitions" WHERE series_1 > series_val());
QUERY PLAN
------------------------------------------------------------------------------------------------------------------------------------------------------
QUERY PLAN
------------------------------------------------------------------------------------------------------------------------------------------------
Hash Join
Hash Cond: ("two_Partitions".series_1 = "two_Partitions_1".series_1)
-> Custom Scan (ConstraintAwareAppend)
Hypertable: two_Partitions
Chunks left after exclusion: 4
-> Append
-> Index Only Scan using "_hyper_1_1_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_1_chunk
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_2_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_2_chunk
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_3_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_3_chunk
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_4_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_4_chunk
Index Cond: (series_1 > (series_val())::double precision)
-> Custom Scan (ChunkAppend) on "two_Partitions"
Chunks excluded during startup: 0
-> Index Only Scan using "_hyper_1_1_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_1_chunk
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_2_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_2_chunk
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_3_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_3_chunk
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_4_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_4_chunk
Index Cond: (series_1 > (series_val())::double precision)
-> Hash
-> HashAggregate
Group Key: "two_Partitions_1".series_1
-> Custom Scan (ConstraintAwareAppend)
Hypertable: two_Partitions
Chunks left after exclusion: 4
-> Append
-> Index Only Scan using "_hyper_1_1_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_1_chunk _hyper_1_1_chunk_1
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_2_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_2_chunk _hyper_1_2_chunk_1
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_3_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_3_chunk _hyper_1_3_chunk_1
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_4_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_4_chunk _hyper_1_4_chunk_1
Index Cond: (series_1 > (series_val())::double precision)
(29 rows)
-> Custom Scan (ChunkAppend) on "two_Partitions" "two_Partitions_1"
Chunks excluded during startup: 0
-> Index Only Scan using "_hyper_1_1_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_1_chunk _hyper_1_1_chunk_1
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_2_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_2_chunk _hyper_1_2_chunk_1
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_3_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_3_chunk _hyper_1_3_chunk_1
Index Cond: (series_1 > (series_val())::double precision)
-> Index Only Scan using "_hyper_1_4_chunk_two_Partitions_timeCustom_series_1_idx" on _hyper_1_4_chunk _hyper_1_4_chunk_1
Index Cond: (series_1 > (series_val())::double precision)
(25 rows)

-- ConstraintAwareAppend NOT applied for DELETE
EXPLAIN (costs off)

@@ -180,13 +180,15 @@ SELECT histogram(i, 10, 100000, 5) FROM "test";
Single Copy: true
-> Result (actual rows=1000000 loops=1)
One-Time Filter: (length(version()) > 0)
-> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1)
Hypertable: test
Chunks left after exclusion: 2
-> Append (actual rows=1000000 loops=1)
-> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1)
Chunks excluded during startup: 0
-> Result (actual rows=500000 loops=1)
One-Time Filter: (length(version()) > 0)
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
-> Result (actual rows=500000 loops=1)
One-Time Filter: (length(version()) > 0)
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
(12 rows)
(14 rows)

-- test constraint aware append with parallel aggregation
SET max_parallel_workers_per_gather = 1;
@@ -199,13 +201,15 @@ EXPLAIN (costs off) SELECT count(*) FROM "test" WHERE length(version()) > 0;
-> Partial Aggregate
-> Result
One-Time Filter: (length(version()) > 0)
-> Custom Scan (ConstraintAwareAppend)
Hypertable: test
Chunks left after exclusion: 2
-> Append
-> Custom Scan (ChunkAppend) on test
Chunks excluded during startup: 0
-> Result
One-Time Filter: (length(version()) > 0)
-> Parallel Seq Scan on _hyper_1_1_chunk
-> Result
One-Time Filter: (length(version()) > 0)
-> Parallel Seq Scan on _hyper_1_2_chunk
(12 rows)
(14 rows)

SELECT count(*) FROM "test" WHERE length(version()) > 0;
count
@@ -218,50 +222,44 @@ SET max_parallel_workers_per_gather = 4;
-- in a query will prevent parallelism but CURRENT_TIMESTAMP and
-- transaction_timestamp() are marked parallel safe
:PREFIX SELECT i FROM "test" WHERE ts < CURRENT_TIMESTAMP;
QUERY PLAN
-----------------------------------------------------------------------------
QUERY PLAN
-----------------------------------------------------------------------
Gather (actual rows=1000000 loops=1)
Workers Planned: 1
Workers Launched: 1
Single Copy: true
-> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1)
Hypertable: test
Chunks left after exclusion: 2
-> Append (actual rows=1000000 loops=1)
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < CURRENT_TIMESTAMP)
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < CURRENT_TIMESTAMP)
(12 rows)
-> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1)
Chunks excluded during startup: 0
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < CURRENT_TIMESTAMP)
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < CURRENT_TIMESTAMP)
(10 rows)

:PREFIX SELECT i FROM "test" WHERE ts < transaction_timestamp();
QUERY PLAN
-----------------------------------------------------------------------------
QUERY PLAN
-----------------------------------------------------------------------
Gather (actual rows=1000000 loops=1)
Workers Planned: 1
Workers Launched: 1
Single Copy: true
-> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1)
Hypertable: test
Chunks left after exclusion: 2
-> Append (actual rows=1000000 loops=1)
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < transaction_timestamp())
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < transaction_timestamp())
(12 rows)
-> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1)
Chunks excluded during startup: 0
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < transaction_timestamp())
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < transaction_timestamp())
(10 rows)

-- this won't be parallel query because now() is parallel restricted in PG < 12
:PREFIX SELECT i FROM "test" WHERE ts < now();
QUERY PLAN
-----------------------------------------------------------------------
Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1)
Hypertable: test
Chunks left after exclusion: 2
-> Append (actual rows=1000000 loops=1)
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < now())
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < now())
(8 rows)
QUERY PLAN
-----------------------------------------------------------------
Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1)
Chunks excluded during startup: 0
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < now())
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < now())
(6 rows)

@ -179,13 +179,15 @@ SELECT histogram(i, 10, 100000, 5) FROM "test";
Single Copy: true
-> Result (actual rows=1000000 loops=1)
One-Time Filter: (length(version()) > 0)
-> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1)
Hypertable: test
Chunks left after exclusion: 2
-> Append (actual rows=1000000 loops=1)
-> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1)
Chunks excluded during startup: 0
-> Result (actual rows=500000 loops=1)
One-Time Filter: (length(version()) > 0)
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
-> Result (actual rows=500000 loops=1)
One-Time Filter: (length(version()) > 0)
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
(12 rows)
(14 rows)

-- test constraint aware append with parallel aggregation
SET max_parallel_workers_per_gather = 1;
@ -198,13 +200,15 @@ EXPLAIN (costs off) SELECT count(*) FROM "test" WHERE length(version()) > 0;
-> Partial Aggregate
-> Result
One-Time Filter: (length(version()) > 0)
-> Custom Scan (ConstraintAwareAppend)
Hypertable: test
Chunks left after exclusion: 2
-> Parallel Append
-> Custom Scan (ChunkAppend) on test
Chunks excluded during startup: 0
-> Result
One-Time Filter: (length(version()) > 0)
-> Parallel Seq Scan on _hyper_1_1_chunk
-> Result
One-Time Filter: (length(version()) > 0)
-> Parallel Seq Scan on _hyper_1_2_chunk
(12 rows)
(14 rows)

SELECT count(*) FROM "test" WHERE length(version()) > 0;
count
@ -217,50 +221,44 @@ SET max_parallel_workers_per_gather = 4;
-- in a query will prevent parallelism but CURRENT_TIMESTAMP and
-- transaction_timestamp() are marked parallel safe
:PREFIX SELECT i FROM "test" WHERE ts < CURRENT_TIMESTAMP;
QUERY PLAN
-----------------------------------------------------------------------------
QUERY PLAN
-----------------------------------------------------------------------
Gather (actual rows=1000000 loops=1)
Workers Planned: 1
Workers Launched: 1
Single Copy: true
-> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1)
Hypertable: test
Chunks left after exclusion: 2
-> Append (actual rows=1000000 loops=1)
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < CURRENT_TIMESTAMP)
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < CURRENT_TIMESTAMP)
(12 rows)
-> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1)
Chunks excluded during startup: 0
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < CURRENT_TIMESTAMP)
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < CURRENT_TIMESTAMP)
(10 rows)

:PREFIX SELECT i FROM "test" WHERE ts < transaction_timestamp();
QUERY PLAN
-----------------------------------------------------------------------------
QUERY PLAN
-----------------------------------------------------------------------
Gather (actual rows=1000000 loops=1)
Workers Planned: 1
Workers Launched: 1
Single Copy: true
-> Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1)
Hypertable: test
Chunks left after exclusion: 2
-> Append (actual rows=1000000 loops=1)
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < transaction_timestamp())
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < transaction_timestamp())
(12 rows)
-> Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1)
Chunks excluded during startup: 0
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < transaction_timestamp())
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < transaction_timestamp())
(10 rows)

-- this won't be parallel query because now() is parallel restricted in PG < 12
:PREFIX SELECT i FROM "test" WHERE ts < now();
QUERY PLAN
-----------------------------------------------------------------------
Custom Scan (ConstraintAwareAppend) (actual rows=1000000 loops=1)
Hypertable: test
Chunks left after exclusion: 2
-> Append (actual rows=1000000 loops=1)
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < now())
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < now())
(8 rows)
QUERY PLAN
-----------------------------------------------------------------
Custom Scan (ChunkAppend) on test (actual rows=1000000 loops=1)
Chunks excluded during startup: 0
-> Seq Scan on _hyper_1_1_chunk (actual rows=500000 loops=1)
Filter: (ts < now())
-> Seq Scan on _hyper_1_2_chunk (actual rows=500000 loops=1)
Filter: (ts < now())
(6 rows)
@ -172,20 +172,22 @@ SELECT histogram(i, 10, 100000, 5) FROM "test";

-- test constraint aware append
:PREFIX SELECT i FROM "test" WHERE length(version()) > 0;
QUERY PLAN
------------------------------------------------------
QUERY PLAN
--------------------------------------------------------------
Gather
Workers Planned: 1
Single Copy: true
-> Result
One-Time Filter: (length(version()) > 0)
-> Custom Scan (ConstraintAwareAppend)
Hypertable: test
Chunks left after exclusion: 2
-> Append
-> Custom Scan (ChunkAppend) on test
Chunks excluded during startup: 0
-> Result
One-Time Filter: (length(version()) > 0)
-> Seq Scan on _hyper_1_1_chunk
-> Result
One-Time Filter: (length(version()) > 0)
-> Seq Scan on _hyper_1_2_chunk
(11 rows)
(13 rows)

-- test constraint aware append with parallel aggregation
SET max_parallel_workers_per_gather = 1;
@ -198,13 +200,15 @@ EXPLAIN (costs off) SELECT count(*) FROM "test" WHERE length(version()) > 0;
-> Partial Aggregate
-> Result
One-Time Filter: (length(version()) > 0)
-> Custom Scan (ConstraintAwareAppend)
Hypertable: test
Chunks left after exclusion: 2
-> Parallel Append
-> Custom Scan (ChunkAppend) on test
Chunks excluded during startup: 0
-> Result
One-Time Filter: (length(version()) > 0)
-> Parallel Seq Scan on _hyper_1_1_chunk
-> Result
One-Time Filter: (length(version()) > 0)
-> Parallel Seq Scan on _hyper_1_2_chunk
(12 rows)
(14 rows)

SELECT count(*) FROM "test" WHERE length(version()) > 0;
count
@ -217,45 +221,39 @@ SET max_parallel_workers_per_gather = 4;
-- in a query will prevent parallelism but CURRENT_TIMESTAMP and
-- transaction_timestamp() are marked parallel safe
:PREFIX SELECT i FROM "test" WHERE ts < CURRENT_TIMESTAMP;
QUERY PLAN
------------------------------------------
Custom Scan (ConstraintAwareAppend)
Hypertable: test
Chunks left after exclusion: 2
-> Append
-> Seq Scan on _hyper_1_1_chunk
Filter: (ts < now())
-> Seq Scan on _hyper_1_2_chunk
Filter: (ts < now())
(8 rows)
QUERY PLAN
-------------------------------------
Custom Scan (ChunkAppend) on test
Chunks excluded during startup: 0
-> Seq Scan on _hyper_1_1_chunk
Filter: (ts < now())
-> Seq Scan on _hyper_1_2_chunk
Filter: (ts < now())
(6 rows)

:PREFIX SELECT i FROM "test" WHERE ts < transaction_timestamp();
QUERY PLAN
------------------------------------------------------------
QUERY PLAN
------------------------------------------------------
Gather
Workers Planned: 1
Single Copy: true
-> Custom Scan (ConstraintAwareAppend)
Hypertable: test
Chunks left after exclusion: 2
-> Append
-> Seq Scan on _hyper_1_1_chunk
Filter: (ts < transaction_timestamp())
-> Seq Scan on _hyper_1_2_chunk
Filter: (ts < transaction_timestamp())
(11 rows)
-> Custom Scan (ChunkAppend) on test
Chunks excluded during startup: 0
-> Seq Scan on _hyper_1_1_chunk
Filter: (ts < transaction_timestamp())
-> Seq Scan on _hyper_1_2_chunk
Filter: (ts < transaction_timestamp())
(9 rows)

-- this won't be parallel query because now() is parallel restricted in PG < 12
:PREFIX SELECT i FROM "test" WHERE ts < now();
QUERY PLAN
------------------------------------------
Custom Scan (ConstraintAwareAppend)
Hypertable: test
Chunks left after exclusion: 2
-> Append
-> Seq Scan on _hyper_1_1_chunk
Filter: (ts < now())
-> Seq Scan on _hyper_1_2_chunk
Filter: (ts < now())
(8 rows)
QUERY PLAN
-------------------------------------
Custom Scan (ChunkAppend) on test
Chunks excluded during startup: 0
-> Seq Scan on _hyper_1_1_chunk
Filter: (ts < now())
-> Seq Scan on _hyper_1_2_chunk
Filter: (ts < now())
(6 rows)
@ -764,73 +764,38 @@ SELECT * FROM cte ORDER BY value;
QUERY PLAN
------------------------------------------------------------------------------------------
Sort
Sort Key: _hyper_3_116_chunk.value
-> Append
Sort Key: hyper_ts.value
-> Custom Scan (ChunkAppend) on hyper_ts
Chunks excluded during startup: 6
-> Seq Scan on _hyper_3_116_chunk
Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone)
-> Seq Scan on _hyper_3_117_chunk
Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone)
-> Seq Scan on _hyper_3_118_chunk
Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone)
-> Seq Scan on _hyper_3_119_chunk
Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone)
-> Seq Scan on _hyper_3_120_chunk
Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone)
-> Seq Scan on _hyper_3_121_chunk
Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone)
-> Seq Scan on _hyper_3_122_chunk
Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone)
-> Seq Scan on _hyper_3_123_chunk
Filter: ("time" < 'Wed Dec 31 16:00:10 1969'::timestamp without time zone)
(19 rows)
(8 rows)

:PREFIX SELECT * FROM hyper_ts WHERE time < ('Wed Dec 31 16:00:10 1969'::timestamp::timestamptz) ORDER BY value;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------
Sort
Sort Key: _hyper_3_116_chunk.value
-> Append
Sort Key: hyper_ts.value
-> Custom Scan (ChunkAppend) on hyper_ts
Chunks excluded during startup: 6
-> Seq Scan on _hyper_3_116_chunk
Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone)
-> Seq Scan on _hyper_3_117_chunk
Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone)
-> Seq Scan on _hyper_3_118_chunk
Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone)
-> Seq Scan on _hyper_3_119_chunk
Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone)
-> Seq Scan on _hyper_3_120_chunk
Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone)
-> Seq Scan on _hyper_3_121_chunk
Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone)
-> Seq Scan on _hyper_3_122_chunk
Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone)
-> Seq Scan on _hyper_3_123_chunk
Filter: ("time" < ('Wed Dec 31 16:00:10 1969'::timestamp without time zone)::timestamp with time zone)
(19 rows)
(8 rows)

:PREFIX SELECT * FROM hyper_ts WHERE NOW() < time ORDER BY value;
QUERY PLAN
--------------------------------------------
QUERY PLAN
---------------------------------------------
Sort
Sort Key: _hyper_3_116_chunk.value
-> Append
-> Seq Scan on _hyper_3_116_chunk
Filter: (now() < "time")
-> Seq Scan on _hyper_3_117_chunk
Filter: (now() < "time")
-> Seq Scan on _hyper_3_118_chunk
Filter: (now() < "time")
-> Seq Scan on _hyper_3_119_chunk
Filter: (now() < "time")
-> Seq Scan on _hyper_3_120_chunk
Filter: (now() < "time")
-> Seq Scan on _hyper_3_121_chunk
Filter: (now() < "time")
-> Seq Scan on _hyper_3_122_chunk
Filter: (now() < "time")
Sort Key: hyper_ts.value
-> Custom Scan (ChunkAppend) on hyper_ts
Chunks excluded during startup: 7
-> Seq Scan on _hyper_3_123_chunk
Filter: (now() < "time")
(19 rows)
(6 rows)

--joins
:PREFIX SELECT * FROM hyper_ts WHERE tag_id IN (SELECT id FROM tag WHERE tag.id=1) and time < to_timestamp(10) and device_id = 'dev1' ORDER BY value;
@ -850,8 +815,8 @@ SELECT * FROM cte ORDER BY value;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------------------------------
Sort
Sort Key: _hyper_3_116_chunk.value
-> Append
Sort Key: hyper_ts.value
-> Custom Scan (ChunkAppend) on hyper_ts
-> Seq Scan on _hyper_3_116_chunk
Filter: ((hashed SubPlan 1) OR (("time" < 'Wed Dec 31 16:00:10 1969 PST'::timestamp with time zone) AND (device_id = 'dev1'::text)))
SubPlan 1
@ -1241,8 +1241,7 @@ LEFT OUTER JOIN LATERAL(
-> Custom Scan (ChunkAppend) on ordered_append o (actual rows=0 loops=3)
Order: o."time" DESC
Chunks excluded during startup: 3
Chunks excluded during runtime: 0
(7 rows)
(6 rows)

-- test CTE
-- no chunk exclusion for CTE because cte query is not pulled up
@ -1321,13 +1320,14 @@ ORDER BY o1.time;
FROM ordered_append WHERE time = (SELECT max(time) FROM ordered_append) ORDER BY time;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------------------------
Append (actual rows=3 loops=1)
Custom Scan (ChunkAppend) on ordered_append (actual rows=3 loops=1)
Chunks excluded during runtime: 2
InitPlan 2 (returns $1)
-> Result (actual rows=1 loops=1)
InitPlan 1 (returns $0)
-> Limit (actual rows=1 loops=1)
-> Custom Scan (ChunkAppend) on ordered_append (actual rows=1 loops=1)
Order: ordered_append."time" DESC
-> Custom Scan (ChunkAppend) on ordered_append ordered_append_1 (actual rows=1 loops=1)
Order: ordered_append_1."time" DESC
-> Index Only Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk _hyper_1_3_chunk_1 (actual rows=1 loops=1)
Index Cond: ("time" IS NOT NULL)
Heap Fetches: 1
@ -1337,13 +1337,13 @@ FROM ordered_append WHERE time = (SELECT max(time) FROM ordered_append) ORDER BY
-> Index Only Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk _hyper_1_1_chunk_1 (never executed)
Index Cond: ("time" IS NOT NULL)
Heap Fetches: 0
-> Index Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk (actual rows=0 loops=1)
-> Index Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk (never executed)
Index Cond: ("time" = $1)
-> Index Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk (actual rows=0 loops=1)
-> Index Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk (never executed)
Index Cond: ("time" = $1)
-> Index Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk (actual rows=3 loops=1)
Index Cond: ("time" = $1)
(22 rows)
(23 rows)

-- test join against max query
-- not ChunkAppend so no chunk exclusion
@ -1569,6 +1569,92 @@ LEFT OUTER JOIN LATERAL(
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed)
(14 rows)

-- test JOIN on time column with USING
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 USING(time) ORDER BY o1.time LIMIT 100;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------------------------
Limit (actual rows=100 loops=1)
-> Merge Join (actual rows=100 loops=1)
Merge Cond: (o1."time" = o2."time")
-> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=34 loops=1)
Order: o1."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=34 loops=1)
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed)
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed)
-> Materialize (actual rows=100 loops=1)
-> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=34 loops=1)
Order: o2."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=34 loops=1)
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed)
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed)
(14 rows)

-- test NATURAL JOIN on time column
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 NATURAL INNER JOIN ordered_append o2 ORDER BY o1.time LIMIT 100;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------------
Limit (actual rows=100 loops=1)
-> Merge Join (actual rows=100 loops=1)
Merge Cond: (o1."time" = o2."time")
Join Filter: ((o1.device_id = o2.device_id) AND (o1.value = o2.value))
Rows Removed by Join Filter: 198
-> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=100 loops=1)
Order: o1."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=100 loops=1)
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed)
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed)
-> Materialize (actual rows=298 loops=1)
-> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=100 loops=1)
Order: o2."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=100 loops=1)
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed)
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed)
(16 rows)

-- test LEFT JOIN on time column
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 LEFT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o1.time LIMIT 100;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------------------------
Limit (actual rows=100 loops=1)
-> Merge Left Join (actual rows=100 loops=1)
Merge Cond: (o1."time" = o2."time")
-> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=34 loops=1)
Order: o1."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=34 loops=1)
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed)
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed)
-> Materialize (actual rows=100 loops=1)
-> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=34 loops=1)
Order: o2."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=34 loops=1)
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed)
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed)
(14 rows)

-- test RIGHT JOIN on time column
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 RIGHT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o2.time LIMIT 100;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------------------------
Limit (actual rows=100 loops=1)
-> Merge Left Join (actual rows=100 loops=1)
Merge Cond: (o2."time" = o1."time")
-> Custom Scan (ChunkAppend) on ordered_append o2 (actual rows=34 loops=1)
Order: o2."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1 (actual rows=34 loops=1)
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2 (never executed)
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3 (never executed)
-> Materialize (actual rows=100 loops=1)
-> Custom Scan (ChunkAppend) on ordered_append o1 (actual rows=34 loops=1)
Order: o1."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1 (actual rows=34 loops=1)
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2 (never executed)
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3 (never executed)
(14 rows)

-- test JOIN on time column with ON clause expression order switched
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 ON o2.time = o1.time ORDER BY o1.time LIMIT 100;
@ -1129,14 +1129,13 @@ LEFT OUTER JOIN LATERAL(
-> Custom Scan (ChunkAppend) on ordered_append o
Order: o."time" DESC
Chunks excluded during startup: 0
Chunks excluded during runtime: 3
-> Index Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o_1
Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)))
-> Index Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o_2
Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)))
-> Index Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o_3
Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)))
(13 rows)
(12 rows)

-- test LATERAL with correlated query
-- only 2nd chunk should be executed
@ -1154,14 +1153,13 @@ LEFT OUTER JOIN LATERAL(
-> Custom Scan (ChunkAppend) on ordered_append o
Order: o."time"
Chunks excluded during startup: 0
Chunks excluded during runtime: 3
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o_1
Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)))
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o_2
Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)))
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o_3
Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)))
(13 rows)
(12 rows)

-- test startup and runtime exclusion together
:PREFIX SELECT *
@ -1178,14 +1176,13 @@ LEFT OUTER JOIN LATERAL(
-> Custom Scan (ChunkAppend) on ordered_append o
Order: o."time" DESC
Chunks excluded during startup: 0
Chunks excluded during runtime: 3
-> Index Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o_1
Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)) AND ("time" < now()))
-> Index Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o_2
Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)) AND ("time" < now()))
-> Index Scan using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o_3
Index Cond: (("time" >= g."time") AND ("time" < (g."time" + '@ 1 day'::interval)) AND ("time" < now()))
(13 rows)
(12 rows)

-- test startup and runtime exclusion together
-- all chunks should be filtered
@ -1203,8 +1200,7 @@ LEFT OUTER JOIN LATERAL(
-> Custom Scan (ChunkAppend) on ordered_append o
Order: o."time" DESC
Chunks excluded during startup: 3
Chunks excluded during runtime: 0
(7 rows)
(6 rows)

-- test CTE
-- no chunk exclusion for CTE because cte query is not pulled up
@ -1283,13 +1279,13 @@ ORDER BY o1.time;
FROM ordered_append WHERE time = (SELECT max(time) FROM ordered_append) ORDER BY time;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------
Append
Custom Scan (ChunkAppend) on ordered_append
InitPlan 2 (returns $1)
-> Result
InitPlan 1 (returns $0)
-> Limit
-> Custom Scan (ChunkAppend) on ordered_append
Order: ordered_append."time" DESC
-> Custom Scan (ChunkAppend) on ordered_append ordered_append_1
Order: ordered_append_1."time" DESC
-> Index Only Scan using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk _hyper_1_3_chunk_1
Index Cond: ("time" IS NOT NULL)
-> Index Only Scan using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk _hyper_1_2_chunk_1
@ -1524,6 +1520,91 @@ LEFT OUTER JOIN LATERAL(
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3
(14 rows)

-- test JOIN on time column with USING
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 USING(time) ORDER BY o1.time LIMIT 100;
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------
Limit
-> Merge Join
Merge Cond: (o1."time" = o2."time")
-> Custom Scan (ChunkAppend) on ordered_append o1
Order: o1."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3
-> Materialize
-> Custom Scan (ChunkAppend) on ordered_append o2
Order: o2."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3
(14 rows)

-- test NATURAL JOIN on time column
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 NATURAL INNER JOIN ordered_append o2 ORDER BY o1.time LIMIT 100;
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------
Limit
-> Merge Join
Merge Cond: (o1."time" = o2."time")
Join Filter: ((o1.device_id = o2.device_id) AND (o1.value = o2.value))
-> Custom Scan (ChunkAppend) on ordered_append o1
Order: o1."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3
-> Materialize
-> Custom Scan (ChunkAppend) on ordered_append o2
Order: o2."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3
(15 rows)

-- test LEFT JOIN on time column
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 LEFT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o1.time LIMIT 100;
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------
Limit
-> Merge Left Join
Merge Cond: (o1."time" = o2."time")
-> Custom Scan (ChunkAppend) on ordered_append o1
Order: o1."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3
-> Materialize
-> Custom Scan (ChunkAppend) on ordered_append o2
Order: o2."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3
(14 rows)

-- test RIGHT JOIN on time column
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 RIGHT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o2.time LIMIT 100;
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------
Limit
-> Merge Left Join
Merge Cond: (o2."time" = o1."time")
-> Custom Scan (ChunkAppend) on ordered_append o2
Order: o2."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o2_1
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o2_2
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o2_3
-> Materialize
-> Custom Scan (ChunkAppend) on ordered_append o1
Order: o1."time"
-> Index Scan Backward using _hyper_1_1_chunk_ordered_append_time_idx on _hyper_1_1_chunk o1_1
-> Index Scan Backward using _hyper_1_2_chunk_ordered_append_time_idx on _hyper_1_2_chunk o1_2
-> Index Scan Backward using _hyper_1_3_chunk_ordered_append_time_idx on _hyper_1_3_chunk o1_3
(14 rows)

-- test JOIN on time column with ON clause expression order switched
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 ON o2.time = o1.time ORDER BY o1.time LIMIT 100;
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -190,7 +190,7 @@ EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle);
|
||||
----------------------------------------------------------
|
||||
Subquery Scan on document
|
||||
Filter: f_leak(document.dtitle)
|
||||
-> Append
|
||||
-> Custom Scan (ChunkAppend) on document document_1
|
||||
InitPlan 1 (returns $0)
|
||||
-> Index Scan using uaccount_pkey on uaccount
|
||||
Index Cond: (pguser = "current_user"())
|
||||
@ -215,7 +215,7 @@ EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dt
|
||||
Hash Cond: (document.cid = category.cid)
|
||||
-> Subquery Scan on document
|
||||
Filter: f_leak(document.dtitle)
|
||||
-> Append
|
||||
-> Custom Scan (ChunkAppend) on document document_1
|
||||
InitPlan 1 (returns $0)
|
||||
-> Index Scan using uaccount_pkey on uaccount
|
||||
Index Cond: (pguser = "current_user"())
|
||||
@ -299,14 +299,34 @@ NOTICE: f_leak => great manga
|
||||
(3 rows)
|
||||
|
||||
EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle);
|
||||
QUERY PLAN
|
||||
----------------------------------------------------------
|
||||
QUERY PLAN
|
||||
--------------------------------------------------------
|
||||
Subquery Scan on document
|
||||
Filter: f_leak(document.dtitle)
|
||||
-> Custom Scan (ConstraintAwareAppend)
|
||||
Hypertable: document
|
||||
Chunks left after exclusion: 6
|
||||
-> Append
|
||||
-> Custom Scan (ChunkAppend) on document document_1
|
||||
Chunks excluded during startup: 0
|
||||
-> Seq Scan on _hyper_1_1_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_2_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_3_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_4_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_5_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_6_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
(16 rows)
|
||||
|
||||
EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle);
|
||||
QUERY PLAN
|
||||
--------------------------------------------------------------
|
||||
Nested Loop
|
||||
-> Subquery Scan on document
|
||||
Filter: f_leak(document.dtitle)
|
||||
-> Custom Scan (ChunkAppend) on document document_1
|
||||
Chunks excluded during startup: 0
|
||||
-> Seq Scan on _hyper_1_1_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_2_chunk
|
||||
@ -319,33 +339,9 @@ EXPLAIN (COSTS OFF) SELECT * FROM document WHERE f_leak(dtitle);
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_6_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
(18 rows)
|
||||
|
||||
EXPLAIN (COSTS OFF) SELECT * FROM document NATURAL JOIN category WHERE f_leak(dtitle);
|
||||
QUERY PLAN
|
||||
----------------------------------------------------------------
|
||||
Nested Loop
|
||||
-> Subquery Scan on document
|
||||
Filter: f_leak(document.dtitle)
|
||||
-> Custom Scan (ConstraintAwareAppend)
|
||||
Hypertable: document
|
||||
Chunks left after exclusion: 6
|
||||
-> Append
|
||||
-> Seq Scan on _hyper_1_1_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_2_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_3_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_4_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_5_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Seq Scan on _hyper_1_6_chunk
|
||||
Filter: (dauthor = "current_user"())
|
||||
-> Index Scan using category_pkey on category
|
||||
Index Cond: (cid = document.cid)
|
||||
(21 rows)
|
||||
(19 rows)
|
||||
|
||||
-- interaction of FK/PK constraints
|
||||
SET SESSION AUTHORIZATION regress_rls_alice;
|
||||
@@ -903,11 +899,11 @@ NOTICE: f_leak => awesome science fiction
(4 rows)

EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle);
                        QUERY PLAN
----------------------------------------------------------
                             QUERY PLAN
--------------------------------------------------------------------
 Subquery Scan on hyper_document
   Filter: f_leak(hyper_document.dtitle)
   ->  Append
   ->  Custom Scan (ChunkAppend) on hyper_document hyper_document_1
         InitPlan 1 (returns $0)
           ->  Index Scan using uaccount_pkey on uaccount
                 Index Cond: (pguser = "current_user"())
@@ -953,11 +949,11 @@ NOTICE: f_leak => awesome technology book
(10 rows)

EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle);
                        QUERY PLAN
----------------------------------------------------------
                             QUERY PLAN
--------------------------------------------------------------------
 Subquery Scan on hyper_document
   Filter: f_leak(hyper_document.dtitle)
   ->  Append
   ->  Custom Scan (ChunkAppend) on hyper_document hyper_document_1
         InitPlan 1 (returns $0)
           ->  Index Scan using uaccount_pkey on uaccount
                 Index Cond: (pguser = "current_user"())
@@ -1066,11 +1062,11 @@ NOTICE: f_leak => awesome technology book
(10 rows)

EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle);
                        QUERY PLAN
----------------------------------------------------------
                             QUERY PLAN
--------------------------------------------------------------------
 Subquery Scan on hyper_document
   Filter: f_leak(hyper_document.dtitle)
   ->  Append
   ->  Custom Scan (ChunkAppend) on hyper_document hyper_document_1
         InitPlan 1 (returns $0)
           ->  Index Scan using uaccount_pkey on uaccount
                 Index Cond: (pguser = "current_user"())
@@ -1128,27 +1124,25 @@ NOTICE: f_leak => great satire
(4 rows)

EXPLAIN (COSTS OFF) SELECT * FROM hyper_document WHERE f_leak(dtitle);
                        QUERY PLAN
----------------------------------------------------------
                             QUERY PLAN
--------------------------------------------------------------------
 Subquery Scan on hyper_document
   Filter: f_leak(hyper_document.dtitle)
   ->  Custom Scan (ConstraintAwareAppend)
         Hypertable: hyper_document
         Chunks left after exclusion: 6
         ->  Append
               ->  Seq Scan on _hyper_2_7_chunk
                     Filter: (dauthor = "current_user"())
               ->  Seq Scan on _hyper_2_8_chunk
                     Filter: (dauthor = "current_user"())
               ->  Seq Scan on _hyper_2_9_chunk
                     Filter: (dauthor = "current_user"())
               ->  Seq Scan on _hyper_2_10_chunk
                     Filter: (dauthor = "current_user"())
               ->  Seq Scan on _hyper_2_11_chunk
                     Filter: (dauthor = "current_user"())
               ->  Seq Scan on _hyper_2_12_chunk
                     Filter: (dauthor = "current_user"())
(18 rows)
   ->  Custom Scan (ChunkAppend) on hyper_document hyper_document_1
         Chunks excluded during startup: 0
         ->  Seq Scan on _hyper_2_7_chunk
               Filter: (dauthor = "current_user"())
         ->  Seq Scan on _hyper_2_8_chunk
               Filter: (dauthor = "current_user"())
         ->  Seq Scan on _hyper_2_9_chunk
               Filter: (dauthor = "current_user"())
         ->  Seq Scan on _hyper_2_10_chunk
               Filter: (dauthor = "current_user"())
         ->  Seq Scan on _hyper_2_11_chunk
               Filter: (dauthor = "current_user"())
         ->  Seq Scan on _hyper_2_12_chunk
               Filter: (dauthor = "current_user"())
(16 rows)

-- database superuser does bypass RLS policy when enabled
RESET SESSION AUTHORIZATION;
@@ -2835,19 +2829,17 @@ NOTICE: f_leak => dad
(4 rows)

EXPLAIN (COSTS OFF) SELECT * FROM rls_view;
                QUERY PLAN
-------------------------------------------
 Custom Scan (ConstraintAwareAppend)
   Hypertable: z1
   Chunks left after exclusion: 3
   ->  Append
         ->  Seq Scan on _hyper_9_46_chunk
               Filter: f_leak(b)
         ->  Seq Scan on _hyper_9_47_chunk
               Filter: f_leak(b)
         ->  Seq Scan on _hyper_9_48_chunk
               Filter: f_leak(b)
(10 rows)
             QUERY PLAN
-------------------------------------
 Custom Scan (ChunkAppend) on z1
   Chunks excluded during startup: 0
   ->  Seq Scan on _hyper_9_46_chunk
         Filter: f_leak(b)
   ->  Seq Scan on _hyper_9_47_chunk
         Filter: f_leak(b)
   ->  Seq Scan on _hyper_9_48_chunk
         Filter: f_leak(b)
(8 rows)

-- Query as view/table owner. Should return all records.
SET SESSION AUTHORIZATION regress_rls_alice;
@@ -2865,19 +2857,17 @@ NOTICE: f_leak => dad
(4 rows)

EXPLAIN (COSTS OFF) SELECT * FROM rls_view;
                QUERY PLAN
-------------------------------------------
 Custom Scan (ConstraintAwareAppend)
   Hypertable: z1
   Chunks left after exclusion: 3
   ->  Append
         ->  Seq Scan on _hyper_9_46_chunk
               Filter: f_leak(b)
         ->  Seq Scan on _hyper_9_47_chunk
               Filter: f_leak(b)
         ->  Seq Scan on _hyper_9_48_chunk
               Filter: f_leak(b)
(10 rows)
             QUERY PLAN
-------------------------------------
 Custom Scan (ChunkAppend) on z1
   Chunks excluded during startup: 0
   ->  Seq Scan on _hyper_9_46_chunk
         Filter: f_leak(b)
   ->  Seq Scan on _hyper_9_47_chunk
         Filter: f_leak(b)
   ->  Seq Scan on _hyper_9_48_chunk
         Filter: f_leak(b)
(8 rows)

DROP VIEW rls_view;
-- View and Table owners are different.
@@ -3125,16 +3115,14 @@ SET SESSION AUTHORIZATION regress_rls_alice;
CREATE VIEW rls_sbv WITH (security_barrier) AS
    SELECT * FROM y1 WHERE f_leak(b);
EXPLAIN (COSTS OFF) SELECT * FROM rls_sbv WHERE (a = 1);
                                   QUERY PLAN
--------------------------------------------------------------------------------
 Custom Scan (ConstraintAwareAppend)
   Hypertable: y1
   Chunks left after exclusion: 1
   ->  Append
         ->  Index Scan using _hyper_12_54_chunk_y1_a_idx on _hyper_12_54_chunk
               Index Cond: (a = 1)
               Filter: f_leak(b)
(7 rows)
                                QUERY PLAN
--------------------------------------------------------------------------
 Custom Scan (ChunkAppend) on y1
   Chunks excluded during startup: 0
   ->  Index Scan using _hyper_12_54_chunk_y1_a_idx on _hyper_12_54_chunk
         Index Cond: (a = 1)
         Filter: f_leak(b)
(5 rows)

DROP VIEW rls_sbv;
-- Create view as role that does not own table. RLS should be applied.
@@ -3268,35 +3256,33 @@ NOTICE: f_leak => abc
(14 rows)

EXPLAIN (COSTS OFF) SELECT * FROM y2 WHERE f_leak('abc');
                                             QUERY PLAN
---------------------------------------------------------------------------------------------------
 Custom Scan (ConstraintAwareAppend)
   Hypertable: y2
   Chunks left after exclusion: 11
   ->  Append
         ->  Seq Scan on _hyper_13_55_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
         ->  Seq Scan on _hyper_13_56_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
         ->  Seq Scan on _hyper_13_57_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
         ->  Seq Scan on _hyper_13_58_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
         ->  Seq Scan on _hyper_13_59_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
         ->  Seq Scan on _hyper_13_60_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
         ->  Seq Scan on _hyper_13_61_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
         ->  Seq Scan on _hyper_13_62_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
         ->  Seq Scan on _hyper_13_63_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
         ->  Seq Scan on _hyper_13_64_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
         ->  Seq Scan on _hyper_13_65_chunk
               Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
(26 rows)
                                          QUERY PLAN
---------------------------------------------------------------------------------------------
 Custom Scan (ChunkAppend) on y2
   Chunks excluded during startup: 0
   ->  Seq Scan on _hyper_13_55_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
   ->  Seq Scan on _hyper_13_56_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
   ->  Seq Scan on _hyper_13_57_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
   ->  Seq Scan on _hyper_13_58_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
   ->  Seq Scan on _hyper_13_59_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
   ->  Seq Scan on _hyper_13_60_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
   ->  Seq Scan on _hyper_13_61_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
   ->  Seq Scan on _hyper_13_62_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
   ->  Seq Scan on _hyper_13_63_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
   ->  Seq Scan on _hyper_13_64_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
   ->  Seq Scan on _hyper_13_65_chunk
         Filter: (f_leak('abc'::text) AND (((a % 4) = 0) OR ((a % 3) = 0) OR ((a % 2) = 0)))
(24 rows)

CREATE TABLE test_qual_pushdown (
    abc text
@@ -51,34 +51,30 @@ $BODY$;
EXPLAIN (costs off)
SELECT FROM "one_Partition"
WHERE series_1 IN (SELECT series_1 FROM "one_Partition" WHERE series_1 > series_val());
                                                                      QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------------------
                                                                    QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------------
 Hash Join
   Hash Cond: ("one_Partition".series_1 = "one_Partition_1".series_1)
   ->  Custom Scan (ConstraintAwareAppend)
         Hypertable: one_Partition
         Chunks left after exclusion: 3
         ->  Append
               ->  Index Only Scan using "_hyper_1_1_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_1_chunk
                     Index Cond: (series_1 > (series_val())::double precision)
               ->  Index Only Scan using "_hyper_1_2_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_2_chunk
                     Index Cond: (series_1 > (series_val())::double precision)
               ->  Index Only Scan using "_hyper_1_3_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_3_chunk
                     Index Cond: (series_1 > (series_val())::double precision)
   ->  Custom Scan (ChunkAppend) on "one_Partition"
         Chunks excluded during startup: 0
         ->  Index Only Scan using "_hyper_1_1_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_1_chunk
               Index Cond: (series_1 > (series_val())::double precision)
         ->  Index Only Scan using "_hyper_1_2_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_2_chunk
               Index Cond: (series_1 > (series_val())::double precision)
         ->  Index Only Scan using "_hyper_1_3_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_3_chunk
               Index Cond: (series_1 > (series_val())::double precision)
   ->  Hash
         ->  HashAggregate
               Group Key: "one_Partition_1".series_1
               ->  Custom Scan (ConstraintAwareAppend)
                     Hypertable: one_Partition
                     Chunks left after exclusion: 3
                     ->  Append
                           ->  Index Only Scan using "_hyper_1_1_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_1_chunk _hyper_1_1_chunk_1
                                 Index Cond: (series_1 > (series_val())::double precision)
                           ->  Index Only Scan using "_hyper_1_2_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_2_chunk _hyper_1_2_chunk_1
                                 Index Cond: (series_1 > (series_val())::double precision)
                           ->  Index Only Scan using "_hyper_1_3_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_3_chunk _hyper_1_3_chunk_1
                                 Index Cond: (series_1 > (series_val())::double precision)
(25 rows)
               ->  Custom Scan (ChunkAppend) on "one_Partition" "one_Partition_1"
                     Chunks excluded during startup: 0
                     ->  Index Only Scan using "_hyper_1_1_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_1_chunk _hyper_1_1_chunk_1
                           Index Cond: (series_1 > (series_val())::double precision)
                     ->  Index Only Scan using "_hyper_1_2_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_2_chunk _hyper_1_2_chunk_1
                           Index Cond: (series_1 > (series_val())::double precision)
                     ->  Index Only Scan using "_hyper_1_3_chunk_one_Partition_timeCustom_series_1_idx" on _hyper_1_3_chunk _hyper_1_3_chunk_1
                           Index Cond: (series_1 > (series_val())::double precision)
(21 rows)

-- ConstraintAwareAppend NOT applied for UPDATE
EXPLAIN (costs off)
@@ -207,3 +207,23 @@ SELECT * FROM append_test TABLESAMPLE SYSTEM(1)
WHERE TIME > now_s() - INTERVAL '400 day'
ORDER BY time DESC;

-- test runtime exclusion

-- test runtime exclusion with LATERAL and 2 hypertables
:PREFIX SELECT m1.time, m2.time FROM metrics_timestamptz m1 LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m2 WHERE m1.time = m2.time LIMIT 1) m2 ON true ORDER BY m1.time;

-- test runtime exclusion with LATERAL and generate_series
:PREFIX SELECT g.time FROM generate_series('2000-01-01'::timestamptz, '2000-02-01'::timestamptz, '1d'::interval) g(time) LEFT JOIN LATERAL(SELECT time FROM metrics_timestamptz m WHERE m.time=g.time LIMIT 1) m ON true;
:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time) m ON true;
:PREFIX SELECT * FROM generate_series('2000-01-01'::timestamptz,'2000-02-01'::timestamptz,'1d'::interval) AS g(time) INNER JOIN LATERAL (SELECT time FROM metrics_timestamptz m WHERE time=g.time ORDER BY time) m ON true;

-- test runtime exclusion with subquery
:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE m1.time=(SELECT max(time) FROM metrics_timestamptz);

-- test runtime exclusion with correlated subquery
:PREFIX SELECT m1.time, (SELECT m2.time FROM metrics_timestamptz m2 WHERE m2.time < m1.time ORDER BY m2.time DESC LIMIT 1) FROM metrics_timestamptz m1 WHERE m1.time < '2000-01-10' ORDER BY m1.time;

-- test EXISTS
:PREFIX SELECT m1.time FROM metrics_timestamptz m1 WHERE EXISTS(SELECT 1 FROM metrics_timestamptz m2 WHERE m1.time < m2.time) ORDER BY m1.time DESC limit 1000;
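Runtime exclusion shows up as a counter on the ChunkAppend node in EXPLAIN output. A minimal
sketch of checking it by hand, assuming the metrics_timestamptz hypertable used above (the
counter label reflects ChunkAppend's EXPLAIN output and may differ between versions):

EXPLAIN (ANALYZE, COSTS OFF, TIMING OFF)
SELECT m1.time
FROM metrics_timestamptz m1
WHERE m1.time = (SELECT max(time) FROM metrics_timestamptz);
-- The ChunkAppend node is expected to report a line such as
--   Chunks excluded during runtime: N
-- once the subquery's value is known at execution time.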
@@ -379,6 +379,22 @@ LEFT OUTER JOIN LATERAL(
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 ON o1.time = o2.time ORDER BY o1.time LIMIT 100;

-- test JOIN on time column with USING
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 USING(time) ORDER BY o1.time LIMIT 100;

-- test NATURAL JOIN on time column
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 NATURAL INNER JOIN ordered_append o2 ORDER BY o1.time LIMIT 100;

-- test LEFT JOIN on time column
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 LEFT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o1.time LIMIT 100;

-- test RIGHT JOIN on time column
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 RIGHT JOIN ordered_append o2 ON o1.time=o2.time ORDER BY o2.time LIMIT 100;

-- test JOIN on time column with ON clause expression order switched
-- should use 2 ChunkAppend
:PREFIX SELECT * FROM ordered_append o1 INNER JOIN ordered_append o2 ON o2.time = o1.time ORDER BY o1.time LIMIT 100;
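Each side of these self-joins should be planned as its own ChunkAppend node. A quick way to
confirm that outside the test harness (a sketch; ordered_append is the hypertable set up
earlier in this file):

EXPLAIN (COSTS OFF)
SELECT * FROM ordered_append o1
INNER JOIN ordered_append o2 ON o1.time = o2.time
ORDER BY o1.time
LIMIT 100;
-- The plan should contain two 'Custom Scan (ChunkAppend)' lines, one per join input.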
@@ -1344,6 +1344,9 @@ DROP TABLE copy_rel_to CASCADE;

-- Check WHERE CURRENT OF
SET SESSION AUTHORIZATION regress_rls_alice;
-- WHERE CURRENT OF does not work with custom scan nodes
-- so we have to disable chunk append here
SET timescaledb.disable_optimizations TO true;

CREATE TABLE current_check (currentid int, payload text, rlsuser text);
SELECT public.create_hypertable('current_check', 'currentid', chunk_time_interval=>10);
@@ -1390,6 +1393,8 @@ FETCH RELATIVE 1 FROM current_check_cursor;
DELETE FROM current_check WHERE CURRENT OF current_check_cursor RETURNING *;
SELECT * FROM current_check;

RESET timescaledb.disable_optimizations;

COMMIT;

--
@@ -1373,6 +1373,10 @@ UPDATE current_check SET payload = payload || '_new' WHERE currentid = 2 RETURNI

BEGIN;

-- WHERE CURRENT OF does not work with custom scan nodes
-- so we have to disable chunk append here
SET timescaledb.enable_chunk_append TO false;

DECLARE current_check_cursor SCROLL CURSOR FOR SELECT * FROM current_check;
-- Returns rows that can be seen according to SELECT policy, like plain SELECT
-- above (even rows)
@@ -1392,6 +1396,8 @@ FETCH RELATIVE 1 FROM current_check_cursor;
DELETE FROM current_check WHERE CURRENT OF current_check_cursor RETURNING *;
SELECT * FROM current_check;

RESET timescaledb.enable_chunk_append;

COMMIT;

--
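The workaround used in both files can be condensed into one transaction-scoped pattern. This
is only a sketch, assuming the current_check hypertable created above; SET LOCAL limits the
GUC change to the transaction that owns the cursor, so other sessions keep ChunkAppend:

BEGIN;
-- plain Append supports WHERE CURRENT OF, the ChunkAppend custom scan does not
SET LOCAL timescaledb.enable_chunk_append TO false;
DECLARE c SCROLL CURSOR FOR SELECT * FROM current_check;
FETCH ABSOLUTE 1 FROM c;
UPDATE current_check SET payload = payload || '_new' WHERE CURRENT OF c;
COMMIT;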