Revert "Pushdown of gapfill to data nodes"
This reverts commit eaf3a38fe9553659e515fac72aaad86cf1a06d1e.
This commit is contained in:
parent 4083e48a1c
commit 047d6b175b
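For context: the reverted feature allowed time_bucket_gapfill bucketing and the grouping below it to run on the data nodes instead of the access node. A sketch of the kind of query affected, taken from the pushdown test file removed in this diff (test_gapfill is a distributed hypertable set up by those tests):

    SET enable_partitionwise_aggregate = 'on';
    EXPLAIN (VERBOSE, COSTS OFF)
    SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
           name,
           first(value, time),
           avg(value)
    FROM test_gapfill
    GROUP BY 1, 2;

After the revert, the planner no longer skips plan_add_gapfill for distributed hypertables, so the GapFill node is always placed on the access node.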
@@ -52,7 +52,6 @@ typedef struct Hypertable
 	Oid chunk_sizing_func;
 	Hyperspace *space;
 	SubspaceStore *chunk_cache;
-	bool push_gapfill;
 	/*
 	 * Allows restricting the data nodes to use for the hypertable. Default is to
 	 * use all available data nodes.
@@ -40,8 +40,6 @@
 #include "data_node_scan_exec.h"
 #include "fdw_utils.h"
 
-#include "nodes/gapfill/planner.h"
-
 /*
  * DataNodeScan is a custom scan implementation for scanning hypertables on
  * remote data nodes instead of scanning individual remote chunks.
@@ -375,7 +373,7 @@ force_group_by_push_down(PlannerInfo *root, RelOptInfo *hyper_rel)
  */
static void
 push_down_group_bys(PlannerInfo *root, RelOptInfo *hyper_rel, Hyperspace *hs,
-					DataNodeChunkAssignments *scas, bool gapfill_safe)
+					DataNodeChunkAssignments *scas)
 {
 	const Dimension *dim;
 	bool overlaps;
@@ -416,9 +414,6 @@ push_down_group_bys(PlannerInfo *root, RelOptInfo *hyper_rel, Hyperspace *hs,
 		Assert(NULL != dim);
 		hyper_rel->partexprs[0] = ts_dimension_get_partexprs(dim, hyper_rel->relid);
 		hyper_rel->part_scheme->partnatts = 1;
-
-		if (gapfill_safe)
-			force_group_by_push_down(root, hyper_rel);
 	}
 }
 
@@ -445,7 +440,6 @@ data_node_scan_add_node_paths(PlannerInfo *root, RelOptInfo *hyper_rel)
 	int ndata_node_rels;
 	DataNodeChunkAssignments scas;
 	int i;
-	bool gapfill_safe = false;
 
 	Assert(NULL != ht);
 
@@ -465,11 +459,8 @@ data_node_scan_add_node_paths(PlannerInfo *root, RelOptInfo *hyper_rel)
 	/* Assign chunks to data nodes */
 	data_node_chunk_assignment_assign_chunks(&scas, chunk_rels, nchunk_rels);
 
-	/* Check if we can push down gapfill to data nodes */
-	gapfill_safe = pushdown_gapfill(root, hyper_rel, ht->space, &scas);
-
 	/* Try to push down GROUP BY expressions and bucketing, if possible */
-	push_down_group_bys(root, hyper_rel, ht->space, &scas, gapfill_safe);
+	push_down_group_bys(root, hyper_rel, ht->space, &scas);
 
 	/*
 	 * Create estimates and paths for each data node rel based on data node chunk
@@ -496,18 +487,10 @@ data_node_scan_add_node_paths(PlannerInfo *root, RelOptInfo *hyper_rel)
 									data_node_rel,
 									data_node_rel->serverid,
 									hyper_rte->relid,
-									TS_FDW_RELINFO_HYPERTABLE_DATA_NODE,
-									gapfill_safe);
+									TS_FDW_RELINFO_HYPERTABLE_DATA_NODE);
 
 		fpinfo->sca = sca;
 
-		/*
-		 * Since we can not always call pushdown_gapfill where scas are not available,
-		 * remember if gapfill is safe to be pushed down for this relation for later
-		 * uses e.g. in add_foreign_grouping_paths.
-		 */
-		ht->push_gapfill = gapfill_safe;
-
 		if (!bms_is_empty(sca->chunk_relids))
 		{
 			add_data_node_scan_paths(root, data_node_rel);

@@ -69,7 +69,6 @@
 #include <utils/typcache.h>
 
 #include <func_cache.h>
-#include <hypertable_cache.h>
 #include <remote/utils.h>
 
 #include "relinfo.h"
@@ -106,7 +105,6 @@ typedef struct deparse_expr_cxt
 	StringInfo buf;			/* output buffer to append to */
 	List **params_list;		/* exprs that will become remote Params */
 	DataNodeChunkAssignment *sca;
-	bool has_gapfill;
 } deparse_expr_cxt;
 
 #define REL_ALIAS_PREFIX "r"
@@ -418,8 +416,10 @@ is_foreign_expr(PlannerInfo *root, RelOptInfo *baserel, Expr *expr)
 	if (!foreign_expr_walker((Node *) expr, &glob_cxt))
 		return false;
 
-	/* It is safe to pushdown gapfill in limited cases */
-	if (gapfill_in_expression(expr) && !fpinfo->pushdown_gapfill)
+	/*
+	 * It is not supported to execute time_bucket_gapfill on data node.
+	 */
+	if (gapfill_in_expression(expr))
 		return false;
 
 	/*
@@ -817,7 +817,6 @@ deparseSelectStmtForRel(StringInfo buf, PlannerInfo *root, RelOptInfo *rel, List
 	context.scanrel = IS_UPPER_REL(rel) ? fpinfo->outerrel : rel;
 	context.params_list = params_list;
 	context.sca = sca;
-	context.has_gapfill = false;
 
 	/* Construct SELECT clause */
 	deparseSelectSql(tlist, is_subquery, retrieved_attrs, &context, pathkeys);
@@ -2101,8 +2100,6 @@ deparseExpr(Expr *node, deparse_expr_cxt *context)
 			deparseSubscriptingRef(castNode(SubscriptingRef, node), context);
 			break;
 		case T_FuncExpr:
-			if (gapfill_in_expression(node))
-				context->has_gapfill = true;
 			deparseFuncExpr(castNode(FuncExpr, node), context);
 			break;
 		case T_OpExpr:
@@ -2719,7 +2716,7 @@ deparseAggref(Aggref *node, deparse_expr_cxt *context)
 	use_variadic = node->aggvariadic;
 
 	/* Find aggregate name from aggfnoid which is a pg_proc entry */
-	if (!context->has_gapfill && partial_agg)
+	if (partial_agg)
 		appendStringInfoString(buf, INTERNAL_SCHEMA_NAME "." PARTIALIZE_FUNC_NAME "(");
 
 	appendFunctionName(node->aggfnoid, context);
@@ -2795,7 +2792,7 @@ deparseAggref(Aggref *node, deparse_expr_cxt *context)
 		deparseExpr((Expr *) node->aggfilter, context);
 	}
 
-	appendStringInfoString(buf, !context->has_gapfill && partial_agg ? "))" : ")");
+	appendStringInfoString(buf, partial_agg ? "))" : ")");
 }
 
 /*

@@ -65,12 +65,7 @@ get_foreign_rel_size(PlannerInfo *root, RelOptInfo *baserel, Oid foreigntableid)
 	 * kind of regular table that will ever have this callback called on it. */
 	if (RELKIND_RELATION == rte->relkind)
 	{
-		fdw_relinfo_create(root,
-						   baserel,
-						   InvalidOid,
-						   foreigntableid,
-						   TS_FDW_RELINFO_HYPERTABLE,
-						   false);
+		fdw_relinfo_create(root, baserel, InvalidOid, foreigntableid, TS_FDW_RELINFO_HYPERTABLE);
 	}
 	else
 	{
@@ -80,8 +75,7 @@ get_foreign_rel_size(PlannerInfo *root, RelOptInfo *baserel, Oid foreigntableid)
 						   baserel,
 						   table->serverid,
 						   foreigntableid,
-						   TS_FDW_RELINFO_FOREIGN_TABLE,
-						   false);
+						   TS_FDW_RELINFO_FOREIGN_TABLE);
 
 		apply_table_options(table, fdw_relinfo_get(baserel));
 	}

@@ -373,7 +373,7 @@ estimate_chunk_size(PlannerInfo *root, RelOptInfo *chunk_rel)
 
 TsFdwRelInfo *
 fdw_relinfo_create(PlannerInfo *root, RelOptInfo *rel, Oid server_oid, Oid local_table_id,
-				   TsFdwRelInfoType type, bool gapfill_safe)
+				   TsFdwRelInfoType type)
 {
 	TsFdwRelInfo *fpinfo;
 	ListCell *lc;
@@ -406,8 +406,6 @@ fdw_relinfo_create(PlannerInfo *root, RelOptInfo *rel, Oid server_oid, Oid local
 	if (*refname && strcmp(refname, get_rel_name(rte->relid)) != 0)
 		appendStringInfo(fpinfo->relation_name, " %s", quote_identifier(rte->eref->aliasname));
 
-	fpinfo->pushdown_gapfill = gapfill_safe;
-
 	if (type == TS_FDW_RELINFO_HYPERTABLE)
 	{
 		/* nothing more to do for hypertables */

@@ -49,7 +49,6 @@ typedef struct TsFdwRelInfo
 	 * foreign scan.
 	 */
 	bool pushdown_safe;
-	bool pushdown_gapfill;
 
 	/*
 	 * Restriction clauses, divided into safe and unsafe to pushdown subsets.
@@ -147,8 +146,7 @@ typedef struct TsFdwRelInfo
 } TsFdwRelInfo;
 
 extern TsFdwRelInfo *fdw_relinfo_create(PlannerInfo *root, RelOptInfo *rel, Oid server_oid,
-										Oid local_table_id, TsFdwRelInfoType type,
-										bool gapfill_safe);
+										Oid local_table_id, TsFdwRelInfoType type);
 extern TsFdwRelInfo *fdw_relinfo_alloc_or_get(RelOptInfo *rel);
 extern TsFdwRelInfo *fdw_relinfo_get(RelOptInfo *rel);
 

@@ -905,9 +905,6 @@ add_foreign_grouping_paths(PlannerInfo *root, RelOptInfo *input_rel, RelOptInfo
 	fpinfo->sca = ifpinfo->sca;
 	merge_fdw_options(fpinfo, ifpinfo, NULL);
 
-	if (ifpinfo->pushdown_gapfill)
-		fpinfo->pushdown_gapfill = true;
-
 	/*
 	 * Assess if it is safe to push down aggregation and grouping.
 	 *
||||||
@ -926,23 +923,6 @@ add_foreign_grouping_paths(PlannerInfo *root, RelOptInfo *input_rel, RelOptInfo
|
|||||||
fpinfo->startup_cost = startup_cost;
|
fpinfo->startup_cost = startup_cost;
|
||||||
fpinfo->total_cost = total_cost;
|
fpinfo->total_cost = total_cost;
|
||||||
|
|
||||||
if (ifpinfo->pushdown_gapfill)
|
|
||||||
{
|
|
||||||
/*
|
|
||||||
* If pushdown of gapfill is possible then also check if it would
|
|
||||||
* be beneficial to actually push it down. Since, it can create
|
|
||||||
* more tuples and they need to be transferred to the data node.
|
|
||||||
* However, still pushing of gapfill to the data nodes could make
|
|
||||||
* sense because aggregating over it could be then done at the data
|
|
||||||
* nodes itself, hence ignore pushing down gapfill to data nodes
|
|
||||||
* when it produces a "really" larger amount of tuples.
|
|
||||||
*/
|
|
||||||
if (10 * ifpinfo->rows > fpinfo->rows)
|
|
||||||
{
|
|
||||||
fpinfo->pushdown_gapfill = false;
|
|
||||||
ifpinfo->pushdown_gapfill = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/* Create and add path to the grouping relation. */
|
/* Create and add path to the grouping relation. */
|
||||||
grouppath = (Path *) create_path(root,
|
grouppath = (Path *) create_path(root,
|
||||||
grouped_rel,
|
grouped_rel,
|
||||||
|
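For illustration of the heuristic removed above, with hypothetical estimates: if the input relation had ifpinfo->rows = 1000 and the grouped relation fpinfo->rows = 5000, the check computed 10 * 1000 = 10000 > 5000 and cleared both pushdown_gapfill flags, keeping the GapFill node on the access node; the pushdown survived this check only when the grouped (gapfilled) output was estimated at ten times the input rows or more.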
@@ -21,8 +21,6 @@
 #include "nodes/gapfill/gapfill.h"
 #include "nodes/gapfill/planner.h"
 #include "nodes/gapfill/exec.h"
-#include "func_cache.h"
-#include "estimate.h"
 
 static CustomScanMethods gapfill_plan_methods = {
 	.CustomName = "GapFill",
@@ -575,96 +573,3 @@ gapfill_adjust_window_targetlist(PlannerInfo *root, RelOptInfo *input_rel, RelOp
 		}
 	}
 }
-
-/*
- * Check if it is safe to push down gapfill to data nodes.
- * Currently, we allow only in the following cases,
- *
- * 1. when only one data node has all the chunks
- * 2. when relation has at least one closed dimension and chunks
- *    do not overlap across data nodes.
- * 3. when group by matches space dimension
- *    and is not an expression of space dimension.
- */
-bool
-pushdown_gapfill(PlannerInfo *root, RelOptInfo *hyper_rel, Hyperspace *hs,
-				 DataNodeChunkAssignments *scas)
-{
-	const Dimension *dim;
-	ListCell *lc;
-	TargetEntry *tle;
-	bool space_dim_in_group_by = false;
-
-	Query *parse = root->parse;
-	gapfill_walker_context context = { .call.node = NULL, .count = 0 };
-
-	if (CMD_SELECT != parse->commandType || parse->groupClause == NIL)
-		return false;
-
-	if (!enable_partitionwise_aggregate)
-		return false;
-	/*
-	 * Only check for queries with gapfill call.
-	 */
-	gapfill_function_walker((Node *) parse->targetList, &context);
-
-	if (context.count == 0)
-		return false;
-
-	if (context.count > 1)
-		ereport(ERROR,
-				(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
-				 errmsg("multiple time_bucket_gapfill calls not allowed")));
-
-	Assert(hs->num_dimensions >= 1);
-
-	/* Avoid push down of gapfill when window funcs are present */
-	if (parse->hasWindowFuncs)
-		return false;
-
-	/*
-	 * Check for special case when there is only one data node with chunks. This
-	 * can always be safely pushed down irrespective of partitioning
-	 */
-	if (scas->num_nodes_with_chunks == 1)
-		return true;
-
-	/*
-	 * Get first closed dimension that we use for assigning chunks to
-	 * data nodes. If there is no closed dimension, then pushing gapfill
-	 * to data nodes is not possible.
-	 */
-	dim = hyperspace_get_closed_dimension(hs, 0);
-
-	if (dim == NULL)
-		return false;
-	else
-	{
-		if (parse->groupClause)
-		{
-			foreach (lc, parse->groupClause)
-			{
-				/*
-				 * Check if the group by matches dimension and
-				 * group by clause has exact dimension and not
-				 * an expression of that attribute.
-				 */
-				SortGroupClause *sort = (SortGroupClause *) lfirst(lc);
-				tle = get_sortgroupref_tle(sort->tleSortGroupRef, parse->targetList);
-
-				if (tle->resno == dim->column_attno)
-				{
-					space_dim_in_group_by = true;
-
-					if (IsA(tle->expr, Var))
-						break;
-					else
-						return false;
-				}
-			}
-		}
-		if (!space_dim_in_group_by)
-			return false;
-	}
-	return !data_node_chunk_assignments_are_overlapping(scas, dim->fd.id);
-}

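The pushdown_gapfill function removed above gated the pushdown on the shape of the GROUP BY. A sketch of the distinction it drew, using queries from the removed test file (assuming test_gapfill is space-partitioned on name):

    -- Was considered safe: the space column appears directly in the GROUP BY,
    -- so each data node holds complete groups.
    SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
           name, avg(value)
    FROM test_gapfill
    GROUP BY 1, 2;

    -- Was rejected: no space dimension in the GROUP BY, so the rows of one bucket
    -- can live on several data nodes and gapfill must run on the access node.
    SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-02 18:00'),
           avg(value)
    FROM test_gapfill
    GROUP BY 1;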
@@ -8,28 +8,10 @@
 
 #include <postgres.h>
 
-#include "fdw/data_node_scan_plan.h"
-#include <hypertable_cache.h>
-#include <planner/planner.h>
-#include <import/allpaths.h>
-#include <import/planner.h>
-#include <func_cache.h>
-#include <dimension.h>
-#include <compat/compat.h>
-#include <debug_guc.h>
-#include <debug.h>
-
-#include "fdw/data_node_chunk_assignment.h"
-#include "fdw/scan_plan.h"
-#include "fdw/data_node_scan_plan.h"
-#include "fdw/data_node_scan_exec.h"
-
 bool gapfill_in_expression(Expr *node);
 void plan_add_gapfill(PlannerInfo *root, RelOptInfo *group_rel);
 void gapfill_adjust_window_targetlist(PlannerInfo *root, RelOptInfo *input_rel,
 									  RelOptInfo *output_rel);
-bool pushdown_gapfill(PlannerInfo *root, RelOptInfo *hyper_rel, Hyperspace *hs,
-					  DataNodeChunkAssignments *scas);
 
 typedef struct GapFillPath
 {

@@ -71,11 +71,7 @@ tsl_create_upper_paths_hook(PlannerInfo *root, UpperRelationKind stage, RelOptIn
 	{
 		case UPPERREL_GROUP_AGG:
 			if (input_reltype != TS_REL_HYPERTABLE_CHILD)
-			{
-				/* Avoid adding gapfill node to the access node if it is pushed to data node */
-				if (!dist_ht || !ht->push_gapfill)
-					plan_add_gapfill(root, output_rel);
-			}
+				plan_add_gapfill(root, output_rel);
 			break;
 		case UPPERREL_WINDOW:
 			if (IsA(linitial(input_rel->pathlist), CustomPath))

@@ -132,9 +132,9 @@ select * from disttable_with_ct;
 -----------------------------------------------------------------------------------------------------------------------------------------------------------
  Custom Scan (DataNodeScan) on public.disttable_with_ct (actual rows=2 loops=1)
    Output: disttable_with_ct."time", disttable_with_ct.txn_id, disttable_with_ct.val, disttable_with_ct.info
-   Data node: data_node_1
+   Data node: data_node_2
    Fetcher Type: Cursor
    Chunks: _dist_hyper_X_X_chunk
-   Remote SQL: SELECT "time", txn_id, val, info FROM public.disttable_with_ct WHERE _timescaledb_internal.chunks_in(public.disttable_with_ct.*, ARRAY[33])
+   Remote SQL: SELECT "time", txn_id, val, info FROM public.disttable_with_ct WHERE _timescaledb_internal.chunks_in(public.disttable_with_ct.*, ARRAY[20])
 (6 rows)
 

@@ -36,8 +36,7 @@ SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:0
 first(value, time),
 avg(value)
 FROM :CONDITIONS
-GROUP BY 1,2
-ORDER BY 2,1;
+GROUP BY 1,2;
      time_bucket_gapfill      | device |  first   |   avg    
 ------------------------------+--------+----------+----------
  Sun Jan 01 04:00:00 2017 PST |      1 |      1.2 |      1.2
@@ -87,8 +86,7 @@ SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:0
 first(value, time),
 avg(value)
 FROM :CONDITIONS
-GROUP BY 2,1
-ORDER BY 2,1;
+GROUP BY 2,1;
      time_bucket_gapfill      | device |  first   |   avg    
 ------------------------------+--------+----------+----------
  Sun Jan 01 04:00:00 2017 PST |      1 |      1.2 |      1.2

@@ -1,316 +0,0 @@
--- This file and its contents are licensed under the Timescale License.
--- Please see the included NOTICE for copyright information and
--- LICENSE-TIMESCALE for a copy of the license.
-\set ON_ERROR_STOP 0
-SET enable_partitionwise_aggregate = 'on';
-SET timescaledb.enable_remote_explain = true;
--- Cases where gapfill is pushed down to data-nodes
-EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
-name,
-first(value, time),
-avg(value)
-FROM test_gapfill
-GROUP BY 1,2;
- QUERY PLAN
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
- Custom Scan (AsyncAppend)
-   Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), name, (first(value, "time")), (avg(value))
-   ->  Append
-         ->  Custom Scan (DataNodeScan)
-               Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill.name, (first(test_gapfill.value, test_gapfill."time")), (avg(test_gapfill.value))
-               Relations: Aggregate on (public.test_gapfill)
-               Data node: data_node_1
-               Chunks: _dist_hyper_X_X_chunk
-               Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28]) GROUP BY 1, 2
-               Remote EXPLAIN:
-                 Custom Scan (GapFill)
-                   Output: (public.time_bucket_gapfill('03:00:00'::interval, "time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), name, (public.first(value, "time")), (avg(value))
-                   ->  Sort
-                         Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
-                         Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-                         ->  HashAggregate
-                               Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
-                               Group Key: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name
-                               ->  Result
-                                     Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-                                     ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                           Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-
-         ->  Custom Scan (DataNodeScan)
-               Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill_1.name, (first(test_gapfill_1.value, test_gapfill_1."time")), (avg(test_gapfill_1.value))
-               Relations: Aggregate on (public.test_gapfill)
-               Data node: data_node_3
-               Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
-               Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25]) GROUP BY 1, 2
-               Remote EXPLAIN:
-                 Custom Scan (GapFill)
-                   Output: (public.time_bucket_gapfill('03:00:00'::interval, "time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), name, (public.first(value, "time")), (avg(value))
-                   ->  Sort
-                         Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
-                         Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-                         ->  HashAggregate
-                               Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
-                               Group Key: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name
-                               ->  Result
-                                     Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-                                     ->  Append
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-
-(48 rows)
-
-EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
-name,
-first(value, time),
-avg(value)
-FROM test_gapfill
-GROUP BY 2,1;
- QUERY PLAN
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
- Custom Scan (AsyncAppend)
-   Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), name, (first(value, "time")), (avg(value))
-   ->  Append
-         ->  Custom Scan (DataNodeScan)
-               Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill.name, (first(test_gapfill.value, test_gapfill."time")), (avg(test_gapfill.value))
-               Relations: Aggregate on (public.test_gapfill)
-               Data node: data_node_1
-               Chunks: _dist_hyper_X_X_chunk
-               Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28]) GROUP BY 2, 1
-               Remote EXPLAIN:
-                 Custom Scan (GapFill)
-                   Output: (public.time_bucket_gapfill('03:00:00'::interval, "time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), name, (public.first(value, "time")), (avg(value))
-                   ->  Sort
-                         Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
-                         Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-                         ->  HashAggregate
-                               Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
-                               Group Key: _dist_hyper_X_X_chunk.name, public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)
-                               ->  Result
-                                     Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-                                     ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                           Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-
-         ->  Custom Scan (DataNodeScan)
-               Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill_1.name, (first(test_gapfill_1.value, test_gapfill_1."time")), (avg(test_gapfill_1.value))
-               Relations: Aggregate on (public.test_gapfill)
-               Data node: data_node_3
-               Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
-               Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25]) GROUP BY 2, 1
-               Remote EXPLAIN:
-                 Custom Scan (GapFill)
-                   Output: (public.time_bucket_gapfill('03:00:00'::interval, "time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), name, (public.first(value, "time")), (avg(value))
-                   ->  Sort
-                         Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
-                         Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-                         ->  HashAggregate
-                               Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
-                               Group Key: _dist_hyper_X_X_chunk.name, public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)
-                               ->  Result
-                                     Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-                                     ->  Append
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-
-(48 rows)
-
--- Check for multiple gapfill calls
-SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
-time_bucket_gapfill('6 hours', time, '2017-01-01 08:00', '2017-01-01 18:00'),
-name,
-first(value, time),
-avg(value)
-FROM test_gapfill
-GROUP BY 2,1,3;
-ERROR:  multiple time_bucket_gapfill calls not allowed
--- Cases where gapfill is not pushed down to data-nodes
--- Space dimension is not in group by clause
-EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-02 18:00'),
-first(value, time),
-avg(value)
-FROM test_gapfill
-GROUP BY 1;
- QUERY PLAN
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
- Custom Scan (GapFill)
-   Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), (first(value, "time")), (avg(value))
-   ->  Finalize GroupAggregate
-         Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), first(test_gapfill.value, test_gapfill."time"), avg(test_gapfill.value)
-         Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone))
-         ->  Sort
-               Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), (PARTIAL first(test_gapfill.value, test_gapfill."time")), (PARTIAL avg(test_gapfill.value))
-               Sort Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone))
-               ->  Append
-                     ->  Partial HashAggregate
-                           Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), PARTIAL first(test_gapfill.value, test_gapfill."time"), PARTIAL avg(test_gapfill.value)
-                           Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)
-                           ->  Custom Scan (DataNodeScan) on public.test_gapfill
-                                 Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone), test_gapfill.value, test_gapfill."time"
-                                 Data node: data_node_1
-                                 Chunks: _dist_hyper_X_X_chunk
-                                 Remote SQL: SELECT "time", value FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28])
-                                 Remote EXPLAIN:
-                                   Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                     Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value
-
-                     ->  Partial HashAggregate
-                           Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), PARTIAL first(test_gapfill_1.value, test_gapfill_1."time"), PARTIAL avg(test_gapfill_1.value)
-                           Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)
-                           ->  Custom Scan (DataNodeScan) on public.test_gapfill test_gapfill_1
-                                 Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone), test_gapfill_1.value, test_gapfill_1."time"
-                                 Data node: data_node_3
-                                 Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
-                                 Remote SQL: SELECT "time", value FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25])
-                                 Remote EXPLAIN:
-                                   Append
-                                     ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                           Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value
-                                     ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                           Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value
-                                     ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                           Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value
-
-(38 rows)
-
--- Window functions
-EXPLAIN (VERBOSE, COSTS OFF) SELECT
-time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
-lag(min(time)) OVER ()
-FROM test_gapfill
-GROUP BY 1;
- QUERY PLAN
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
- WindowAgg
-   Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), lag((min("time"))) OVER (?)
-   ->  Custom Scan (GapFill)
-         Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), (min("time"))
-         ->  Finalize GroupAggregate
-               Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), min(test_gapfill."time")
-               Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone))
-               ->  Sort
-                     Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), (PARTIAL min(test_gapfill."time"))
-                     Sort Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone))
-                     ->  Append
-                           ->  Partial HashAggregate
-                                 Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), PARTIAL min(test_gapfill."time")
-                                 Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)
-                                 ->  Custom Scan (DataNodeScan) on public.test_gapfill
-                                       Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), test_gapfill."time"
-                                       Data node: data_node_1
-                                       Chunks: _dist_hyper_X_X_chunk
-                                       Remote SQL: SELECT "time" FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28])
-                                       Remote EXPLAIN:
-                                         Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                           Output: _dist_hyper_X_X_chunk."time"
-
-                           ->  Partial HashAggregate
-                                 Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), PARTIAL min(test_gapfill_1."time")
-                                 Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)
-                                 ->  Custom Scan (DataNodeScan) on public.test_gapfill test_gapfill_1
-                                       Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), test_gapfill_1."time"
-                                       Data node: data_node_3
-                                       Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
-                                       Remote SQL: SELECT "time" FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25])
-                                       Remote EXPLAIN:
-                                         Append
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time"
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time"
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time"
-
-(40 rows)
-
--- Data nodes are overlapping
-EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2018-01-01 06:00', '2018-01-01 18:00'),
-name,
-first(value, time),
-avg(value)
-FROM test_gapfill_overlap
-GROUP BY 1,2;
- QUERY PLAN
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
- Custom Scan (GapFill)
-   Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), name, (first(value, "time")), (avg(value))
-   ->  Sort
-         Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap.name, (first(test_gapfill_overlap.value, test_gapfill_overlap."time")), (avg(test_gapfill_overlap.value))
-         Sort Key: test_gapfill_overlap.name, (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone))
-         ->  Finalize GroupAggregate
-               Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap.name, first(test_gapfill_overlap.value, test_gapfill_overlap."time"), avg(test_gapfill_overlap.value)
-               Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap.name
-               ->  Merge Append
-                     Sort Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap.name
-                     ->  Partial GroupAggregate
-                           Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap.name, PARTIAL first(test_gapfill_overlap.value, test_gapfill_overlap."time"), PARTIAL avg(test_gapfill_overlap.value)
-                           Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap.name
-                           ->  Sort
-                                 Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap.name, test_gapfill_overlap.value, test_gapfill_overlap."time"
-                                 Sort Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap.name
-                                 ->  Custom Scan (DataNodeScan) on public.test_gapfill_overlap
-                                       Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone), test_gapfill_overlap.name, test_gapfill_overlap.value, test_gapfill_overlap."time"
-                                       Data node: data_node_1
-                                       Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
-                                       Remote SQL: SELECT "time", name, value FROM public.test_gapfill_overlap WHERE _timescaledb_internal.chunks_in(public.test_gapfill_overlap.*, ARRAY[29, 30, 31, 32])
-                                       Remote EXPLAIN:
-                                         Append
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-
-                     ->  Partial GroupAggregate
-                           Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_1.name, PARTIAL first(test_gapfill_overlap_1.value, test_gapfill_overlap_1."time"), PARTIAL avg(test_gapfill_overlap_1.value)
-                           Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_1.name
-                           ->  Sort
-                                 Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_1.name, test_gapfill_overlap_1.value, test_gapfill_overlap_1."time"
-                                 Sort Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_1.name
-                                 ->  Custom Scan (DataNodeScan) on public.test_gapfill_overlap test_gapfill_overlap_1
-                                       Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone), test_gapfill_overlap_1.name, test_gapfill_overlap_1.value, test_gapfill_overlap_1."time"
-                                       Data node: data_node_2
-                                       Chunks: _dist_hyper_X_X_chunk
-                                       Remote SQL: SELECT "time", name, value FROM public.test_gapfill_overlap WHERE _timescaledb_internal.chunks_in(public.test_gapfill_overlap.*, ARRAY[20])
-                                       Remote EXPLAIN:
-                                         Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                           Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-
-                     ->  Partial GroupAggregate
-                           Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_2."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_2.name, PARTIAL first(test_gapfill_overlap_2.value, test_gapfill_overlap_2."time"), PARTIAL avg(test_gapfill_overlap_2.value)
-                           Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_2."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_2.name
-                           ->  Sort
-                                 Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_2."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_2.name, test_gapfill_overlap_2.value, test_gapfill_overlap_2."time"
-                                 Sort Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_2."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_2.name
-                                 ->  Custom Scan (DataNodeScan) on public.test_gapfill_overlap test_gapfill_overlap_2
-                                       Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_2."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone), test_gapfill_overlap_2.name, test_gapfill_overlap_2.value, test_gapfill_overlap_2."time"
-                                       Data node: data_node_3
-                                       Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
-                                       Remote SQL: SELECT "time", name, value FROM public.test_gapfill_overlap WHERE _timescaledb_internal.chunks_in(public.test_gapfill_overlap.*, ARRAY[26, 27, 28, 29, 30])
-                                       Remote EXPLAIN:
-                                         Append
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-                                           ->  Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
-                                                 Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-
-(71 rows)
-
-SET timescaledb.enable_remote_explain = false;
-DROP TABLE test_gapfill;
-DROP TABLE test_gapfill_overlap;
@ -1,297 +0,0 @@
|
|||||||
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
\set ON_ERROR_STOP 0
SET enable_partitionwise_aggregate = 'on';
SET timescaledb.enable_remote_explain = true;
-- Cases where gapfill is pushed down to data-nodes
EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
name,
first(value, time),
avg(value)
FROM test_gapfill
GROUP BY 1,2;
QUERY PLAN
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Custom Scan (AsyncAppend)
Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), name, (first(value, "time")), (avg(value))
-> Append
-> Custom Scan (DataNodeScan)
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill.name, (first(test_gapfill.value, test_gapfill."time")), (avg(test_gapfill.value))
Relations: Aggregate on (public.test_gapfill)
Data node: data_node_1
Chunks: _dist_hyper_X_X_chunk
Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28]) GROUP BY 1, 2
Remote EXPLAIN:
Custom Scan (GapFill)
Output: (public.time_bucket_gapfill('03:00:00'::interval, "time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), name, (public.first(value, "time")), (avg(value))
-> Sort
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-> HashAggregate
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
Group Key: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name
-> Result
Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

-> Custom Scan (DataNodeScan)
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill_1.name, (first(test_gapfill_1.value, test_gapfill_1."time")), (avg(test_gapfill_1.value))
Relations: Aggregate on (public.test_gapfill)
Data node: data_node_3
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25]) GROUP BY 1, 2
Remote EXPLAIN:
Custom Scan (GapFill)
Output: (public.time_bucket_gapfill('03:00:00'::interval, test_gapfill."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), test_gapfill.name, (public.first(test_gapfill.value, test_gapfill."time")), (avg(test_gapfill.value))
-> Sort
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-> HashAggregate
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
Group Key: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name
-> Result
Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-> Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

(48 rows)

EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
name,
first(value, time),
avg(value)
FROM test_gapfill
GROUP BY 2,1;
QUERY PLAN
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Custom Scan (AsyncAppend)
Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), name, (first(value, "time")), (avg(value))
-> Append
-> Custom Scan (DataNodeScan)
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill.name, (first(test_gapfill.value, test_gapfill."time")), (avg(test_gapfill.value))
Relations: Aggregate on (public.test_gapfill)
Data node: data_node_1
Chunks: _dist_hyper_X_X_chunk
Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28]) GROUP BY 2, 1
Remote EXPLAIN:
Custom Scan (GapFill)
Output: (public.time_bucket_gapfill('03:00:00'::interval, "time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), name, (public.first(value, "time")), (avg(value))
-> Sort
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-> HashAggregate
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
Group Key: _dist_hyper_X_X_chunk.name, public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)
-> Result
Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

-> Custom Scan (DataNodeScan)
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill_1.name, (first(test_gapfill_1.value, test_gapfill_1."time")), (avg(test_gapfill_1.value))
Relations: Aggregate on (public.test_gapfill)
Data node: data_node_3
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25]) GROUP BY 2, 1
Remote EXPLAIN:
Custom Scan (GapFill)
Output: (public.time_bucket_gapfill('03:00:00'::interval, test_gapfill."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), test_gapfill.name, (public.first(test_gapfill.value, test_gapfill."time")), (avg(test_gapfill.value))
-> Sort
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-> HashAggregate
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
Group Key: _dist_hyper_X_X_chunk.name, public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)
-> Result
Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-> Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

(48 rows)

-- Check for multiple gapfill calls
SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
time_bucket_gapfill('6 hours', time, '2017-01-01 08:00', '2017-01-01 18:00'),
name,
first(value, time),
avg(value)
FROM test_gapfill
GROUP BY 2,1,3;
ERROR: multiple time_bucket_gapfill calls not allowed
-- Cases where gapfill is not pushed down to data-nodes
-- Space dimension is not in group by clause
EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-02 18:00'),
first(value, time),
avg(value)
FROM test_gapfill
GROUP BY 1;
QUERY PLAN
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Custom Scan (GapFill)
Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), (first(value, "time")), (avg(value))
-> Finalize GroupAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), first(test_gapfill.value, test_gapfill."time"), avg(test_gapfill.value)
Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone))
-> Sort
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), (PARTIAL first(test_gapfill.value, test_gapfill."time")), (PARTIAL avg(test_gapfill.value))
Sort Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone))
-> Append
-> Partial HashAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), PARTIAL first(test_gapfill.value, test_gapfill."time"), PARTIAL avg(test_gapfill.value)
Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)
-> Custom Scan (DataNodeScan) on public.test_gapfill
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone), test_gapfill.value, test_gapfill."time"
Data node: data_node_1
Chunks: _dist_hyper_X_X_chunk
Remote SQL: SELECT "time", value FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28])
Remote EXPLAIN:
Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value

-> Partial HashAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), PARTIAL first(test_gapfill_1.value, test_gapfill_1."time"), PARTIAL avg(test_gapfill_1.value)
Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)
-> Custom Scan (DataNodeScan) on public.test_gapfill test_gapfill_1
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone), test_gapfill_1.value, test_gapfill_1."time"
Data node: data_node_3
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT "time", value FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25])
Remote EXPLAIN:
Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value

(38 rows)

-- Window functions
EXPLAIN (VERBOSE, COSTS OFF) SELECT
time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
lag(min(time)) OVER ()
FROM test_gapfill
GROUP BY 1;
QUERY PLAN
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
WindowAgg
Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), lag((min("time"))) OVER (?)
-> Custom Scan (GapFill)
Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), (min("time"))
-> Finalize GroupAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), min(test_gapfill."time")
Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone))
-> Sort
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), (PARTIAL min(test_gapfill."time"))
Sort Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone))
-> Append
-> Partial HashAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), PARTIAL min(test_gapfill."time")
Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)
-> Custom Scan (DataNodeScan) on public.test_gapfill
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), test_gapfill."time"
Data node: data_node_1
Chunks: _dist_hyper_X_X_chunk
Remote SQL: SELECT "time" FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28])
Remote EXPLAIN:
Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time"

-> Partial HashAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), PARTIAL min(test_gapfill_1."time")
Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)
-> Custom Scan (DataNodeScan) on public.test_gapfill test_gapfill_1
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), test_gapfill_1."time"
Data node: data_node_3
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT "time" FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25])
Remote EXPLAIN:
Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time"
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time"
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time"

(40 rows)

-- Data nodes are overlapping
EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2018-01-01 06:00', '2018-01-01 18:00'),
name,
first(value, time),
avg(value)
FROM test_gapfill_overlap
GROUP BY 1,2;
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Custom Scan (GapFill)
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap.name, (first(test_gapfill_overlap.value, test_gapfill_overlap."time")), (avg(test_gapfill_overlap.value))
-> Sort
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_1.name, (first(test_gapfill_overlap_1.value, test_gapfill_overlap_1."time")), (avg(test_gapfill_overlap_1.value))
Sort Key: test_gapfill_overlap_1.name, (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone))
-> HashAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_1.name, first(test_gapfill_overlap_1.value, test_gapfill_overlap_1."time"), avg(test_gapfill_overlap_1.value)
Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_1.name
-> Append
-> Custom Scan (DataNodeScan) on public.test_gapfill_overlap test_gapfill_overlap_1
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone), test_gapfill_overlap_1.name, test_gapfill_overlap_1.value, test_gapfill_overlap_1."time"
Data node: data_node_1
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT "time", name, value FROM public.test_gapfill_overlap WHERE _timescaledb_internal.chunks_in(public.test_gapfill_overlap.*, ARRAY[29, 30, 31, 32])
Remote EXPLAIN:
Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

-> Custom Scan (DataNodeScan) on public.test_gapfill_overlap test_gapfill_overlap_2
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_2."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone), test_gapfill_overlap_2.name, test_gapfill_overlap_2.value, test_gapfill_overlap_2."time"
Data node: data_node_2
Chunks: _dist_hyper_X_X_chunk
Remote SQL: SELECT "time", name, value FROM public.test_gapfill_overlap WHERE _timescaledb_internal.chunks_in(public.test_gapfill_overlap.*, ARRAY[20])
Remote EXPLAIN:
Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

-> Custom Scan (DataNodeScan) on public.test_gapfill_overlap test_gapfill_overlap_3
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_3."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone), test_gapfill_overlap_3.name, test_gapfill_overlap_3.value, test_gapfill_overlap_3."time"
Data node: data_node_3
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT "time", name, value FROM public.test_gapfill_overlap WHERE _timescaledb_internal.chunks_in(public.test_gapfill_overlap.*, ARRAY[26, 27, 28, 29, 30])
Remote EXPLAIN:
Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

(52 rows)

SET timescaledb.enable_remote_explain = false;
DROP TABLE test_gapfill;
DROP TABLE test_gapfill_overlap;
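Conversely, the "not pushed down" cases in this file keep the GapFill node on the access node whenever a group could span data nodes: the space column is missing from the GROUP BY, window functions sit over the aggregates, or chunk assignments overlap across nodes. A sketch of the non-eligible shape, under the same assumed test_gapfill table as in the note above:

-- Not pushdown-eligible: "name" is absent from GROUP BY, so the rows of one
-- time bucket may live on several data nodes and gapfill must run centrally.
SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-02 18:00'),
       first(value, time),
       avg(value)
FROM test_gapfill
GROUP BY 1;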
@ -1,297 +0,0 @@
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
\set ON_ERROR_STOP 0
SET enable_partitionwise_aggregate = 'on';
SET timescaledb.enable_remote_explain = true;
-- Cases where gapfill is pushed down to data-nodes
EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
name,
first(value, time),
avg(value)
FROM test_gapfill
GROUP BY 1,2;
QUERY PLAN
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Custom Scan (AsyncAppend)
Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), name, (first(value, "time")), (avg(value))
-> Append
-> Custom Scan (DataNodeScan)
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill.name, (first(test_gapfill.value, test_gapfill."time")), (avg(test_gapfill.value))
Relations: Aggregate on (public.test_gapfill)
Data node: data_node_1
Chunks: _dist_hyper_X_X_chunk
Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28]) GROUP BY 1, 2
Remote EXPLAIN:
Custom Scan (GapFill)
Output: (public.time_bucket_gapfill('03:00:00'::interval, "time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), name, (public.first(value, "time")), (avg(value))
-> Sort
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-> HashAggregate
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
Group Key: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name
-> Result
Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

-> Custom Scan (DataNodeScan)
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill_1.name, (first(test_gapfill_1.value, test_gapfill_1."time")), (avg(test_gapfill_1.value))
Relations: Aggregate on (public.test_gapfill)
Data node: data_node_3
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25]) GROUP BY 1, 2
Remote EXPLAIN:
Custom Scan (GapFill)
Output: (public.time_bucket_gapfill('03:00:00'::interval, test_gapfill."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), test_gapfill.name, (public.first(test_gapfill.value, test_gapfill."time")), (avg(test_gapfill.value))
-> Sort
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-> HashAggregate
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
Group Key: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name
-> Result
Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-> Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

(48 rows)

EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
name,
first(value, time),
avg(value)
FROM test_gapfill
GROUP BY 2,1;
QUERY PLAN
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Custom Scan (AsyncAppend)
Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), name, (first(value, "time")), (avg(value))
-> Append
-> Custom Scan (DataNodeScan)
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill.name, (first(test_gapfill.value, test_gapfill."time")), (avg(test_gapfill.value))
Relations: Aggregate on (public.test_gapfill)
Data node: data_node_1
Chunks: _dist_hyper_X_X_chunk
Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28]) GROUP BY 2, 1
Remote EXPLAIN:
Custom Scan (GapFill)
Output: (public.time_bucket_gapfill('03:00:00'::interval, "time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), name, (public.first(value, "time")), (avg(value))
-> Sort
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-> HashAggregate
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
Group Key: _dist_hyper_X_X_chunk.name, public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)
-> Result
Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

-> Custom Scan (DataNodeScan)
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), test_gapfill_1.name, (first(test_gapfill_1.value, test_gapfill_1."time")), (avg(test_gapfill_1.value))
Relations: Aggregate on (public.test_gapfill)
Data node: data_node_3
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT public.time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), name, public.first(value, "time"), avg(value) FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25]) GROUP BY 2, 1
Remote EXPLAIN:
Custom Scan (GapFill)
Output: (public.time_bucket_gapfill('03:00:00'::interval, test_gapfill."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), test_gapfill.name, (public.first(test_gapfill.value, test_gapfill."time")), (avg(test_gapfill.value))
-> Sort
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, (public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time")), (avg(_dist_hyper_X_X_chunk.value))
Sort Key: _dist_hyper_X_X_chunk.name, (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone))
-> HashAggregate
Output: (public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)), _dist_hyper_X_X_chunk.name, public.first(_dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"), avg(_dist_hyper_X_X_chunk.value)
Group Key: _dist_hyper_X_X_chunk.name, public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone)
-> Result
Output: public.time_bucket_gapfill('03:00:00'::interval, _dist_hyper_X_X_chunk."time", '2017-01-01 06:00:00'::timestamp without time zone, '2017-01-01 18:00:00'::timestamp without time zone), _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value, _dist_hyper_X_X_chunk."time"
-> Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

(48 rows)

-- Check for multiple gapfill calls
SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
time_bucket_gapfill('6 hours', time, '2017-01-01 08:00', '2017-01-01 18:00'),
name,
first(value, time),
avg(value)
FROM test_gapfill
GROUP BY 2,1,3;
ERROR: multiple time_bucket_gapfill calls not allowed
-- Cases where gapfill is not pushed down to data-nodes
-- Space dimension is not in group by clause
EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-02 18:00'),
first(value, time),
avg(value)
FROM test_gapfill
GROUP BY 1;
QUERY PLAN
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Custom Scan (GapFill)
Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), (first(value, "time")), (avg(value))
-> Finalize GroupAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), first(test_gapfill.value, test_gapfill."time"), avg(test_gapfill.value)
Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone))
-> Sort
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), (PARTIAL first(test_gapfill.value, test_gapfill."time")), (PARTIAL avg(test_gapfill.value))
Sort Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone))
-> Append
-> Partial HashAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), PARTIAL first(test_gapfill.value, test_gapfill."time"), PARTIAL avg(test_gapfill.value)
Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)
-> Custom Scan (DataNodeScan) on public.test_gapfill
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone), test_gapfill.value, test_gapfill."time"
Data node: data_node_1
Chunks: _dist_hyper_X_X_chunk
Remote SQL: SELECT "time", value FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28])
Remote EXPLAIN:
Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value

-> Partial HashAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)), PARTIAL first(test_gapfill_1.value, test_gapfill_1."time"), PARTIAL avg(test_gapfill_1.value)
Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone)
-> Custom Scan (DataNodeScan) on public.test_gapfill test_gapfill_1
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Mon Jan 02 18:00:00 2017'::timestamp without time zone), test_gapfill_1.value, test_gapfill_1."time"
Data node: data_node_3
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT "time", value FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25])
Remote EXPLAIN:
Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.value

(38 rows)

-- Window functions
EXPLAIN (VERBOSE, COSTS OFF) SELECT
time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
lag(min(time)) OVER ()
FROM test_gapfill
GROUP BY 1;
QUERY PLAN
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
WindowAgg
Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), lag((min("time"))) OVER (?)
-> Custom Scan (GapFill)
Output: (time_bucket_gapfill('@ 3 hours'::interval, "time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), (min("time"))
-> Finalize GroupAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), min(test_gapfill."time")
Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone))
-> Sort
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), (PARTIAL min(test_gapfill."time"))
Sort Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone))
-> Append
-> Partial HashAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), PARTIAL min(test_gapfill."time")
Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)
-> Custom Scan (DataNodeScan) on public.test_gapfill
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), test_gapfill."time"
Data node: data_node_1
Chunks: _dist_hyper_X_X_chunk
Remote SQL: SELECT "time" FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[28])
Remote EXPLAIN:
Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time"

-> Partial HashAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)), PARTIAL min(test_gapfill_1."time")
Group Key: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone)
-> Custom Scan (DataNodeScan) on public.test_gapfill test_gapfill_1
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_1."time", 'Sun Jan 01 06:00:00 2017'::timestamp without time zone, 'Sun Jan 01 18:00:00 2017'::timestamp without time zone), test_gapfill_1."time"
Data node: data_node_3
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT "time" FROM public.test_gapfill WHERE _timescaledb_internal.chunks_in(public.test_gapfill.*, ARRAY[23, 24, 25])
Remote EXPLAIN:
Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time"
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time"
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time"

(40 rows)

-- Data nodes are overlapping
EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2018-01-01 06:00', '2018-01-01 18:00'),
name,
first(value, time),
avg(value)
FROM test_gapfill_overlap
GROUP BY 1,2;
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Custom Scan (GapFill)
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap.name, (first(test_gapfill_overlap.value, test_gapfill_overlap."time")), (avg(test_gapfill_overlap.value))
-> Sort
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_1.name, (first(test_gapfill_overlap_1.value, test_gapfill_overlap_1."time")), (avg(test_gapfill_overlap_1.value))
Sort Key: test_gapfill_overlap_1.name, (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone))
-> HashAggregate
Output: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_1.name, first(test_gapfill_overlap_1.value, test_gapfill_overlap_1."time"), avg(test_gapfill_overlap_1.value)
Group Key: (time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone)), test_gapfill_overlap_1.name
-> Append
-> Custom Scan (DataNodeScan) on public.test_gapfill_overlap test_gapfill_overlap_1
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_1."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone), test_gapfill_overlap_1.name, test_gapfill_overlap_1.value, test_gapfill_overlap_1."time"
Data node: data_node_1
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT "time", name, value FROM public.test_gapfill_overlap WHERE _timescaledb_internal.chunks_in(public.test_gapfill_overlap.*, ARRAY[29, 30, 31, 32])
Remote EXPLAIN:
Append
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

-> Custom Scan (DataNodeScan) on public.test_gapfill_overlap test_gapfill_overlap_2
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_2."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone), test_gapfill_overlap_2.name, test_gapfill_overlap_2.value, test_gapfill_overlap_2."time"
Data node: data_node_2
Chunks: _dist_hyper_X_X_chunk
Remote SQL: SELECT "time", name, value FROM public.test_gapfill_overlap WHERE _timescaledb_internal.chunks_in(public.test_gapfill_overlap.*, ARRAY[20])
Remote EXPLAIN:
Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value

-> Custom Scan (DataNodeScan) on public.test_gapfill_overlap test_gapfill_overlap_3
Output: time_bucket_gapfill('@ 3 hours'::interval, test_gapfill_overlap_3."time", 'Mon Jan 01 06:00:00 2018'::timestamp without time zone, 'Mon Jan 01 18:00:00 2018'::timestamp without time zone), test_gapfill_overlap_3.name, test_gapfill_overlap_3.value, test_gapfill_overlap_3."time"
Data node: data_node_3
Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
Remote SQL: SELECT "time", name, value FROM public.test_gapfill_overlap WHERE _timescaledb_internal.chunks_in(public.test_gapfill_overlap.*, ARRAY[26, 27, 28, 29, 30])
|
|
||||||
Remote EXPLAIN:
|
|
||||||
Append
|
|
||||||
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
|
|
||||||
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
|
|
||||||
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
|
|
||||||
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
|
|
||||||
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
|
|
||||||
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
|
|
||||||
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
|
|
||||||
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
|
|
||||||
-> Seq Scan on _timescaledb_internal._dist_hyper_X_X_chunk
|
|
||||||
Output: _dist_hyper_X_X_chunk."time", _dist_hyper_X_X_chunk.name, _dist_hyper_X_X_chunk.value
|
|
||||||
|
|
||||||
(52 rows)
|
|
||||||
|
|
||||||
SET timescaledb.enable_remote_explain = false;
|
|
||||||
DROP TABLE test_gapfill;
|
|
||||||
DROP TABLE test_gapfill_overlap;
|
|
@@ -37,7 +37,7 @@ select 1 from metrics_dist1 where ts_debug_shippable_error_after_n_rows(10000000
    Data node: data_node_1
    Fetcher Type: Row by row
    Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
-   Remote SQL: SELECT NULL FROM public.metrics_dist1 WHERE _timescaledb_internal.chunks_in(public.metrics_dist1.*, ARRAY[34, 35, 36]) AND ((public.ts_debug_shippable_error_after_n_rows(10000000, device_id) <> 0))
+   Remote SQL: SELECT NULL FROM public.metrics_dist1 WHERE _timescaledb_internal.chunks_in(public.metrics_dist1.*, ARRAY[28, 29, 30]) AND ((public.ts_debug_shippable_error_after_n_rows(10000000, device_id) <> 0))
 (6 rows)
 
 explain (analyze, verbose, costs off, timing off, summary off)
@@ -55,7 +55,7 @@ select 1 from metrics_dist1 where ts_debug_shippable_fatal_after_n_rows(10000000
    Data node: data_node_1
    Fetcher Type: Row by row
    Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
-   Remote SQL: SELECT NULL FROM public.metrics_dist1 WHERE _timescaledb_internal.chunks_in(public.metrics_dist1.*, ARRAY[34, 35, 36]) AND ((public.ts_debug_shippable_fatal_after_n_rows(10000000, device_id) <> 0))
+   Remote SQL: SELECT NULL FROM public.metrics_dist1 WHERE _timescaledb_internal.chunks_in(public.metrics_dist1.*, ARRAY[28, 29, 30]) AND ((public.ts_debug_shippable_fatal_after_n_rows(10000000, device_id) <> 0))
 (6 rows)
 
 set timescaledb.remote_data_fetcher = 'cursor';
@@ -74,7 +74,7 @@ select 1 from metrics_dist1 where ts_debug_shippable_error_after_n_rows(10000000
    Data node: data_node_1
    Fetcher Type: Cursor
    Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
-   Remote SQL: SELECT NULL FROM public.metrics_dist1 WHERE _timescaledb_internal.chunks_in(public.metrics_dist1.*, ARRAY[34, 35, 36]) AND ((public.ts_debug_shippable_error_after_n_rows(10000000, device_id) <> 0))
+   Remote SQL: SELECT NULL FROM public.metrics_dist1 WHERE _timescaledb_internal.chunks_in(public.metrics_dist1.*, ARRAY[28, 29, 30]) AND ((public.ts_debug_shippable_error_after_n_rows(10000000, device_id) <> 0))
 (6 rows)
 
 explain (analyze, verbose, costs off, timing off, summary off)
@@ -92,6 +92,6 @@ select 1 from metrics_dist1 where ts_debug_shippable_fatal_after_n_rows(10000000
    Data node: data_node_1
    Fetcher Type: Cursor
    Chunks: _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk, _dist_hyper_X_X_chunk
-   Remote SQL: SELECT NULL FROM public.metrics_dist1 WHERE _timescaledb_internal.chunks_in(public.metrics_dist1.*, ARRAY[34, 35, 36]) AND ((public.ts_debug_shippable_fatal_after_n_rows(10000000, device_id) <> 0))
+   Remote SQL: SELECT NULL FROM public.metrics_dist1 WHERE _timescaledb_internal.chunks_in(public.metrics_dist1.*, ARRAY[28, 29, 30]) AND ((public.ts_debug_shippable_fatal_after_n_rows(10000000, device_id) <> 0))
 (6 rows)
 
@@ -23,9 +23,8 @@ if(CMAKE_BUILD_TYPE MATCHES Debug)
 endif(CMAKE_BUILD_TYPE MATCHES Debug)
 
 set(TEST_TEMPLATES_SHARED
-    dist_gapfill_pushdown.sql.in gapfill.sql.in generated_columns.sql.in
-    ordered_append.sql.in ordered_append_join.sql.in
-    transparent_decompress_chunk.sql.in)
+    gapfill.sql.in generated_columns.sql.in ordered_append.sql.in
+    ordered_append_join.sql.in transparent_decompress_chunk.sql.in)
 
 # Regression tests that vary with PostgreSQL version. Generated test files are
 # put in the original source directory since all tests must be in the same
@@ -1,62 +0,0 @@
--- This file and its contents are licensed under the Timescale License.
--- Please see the included NOTICE for copyright information and
--- LICENSE-TIMESCALE for a copy of the license.
-
-\set ON_ERROR_STOP 0
-
-SET enable_partitionwise_aggregate = 'on';
-SET timescaledb.enable_remote_explain = true;
-
--- Cases where gapfill is pushed down to data-nodes
-EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
-       name,
-       first(value, time),
-       avg(value)
-FROM test_gapfill
-GROUP BY 1,2;
-
-EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
-       name,
-       first(value, time),
-       avg(value)
-FROM test_gapfill
-GROUP BY 2,1;
-
--- Check for multiple gapfill calls
-SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
-       time_bucket_gapfill('6 hours', time, '2017-01-01 08:00', '2017-01-01 18:00'),
-       name,
-       first(value, time),
-       avg(value)
-FROM test_gapfill
-GROUP BY 2,1,3;
-
--- Cases where gapfill is not pushed down to data-nodes
-
--- Space dimension is not in group by clause
-EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-02 18:00'),
-       first(value, time),
-       avg(value)
-FROM test_gapfill
-GROUP BY 1;
-
--- Window functions
-EXPLAIN (VERBOSE, COSTS OFF) SELECT
-       time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
-       lag(min(time)) OVER ()
-FROM test_gapfill
-GROUP BY 1;
-
--- Data nodes are overlapping
-
-EXPLAIN (VERBOSE, COSTS OFF) SELECT time_bucket_gapfill('3 hours', time, '2018-01-01 06:00', '2018-01-01 18:00'),
-       name,
-       first(value, time),
-       avg(value)
-FROM test_gapfill_overlap
-GROUP BY 1,2;
-
-SET timescaledb.enable_remote_explain = false;
-
-DROP TABLE test_gapfill;
-DROP TABLE test_gapfill_overlap;
@@ -13,16 +13,14 @@ SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:0
        first(value, time),
        avg(value)
 FROM :CONDITIONS
-GROUP BY 1,2
-ORDER BY 2,1;
+GROUP BY 1,2;
 
 SELECT time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
        device,
        first(value, time),
        avg(value)
 FROM :CONDITIONS
-GROUP BY 2,1
-ORDER BY 2,1;
+GROUP BY 2,1;
 
 SELECT
   time_bucket_gapfill('3 hours', time, '2017-01-01 06:00', '2017-01-01 18:00'),
@@ -274,44 +274,9 @@ create table distinct_on_distributed(ts timestamp, id int, val numeric);
 select create_distributed_hypertable('distinct_on_distributed', 'ts');
 insert into distinct_on_distributed select * from distinct_on_hypertable;
 
--- Table with non-overlapping data across data-nodes to test gapfill pushdown to data nodes
-CREATE TABLE test_gapfill(time timestamp, name text, value numeric);
-
-SELECT table_name from create_distributed_hypertable('test_gapfill', 'time', partitioning_column => 'name');
-
-INSERT INTO test_gapfill VALUES
-       ('2018-01-01 06:01', 'one', 1.2),
-       ('2018-01-02 09:11', 'two', 4.3),
-       ('2018-01-03 08:01', 'three', 7.3),
-       ('2018-01-04 08:01', 'one', 0.23),
-       ('2018-07-05 08:01', 'five', 0.0),
-       ('2018-07-06 06:01', 'forty', 3.1),
-       ('2018-07-07 09:11', 'eleven', 10303.12),
-       ('2018-07-08 08:01', 'ten', 64);
-
--- Make table with data nodes overlapping
-
-CREATE TABLE test_gapfill_overlap(time timestamp, name text, value numeric);
-
-SELECT table_name from create_distributed_hypertable('test_gapfill_overlap', 'time', partitioning_column => 'name');
-
-INSERT INTO test_gapfill_overlap SELECT * FROM test_gapfill;
-
-SELECT set_number_partitions('test_gapfill_overlap', 4);
-
-INSERT INTO test_gapfill_overlap VALUES
-       ('2020-01-01 06:01', 'eleven', 1.2),
-       ('2020-01-02 09:11', 'twenty-two', 4.3),
-       ('2020-01-03 08:01', 'three', 7.3),
-       ('2020-01-04 08:01', 'one', 0.23),
-       ('2020-07-05 08:01', 'five', 0.0),
-       ('2020-07-06 06:01', 'forty-six', 3.1),
-       ('2020-07-07 09:11', 'eleven', 10303.12),
-       ('2020-07-08 08:01', 'ten', 64);
-
 -- Distributed table with custom type that has no binary output
 CREATE TABLE disttable_with_ct(time timestamptz, txn_id rxid, val float, info text);
-SELECT * FROM create_hypertable('disttable_with_ct', 'time', replication_factor => 2);
+SELECT * FROM create_hypertable('disttable_with_ct', 'time', replication_factor => 1);
 
 -- Insert data with custom type
 INSERT INTO disttable_with_ct VALUES