diff --git a/src/guc.c b/src/guc.c index c17901f6e..cb8d578e9 100644 --- a/src/guc.c +++ b/src/guc.c @@ -76,6 +76,7 @@ bool ts_guc_enable_now_constify = true; bool ts_guc_enable_osm_reads = true; TSDLLEXPORT bool ts_guc_enable_transparent_decompression = true; bool ts_guc_enable_per_data_node_queries = true; +bool ts_guc_enable_parameterized_data_node_scan = true; bool ts_guc_enable_async_append = true; TSDLLEXPORT bool ts_guc_enable_compression_indexscan = true; TSDLLEXPORT bool ts_guc_enable_skip_scan = true; @@ -294,6 +295,19 @@ _guc_init(void) NULL, NULL); + DefineCustomBoolVariable("timescaledb.enable_parameterized_data_node_scan", + "Enable parameterized data node scans", + "Disable this as a workaround in case these plans are incorrectly " + "chosen " + "by the query planner when they are suboptimal", + &ts_guc_enable_parameterized_data_node_scan, + true, + PGC_USERSET, + 0, + NULL, + NULL, + NULL); + DefineCustomBoolVariable("timescaledb.enable_tiered_reads", "Enable tiered data reads", "Enable reading of tiered data by including a foreign table " diff --git a/src/guc.h b/src/guc.h index a1dd0d530..a4268306a 100644 --- a/src/guc.h +++ b/src/guc.h @@ -28,6 +28,7 @@ extern bool ts_guc_enable_now_constify; extern bool ts_guc_enable_osm_reads; extern TSDLLEXPORT bool ts_guc_enable_transparent_decompression; extern TSDLLEXPORT bool ts_guc_enable_per_data_node_queries; +extern TSDLLEXPORT bool ts_guc_enable_parameterized_data_node_scan; extern TSDLLEXPORT bool ts_guc_enable_async_append; extern TSDLLEXPORT bool ts_guc_enable_skip_scan; extern bool ts_guc_restoring; diff --git a/tsl/src/fdw/data_node_scan_plan.c b/tsl/src/fdw/data_node_scan_plan.c index b95c90e55..04d53d34f 100644 --- a/tsl/src/fdw/data_node_scan_plan.c +++ b/tsl/src/fdw/data_node_scan_plan.c @@ -297,33 +297,15 @@ ts_ec_member_matches_foreign(PlannerInfo *root, RelOptInfo *rel, EquivalenceClas return true; } -static void -add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) +/* + * 
Build parameterizations that are useful for performing joins with the given + * hypertable relation. We will use them to generate the parameterized data node + * scan paths. The code is mostly copied from postgres_fdw, + * postgresGetForeignPaths(). + */ +static List * +build_parameterizations(PlannerInfo *root, RelOptInfo *hyper_rel) { - TsFdwRelInfo *fpinfo = fdw_relinfo_get(baserel); - Path *path; - - if (baserel->reloptkind == RELOPT_JOINREL) - ereport(ERROR, - (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), - errmsg("foreign joins are not supported"))); - - path = data_node_scan_path_create(root, - baserel, - NULL, /* default pathtarget */ - fpinfo->rows, - fpinfo->startup_cost, - fpinfo->total_cost, - NIL, /* no pathkeys */ - NULL, - NULL /* no extra plan */, - NIL); - - fdw_utils_add_path(baserel, path); - - /* Add paths with pathkeys */ - fdw_add_paths_with_pathkeys_for_rel(root, baserel, NULL, data_node_scan_path_create); - /* * Thumb through all join clauses for the rel to identify which outer * relations could supply one or more safe-to-send-to-remote join clauses. 
@@ -343,28 +325,28 @@ add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) */ List *ppi_list = NIL; ListCell *lc; - foreach (lc, baserel->joininfo) + foreach (lc, hyper_rel->joininfo) { RestrictInfo *rinfo = (RestrictInfo *) lfirst(lc); Relids required_outer; ParamPathInfo *param_info; /* Check if clause can be moved to this rel */ - if (!join_clause_is_movable_to(rinfo, baserel)) + if (!join_clause_is_movable_to(rinfo, hyper_rel)) { continue; } /* See if it is safe to send to remote */ - if (!ts_is_foreign_expr(root, baserel, rinfo->clause)) + if (!ts_is_foreign_expr(root, hyper_rel, rinfo->clause)) { continue; } /* Calculate required outer rels for the resulting path */ - required_outer = bms_union(rinfo->clause_relids, baserel->lateral_relids); + required_outer = bms_union(rinfo->clause_relids, hyper_rel->lateral_relids); /* We do not want the data node rel itself listed in required_outer */ - required_outer = bms_del_member(required_outer, baserel->relid); + required_outer = bms_del_member(required_outer, hyper_rel->relid); /* * required_outer probably can't be empty here, but if it were, we @@ -376,7 +358,7 @@ add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) } /* Get the ParamPathInfo */ - param_info = get_baserel_parampathinfo(root, baserel, required_outer); + param_info = get_baserel_parampathinfo(root, hyper_rel, required_outer); Assert(param_info != NULL); /* @@ -391,7 +373,7 @@ add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) * were absorbed into EquivalenceClauses. See if we can make anything out * of EquivalenceClauses. 
*/ - if (baserel->has_eclass_joins) + if (hyper_rel->has_eclass_joins) { /* * We repeatedly scan the eclass list looking for column references @@ -410,10 +392,10 @@ add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) /* Make clauses, skipping any that join to lateral_referencers */ arg.current = NULL; clauses = generate_implied_equalities_for_column(root, - baserel, + hyper_rel, ts_ec_member_matches_foreign, (void *) &arg, - baserel->lateral_referencers); + hyper_rel->lateral_referencers); /* Done if there are no more expressions in the data node rel */ if (arg.current == NULL) @@ -430,27 +412,27 @@ add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) ParamPathInfo *param_info; /* Check if clause can be moved to this rel */ - if (!join_clause_is_movable_to(rinfo, baserel)) + if (!join_clause_is_movable_to(rinfo, hyper_rel)) { continue; } /* See if it is safe to send to remote */ - if (!ts_is_foreign_expr(root, baserel, rinfo->clause)) + if (!ts_is_foreign_expr(root, hyper_rel, rinfo->clause)) { continue; } /* Calculate required outer rels for the resulting path */ - required_outer = bms_union(rinfo->clause_relids, baserel->lateral_relids); - required_outer = bms_del_member(required_outer, baserel->relid); + required_outer = bms_union(rinfo->clause_relids, hyper_rel->lateral_relids); + required_outer = bms_del_member(required_outer, hyper_rel->relid); if (bms_is_empty(required_outer)) { continue; } /* Get the ParamPathInfo */ - param_info = get_baserel_parampathinfo(root, baserel, required_outer); + param_info = get_baserel_parampathinfo(root, hyper_rel, required_outer); Assert(param_info != NULL); /* Add it to list unless we already have it */ @@ -462,15 +444,82 @@ add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) } } + return ppi_list; +} + +static void +add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *data_node_rel, RelOptInfo *hyper_rel, + List *ppi_list) +{ + TsFdwRelInfo *fpinfo = fdw_relinfo_get(data_node_rel); + 
Path *path; + + if (data_node_rel->reloptkind == RELOPT_JOINREL) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("foreign joins are not supported"))); + + path = data_node_scan_path_create(root, + data_node_rel, + NULL, /* default pathtarget */ + fpinfo->rows, + fpinfo->startup_cost, + fpinfo->total_cost, + NIL, /* no pathkeys */ + NULL, + NULL /* no extra plan */, + NIL); + + fdw_utils_add_path(data_node_rel, path); + + /* Add paths with pathkeys */ + fdw_add_paths_with_pathkeys_for_rel(root, data_node_rel, NULL, data_node_scan_path_create); + /* - * Now build a path for each useful outer relation. + * Now build a path for each useful outer relation, if the parameterized + * data node scans are not disabled. */ - foreach (lc, ppi_list) + if (!ts_guc_enable_parameterized_data_node_scan) { - ParamPathInfo *param_info = (ParamPathInfo *) lfirst(lc); + return; + } + + ListCell *ppi_cell; + foreach (ppi_cell, ppi_list) + { + ParamPathInfo *param_info = (ParamPathInfo *) lfirst(ppi_cell); + + /* + * Check if we have an index path locally that matches the + * parameterization. If so, we're going to have the same index path on + * the data node, and it's going to be significantly cheaper than a seq + * scan. We don't know precise values, but we have to discount it later + * so that the remote index paths are preferred. + */ + bool index_matches_parameterization = false; + ListCell *path_cell; + foreach (path_cell, hyper_rel->pathlist) + { + Path *path = (Path *) lfirst(path_cell); + if (path->param_info == param_info) + { + /* + * We shouldn't have parameterized seq scans. Can be an + * IndexPath (includes index-only scans) or a BitmapHeapPath. + */ + Assert(path->type == T_BitmapHeapPath || path->type == T_IndexPath); + + index_matches_parameterization = true; + break; + } + } + + /* + * As a baseline, cost the data node scan as a seq scan. + */ Cost startup_cost = 0; Cost run_cost = 0; - double rows = baserel->tuples > 1 ?
baserel->tuples : 123456; + double rows = data_node_rel->tuples > 1 ? data_node_rel->tuples : 123456; /* Run remote non-join clauses. */ const double remote_sel_sane = @@ -478,22 +527,38 @@ add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) fpinfo->remote_conds_sel : 0.1; - startup_cost += baserel->reltarget->cost.startup; + startup_cost += data_node_rel->reltarget->cost.startup; startup_cost += fpinfo->remote_conds_cost.startup; run_cost += fpinfo->remote_conds_cost.per_tuple * rows; run_cost += cpu_tuple_cost * rows; - run_cost += seq_page_cost * baserel->pages; + run_cost += seq_page_cost * data_node_rel->pages; rows *= remote_sel_sane; + /* + * For this parameterization, we're going to have an index scan on the + * remote. We don't have a way to calculate the precise cost for it, so + * at least discount it by a constant factor compared to the seq scan. + */ + if (index_matches_parameterization) + { + run_cost *= 0.1; + } + /* Run remote join clauses. */ QualCost remote_join_cost; cost_qual_eval(&remote_join_cost, param_info->ppi_clauses, root); + /* - * We don't have up to date per-column statistics for distributed - * hypertables currently, so the join estimates are going to be way off. - * The worst is when they are too low and we end up transferring much - * more rows from the data node that we expected. Just hardcode it at - * 0.1 per clause for now. + * We don't have up to date per-column statistics for the root + * distributed hypertable currently, so the join estimates are going to + * be way off. The worst is when they are too low and we end up + * transferring much more rows from the data node that we expected. Just + * hardcode it at 0.1 per clause for now. + * In the future, we could make use of per-chunk per-column statistics + * that we do have, by injecting them into the Postgres cost functions + * through the get_relation_stats_hook. 
For a data node scan, we would + * combine statistics for all participating chunks on the given data + * node. */ const double remote_join_sel = pow(0.1, list_length(param_info->ppi_clauses)); @@ -517,7 +582,9 @@ add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) rows *= local_sel_sane; /* Compute the output targetlist. */ - run_cost += baserel->reltarget->cost.per_tuple * rows; + run_cost += data_node_rel->reltarget->cost.per_tuple * rows; + + rows = clamp_row_est(rows); /* * ppi_rows currently won't get looked at by anything, but still we @@ -527,7 +594,7 @@ add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) /* Make the path */ path = data_node_scan_path_create(root, - baserel, + data_node_rel, NULL, /* default pathtarget */ rows, startup_cost, @@ -537,7 +604,7 @@ add_data_node_scan_paths(PlannerInfo *root, RelOptInfo *baserel) NULL, NIL); /* no fdw_private list */ - add_path(baserel, (Path *) path); + add_path(data_node_rel, (Path *) path); } } @@ -722,6 +789,26 @@ data_node_scan_add_node_paths(PlannerInfo *root, RelOptInfo *hyper_rel) /* Try to push down GROUP BY expressions and bucketing, if possible */ push_down_group_bys(root, hyper_rel, ht->space, &scas); + /* + * Index paths for this relation are not useful by themselves, but we are + * going to use them to guess whether the remote scan can use an index for a + * given parameterization. This is needed to estimate the cost for + * parameterized data node scans. We will reset the pathlist below so these + * paths are not going to be used. + */ + create_index_paths(root, hyper_rel); + + /* + * Not sure what parameterizations there could be except the ones used for + * join. Still, it's hard to verify from the code because + * get_baserel_parampathinfo() is called all over the place w/o checking if + * a join would be valid for the given required_outer.
So for generating + * the parameterized data node scan paths we'll use the explicit list of + * ppis valid for joins that we just built, and not the entire + * hyper_rel->ppilist. + */ + List *ppi_list = build_parameterizations(root, hyper_rel); + /* * Create estimates and paths for each data node rel based on data node chunk * assignments. @@ -753,7 +840,7 @@ data_node_scan_add_node_paths(PlannerInfo *root, RelOptInfo *hyper_rel) if (!bms_is_empty(sca->chunk_relids)) { - add_data_node_scan_paths(root, data_node_rel); + add_data_node_scan_paths(root, data_node_rel, hyper_rel, ppi_list); data_node_rels_list = lappend(data_node_rels_list, data_node_rel); #if PG15_GE data_node_live_rels = bms_add_member(data_node_live_rels, i); diff --git a/tsl/src/fdw/estimate.c b/tsl/src/fdw/estimate.c index e0a9b2bfd..d504bc3a2 100644 --- a/tsl/src/fdw/estimate.c +++ b/tsl/src/fdw/estimate.c @@ -121,14 +121,30 @@ get_upper_rel_estimate(PlannerInfo *root, RelOptInfo *rel, CostEstimate *ce) get_agg_clause_costs_compat(root, (Node *) fpinfo->grouped_tlist, aggsplit, &aggcosts); } - /* Get number of grouping columns and possible number of groups */ + /* + * Get number of grouping columns and possible number of groups. We don't + * have per-column ndistinct statistics on access node for the root + * distributed hypertable, so in this case hardcode it as 1/10 of all rows + * to incentivize grouping push down. + * We do have per-column per-chunk statistics, so we could do better by + * combining these statistics for the participating chunks. This probably + * should be done together with correcting the join selectivity estimation + * in add_data_node_scan_paths. 
+ */ num_group_cols = list_length(root->parse->groupClause); - num_groups = estimate_num_groups_compat(root, - get_sortgrouplist_exprs(root->parse->groupClause, - fpinfo->grouped_tlist), - input_rows, - NULL, - NULL); + if (fpinfo->type == TS_FDW_RELINFO_HYPERTABLE_DATA_NODE) + { + num_groups = clamp_row_est(input_rows / 10.); + } + else + { + num_groups = estimate_num_groups_compat(root, + get_sortgrouplist_exprs(root->parse->groupClause, + fpinfo->grouped_tlist), + input_rows, + NULL, + NULL); + } /* * Get the retrieved_rows and rows estimates. If there are HAVING diff --git a/tsl/test/expected/debug_notice.out b/tsl/test/expected/debug_notice.out index d24b7ef8d..f59fdcdc6 100644 --- a/tsl/test/expected/debug_notice.out +++ b/tsl/test/expected/debug_notice.out @@ -214,9 +214,9 @@ Pruned paths: DEBUG: Upper rel stage GROUP_AGG: RELOPTINFO [rel name: Aggregate on (public.hyper), type: DATA_NODE, kind: OTHER_UPPER_REL, base rel names: hyper] rows=0 width=20 Path list: - CustomPath (DataNodeScanPath) [rel type: DATA_NODE, kind: OTHER_UPPER_REL, parent's base rels: hyper] rows=3 with pathkeys: ((hyper.time, hyper.time, hyper.time, hyper.time, hyper.time, hyper.time, hyper.time, hyper.time), (hyper.device, hyper.device, hyper.device, hyper.device, hyper.device, hyper.device, hyper.device, hyper.device)) + CustomPath (DataNodeScanPath) [rel type: DATA_NODE, kind: OTHER_UPPER_REL, parent's base rels: hyper] rows=1 with pathkeys: ((hyper.time, hyper.time, hyper.time, hyper.time, hyper.time, hyper.time, hyper.time, hyper.time), (hyper.device, hyper.device, hyper.device, hyper.device, hyper.device, hyper.device, hyper.device, hyper.device)) Pruned paths: - CustomPath (DataNodeScanPath) [rel type: DATA_NODE, kind: OTHER_UPPER_REL, parent's base rels: hyper] rows=3 with pathkeys: ((hyper.time, hyper.time, hyper.time, hyper.time, hyper.time, hyper.time, hyper.time, hyper.time), (hyper.device, hyper.device, hyper.device, hyper.device, hyper.device, hyper.device, hyper.device, 
hyper.device)) + CustomPath (DataNodeScanPath) [rel type: DATA_NODE, kind: OTHER_UPPER_REL, parent's base rels: hyper] rows=1 DEBUG: Upper rel stage GROUP_AGG: diff --git a/tsl/test/expected/dist_param.out b/tsl/test/expected/dist_param.out index a1ff3d5fa..e67e234dc 100644 --- a/tsl/test/expected/dist_param.out +++ b/tsl/test/expected/dist_param.out @@ -45,125 +45,250 @@ select count(*) from show_chunks('metric_dist'); (1 row) -- dictionary -create table metric_name(id int primary key, name text); +create table metric_name(id int, name text collate "C", + constraint metric_name_name unique (name), + constraint metric_name_id primary key (id)); insert into metric_name values (1, 'cpu1'), (3, 'cpu3'), (7, 'cpu7'); +insert into metric_name select x, 'other' || x + from generate_series(1000, 10000) x +; analyze metric_name; -- for predictable plans -set enable_hashjoin to off; -set enable_mergejoin to off; set enable_hashagg to off; +set enable_material to off; +set enable_mergejoin to off; +-- not present on PG 12 +\set ECHO errors -- Subquery + IN select id, max(value), count(*) from metric_dist where id in (select id from metric_name where name like 'cpu%') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by id order by id ; id | max | count ----+------------------+------- - 1 | 49.9941974878311 | 139 - 3 | 49.3596792221069 | 138 - 7 | 49.795538187027 | 146 + 1 | 49.9941974878311 | 4174 + 3 | 49.9958902597427 | 4119 + 7 | 49.9881327152252 | 4316 (3 rows) explain (costs off, verbose) select id, max(value), count(*) from metric_dist where id in (select id from metric_name where name like 'cpu%') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by id order by id ; - QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: metric_dist.id, max(metric_dist.value), count(*) Group Key: metric_dist.id - -> Nested Loop + -> Sort Output: metric_dist.id, metric_dist.value - -> Index Scan using metric_name_pkey on public.metric_name - Output: metric_name.id, metric_name.name - Filter: (metric_name.name ~~ 'cpu%'::text) - -> Custom Scan (DataNodeScan) on public.metric_dist + Sort Key: metric_dist.id + -> Nested Loop Output: metric_dist.id, metric_dist.value - Data node: data_node_1 - Chunks: _dist_hyper_1_52_chunk - Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-02-02 15:02:02-08'::timestamp with time zone)) AND (($1::integer = id)) -(13 rows) + -> Index Scan using metric_name_name on public.metric_name + Output: metric_name.id, metric_name.name + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) + Filter: (metric_name.name ~~ 'cpu%'::text) + -> Custom Scan (DataNodeScan) on public.metric_dist + Output: metric_dist.id, metric_dist.value + Data node: data_node_1 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT id, value FROM public.metric_dist WHERE 
_timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) AND (($1::integer = id)) +(17 rows) +-- Check that the GUC to disable these plans works. Our cost model is very +-- heuristic and may be often wrong, so there needs to be a way to disable them. +set timescaledb.enable_parameterized_data_node_scan to false; +select id, max(value), count(*) +from metric_dist +where id in (select id from metric_name where name like 'cpu%') + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' +group by id +order by id +; + id | max | count +----+------------------+------- + 1 | 49.9941974878311 | 4174 + 3 | 49.9958902597427 | 4119 + 7 | 49.9881327152252 | 4316 +(3 rows) + +explain (costs off, verbose) +select id, max(value), count(*) +from metric_dist +where id in (select id from metric_name where name like 'cpu%') + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' +group by id +order by id +; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + GroupAggregate + Output: metric_dist.id, max(metric_dist.value), count(*) + Group Key: metric_dist.id + -> Sort + Output: metric_dist.id, metric_dist.value + Sort Key: metric_dist.id + -> Hash Join + Output: metric_dist.id, metric_dist.value + Inner Unique: true + Hash Cond: (metric_dist.id = metric_name.id) + -> Custom Scan (DataNodeScan) on public.metric_dist + Output: metric_dist.id, metric_dist.value + Data node: data_node_1 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT 
id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) ORDER BY id ASC NULLS LAST + -> Hash + Output: metric_name.id + -> Index Scan using metric_name_name on public.metric_name + Output: metric_name.id + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) + Filter: (metric_name.name ~~ 'cpu%'::text) +(21 rows) + +reset timescaledb.enable_parameterized_data_node_scan; -- Shippable EC join select name, max(value), count(*) from metric_dist join metric_name using (id) where name like 'cpu%' - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; name | max | count ------+------------------+------- - cpu1 | 49.9941974878311 | 139 - cpu3 | 49.3596792221069 | 138 - cpu7 | 49.795538187027 | 146 + cpu1 | 49.9941974878311 | 4174 + cpu3 | 49.9958902597427 | 4119 + cpu7 | 49.9881327152252 | 4316 (3 rows) explain (costs off, verbose) select name, max(value), count(*) from metric_dist join metric_name using (id) where name like 'cpu%' - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN 
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + GroupAggregate + Output: metric_name.name, max(metric_dist.value), count(*) + Group Key: metric_name.name + -> Nested Loop + Output: metric_name.name, metric_dist.value + -> Index Scan using metric_name_name on public.metric_name + Output: metric_name.id, metric_name.name + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) + Filter: (metric_name.name ~~ 'cpu%'::text) + -> Custom Scan (DataNodeScan) on public.metric_dist + Output: metric_dist.value, metric_dist.id + Data node: data_node_1 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) AND (($1::integer = id)) +(14 rows) + +-- Shipping still might make sense if the local table is outer. 
+explain (costs off, verbose) +select name, max(value), count(*) +from metric_dist right join metric_name using (id) +where name like 'cpu%' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' +group by name +order by name +; + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + GroupAggregate + Output: metric_name.name, max(metric_dist.value), count(*) + Group Key: metric_name.name + -> Nested Loop + Output: metric_name.name, metric_dist.value + -> Index Scan using metric_name_name on public.metric_name + Output: metric_name.id, metric_name.name + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) + Filter: (metric_name.name ~~ 'cpu%'::text) + -> Custom Scan (DataNodeScan) on public.metric_dist + Output: metric_dist.value, metric_dist.id + Data node: data_node_1 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) AND (($1::integer = id)) +(14 rows) + +-- Shipping doesn't make sense if the distributed table is outer. 
+explain (costs off, verbose) +select name, max(value), count(*) +from metric_dist +left join (select * from metric_name where name like 'cpu%') t using (id) +where ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' +group by name +order by name +; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: metric_name.name, max(metric_dist.value), count(*) Group Key: metric_name.name -> Sort Output: metric_name.name, metric_dist.value - Sort Key: metric_name.name - -> Nested Loop + Sort Key: metric_name.name COLLATE "C" + -> Hash Left Join Output: metric_name.name, metric_dist.value - -> Seq Scan on public.metric_name - Output: metric_name.id, metric_name.name - Filter: (metric_name.name ~~ 'cpu%'::text) + Inner Unique: true + Hash Cond: (metric_dist.id = metric_name.id) -> Custom Scan (DataNodeScan) on public.metric_dist Output: metric_dist.value, metric_dist.id Data node: data_node_1 - Chunks: _dist_hyper_1_52_chunk - Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-02-02 15:02:02-08'::timestamp with time zone)) AND (($1::integer = id)) -(16 rows) + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) + -> Hash + Output: metric_name.name, metric_name.id + -> Index Scan using metric_name_name 
on public.metric_name + Output: metric_name.name, metric_name.id + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) + Filter: (metric_name.name ~~ 'cpu%'::text) +(21 rows) -- Non-shippable EC join explain (costs off, verbose) select name, max(value), count(*) from metric_dist join metric_name on name = concat('cpu', metric_dist.id) -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +where metric_name.name like 'cpu%' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: metric_name.name, max(metric_dist.value), count(*) Group Key: metric_name.name -> Sort Output: metric_name.name, metric_dist.value - Sort Key: metric_name.name - -> Nested Loop + Sort Key: metric_name.name COLLATE "C" + -> Hash Join Output: metric_name.name, metric_dist.value - Join Filter: (concat('cpu', metric_dist.id) = metric_name.name) + Inner Unique: true + Hash Cond: ((concat('cpu', metric_dist.id))::text = metric_name.name) -> Custom Scan (DataNodeScan) on public.metric_dist Output: metric_dist.value, metric_dist.id Data node: data_node_1 - Chunks: _dist_hyper_1_52_chunk - Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-02-02 
15:02:02-08'::timestamp with time zone)) - -> Materialize + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) + -> Hash Output: metric_name.name - -> Seq Scan on public.metric_name + -> Index Only Scan using metric_name_name on public.metric_name Output: metric_name.name -(18 rows) + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) + Filter: (metric_name.name ~~ 'cpu%'::text) +(21 rows) -- Shippable non-EC join. The weird condition is to only use immutable functions -- that can be shipped to the remote node. `id::text` does CoerceViaIO which is @@ -172,81 +297,80 @@ order by name select name, max(value), count(*) from metric_dist join metric_name on texteq('cpu' || textin(int4out(metric_dist.id)), name) -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +where metric_name.name like 'cpu%' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; name | max | count ------+------------------+------- - cpu1 | 49.9941974878311 | 139 - cpu3 | 49.3596792221069 | 138 - cpu7 | 49.795538187027 | 146 + cpu1 | 49.9941974878311 | 4174 + cpu3 | 49.9958902597427 | 4119 + cpu7 | 49.9881327152252 | 4316 (3 rows) explain (costs off, verbose) select name, max(value), count(*) from metric_dist join metric_name on texteq('cpu' || textin(int4out(metric_dist.id)), name) -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +where metric_name.name like 'cpu%' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; - QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: metric_name.name, max(metric_dist.value), count(*) Group Key: metric_name.name - -> Sort + -> Nested Loop Output: metric_name.name, metric_dist.value - Sort Key: metric_name.name - -> Nested Loop - Output: metric_name.name, metric_dist.value - -> Seq Scan on public.metric_name - Output: metric_name.id, metric_name.name - -> Custom Scan (DataNodeScan) on public.metric_dist - Output: metric_dist.value, metric_dist.id - Data node: data_node_1 - Chunks: _dist_hyper_1_52_chunk - Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-02-02 15:02:02-08'::timestamp with time zone)) AND (texteq(('cpu'::text || textin(int4out(id))), $1::text)) -(15 rows) + -> Index Only Scan using metric_name_name on public.metric_name + Output: metric_name.name + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) + Filter: (metric_name.name ~~ 'cpu%'::text) + -> Custom Scan (DataNodeScan) on public.metric_dist + Output: metric_dist.value, metric_dist.id + Data node: data_node_1 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, 
_dist_hyper_1_52_chunk + Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) AND (texteq(('cpu'::text || textin(int4out(id))), $1::text)) +(14 rows) -- Non-shippable non-EC join. explain (costs off, verbose) select name, max(value), count(*) from metric_dist join metric_name on texteq(concat('cpu', textin(int4out(metric_dist.id))), name) -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +where metric_name.name like 'cpu%' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: metric_name.name, max(metric_dist.value), count(*) Group Key: metric_name.name - -> Sort + -> Nested Loop Output: metric_name.name, metric_dist.value - Sort Key: metric_name.name - -> Nested Loop - Output: metric_name.name, metric_dist.value - Join Filter: texteq(concat('cpu', textin(int4out(metric_dist.id))), metric_name.name) - -> Custom Scan (DataNodeScan) on public.metric_dist - Output: metric_dist.value, metric_dist.id - Data node: data_node_1 - Chunks: _dist_hyper_1_52_chunk - Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[52]) AND ((ts 
>= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-02-02 15:02:02-08'::timestamp with time zone)) - -> Materialize - Output: metric_name.name - -> Seq Scan on public.metric_name - Output: metric_name.name -(18 rows) + Join Filter: texteq(concat('cpu', textin(int4out(metric_dist.id))), metric_name.name) + -> Index Only Scan using metric_name_name on public.metric_name + Output: metric_name.name + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) + Filter: (metric_name.name ~~ 'cpu%'::text) + -> Custom Scan (DataNodeScan) on public.metric_dist + Output: metric_dist.value, metric_dist.id + Data node: data_node_1 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) +(15 rows) -- distinct on, order by, limit 1, with subquery select distinct on (id) id, ts, value from metric_dist where id in (select id from metric_name where name like 'cpu%') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' order by id, ts, value limit 1 ; @@ -260,30 +384,30 @@ select distinct on (id) id, ts, value from metric_dist where id in (select id from metric_name where name like 'cpu%') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' order by id, ts, value limit 1 ; - QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit Output: metric_dist.id, metric_dist.ts, metric_dist.value -> Unique Output: metric_dist.id, metric_dist.ts, metric_dist.value - -> Nested Loop + -> Sort Output: metric_dist.id, metric_dist.ts, metric_dist.value - Inner Unique: true - Join Filter: (metric_dist.id = metric_name.id) - -> Custom Scan (DataNodeScan) on public.metric_dist + Sort Key: metric_dist.id, metric_dist.ts, metric_dist.value + -> Nested Loop Output: metric_dist.id, metric_dist.ts, metric_dist.value - Data node: data_node_1 - Chunks: _dist_hyper_1_52_chunk - Remote SQL: SELECT DISTINCT ON (id) ts, id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-02-02 15:02:02-08'::timestamp with time zone)) ORDER BY id ASC NULLS LAST, ts ASC NULLS LAST, value ASC NULLS LAST - -> Materialize - Output: metric_name.id - -> Seq Scan on public.metric_name - Output: metric_name.id + -> Index Scan using metric_name_name on public.metric_name + Output: metric_name.id, metric_name.name + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) Filter: (metric_name.name ~~ 'cpu%'::text) + -> Custom Scan (DataNodeScan) on public.metric_dist + Output: 
metric_dist.id, metric_dist.ts, metric_dist.value + Data node: data_node_1 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT ts, id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) AND (($1::integer = id)) (18 rows) -- distinct on, order by, limit 1, with explicit join @@ -291,7 +415,7 @@ select distinct on (name) name, ts, value from metric_dist join metric_name using (id) where name like 'cpu%' - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' order by name, ts, value limit 1 ; @@ -305,68 +429,66 @@ select distinct on (name) name, ts, value from metric_dist join metric_name using (id) where name like 'cpu%' - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' order by name, ts, value limit 1 ; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit Output: metric_name.name, metric_dist.ts, metric_dist.value -> Unique Output: metric_name.name, metric_dist.ts, metric_dist.value -> Sort Output: 
metric_name.name, metric_dist.ts, metric_dist.value - Sort Key: metric_name.name, metric_dist.ts, metric_dist.value + Sort Key: metric_name.name COLLATE "C", metric_dist.ts, metric_dist.value -> Nested Loop Output: metric_name.name, metric_dist.ts, metric_dist.value - -> Seq Scan on public.metric_name + -> Index Scan using metric_name_name on public.metric_name Output: metric_name.id, metric_name.name + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) Filter: (metric_name.name ~~ 'cpu%'::text) -> Custom Scan (DataNodeScan) on public.metric_dist Output: metric_dist.ts, metric_dist.value, metric_dist.id Data node: data_node_1 - Chunks: _dist_hyper_1_52_chunk - Remote SQL: SELECT ts, id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-02-02 15:02:02-08'::timestamp with time zone)) AND (($1::integer = id)) -(17 rows) + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT ts, id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) AND (($1::integer = id)) +(18 rows) --- If the local table is very big, the parameterized nested loop might download --- the entire dist table or even more than that (in case of not equi-join). +-- If there are a lot of rows chosen from the local table, the parameterized +-- nested loop might download the entire dist table or even more than that (in +-- case of not equi-join or duplicate join keys). -- Check that the parameterized plan is not chosen in this case. 
-create table metric_name_big as select * from metric_name; -insert into metric_name_big select x, 'other' || x - from generate_series(1000, 10000) x -; -analyze metric_name_big; explain (costs off, verbose) select name, max(value), count(*) from metric_dist -join metric_name_big using (id) -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +join metric_name using (id) +where ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate - Output: metric_name_big.name, max(metric_dist.value), count(*) - Group Key: metric_name_big.name + Output: metric_name.name, max(metric_dist.value), count(*) + Group Key: metric_name.name -> Sort - Output: metric_name_big.name, metric_dist.value - Sort Key: metric_name_big.name - -> Nested Loop - Output: metric_name_big.name, metric_dist.value - Join Filter: (metric_dist.id = metric_name_big.id) - -> Seq Scan on public.metric_name_big - Output: metric_name_big.id, metric_name_big.name - -> Materialize + Output: metric_name.name, metric_dist.value + Sort Key: metric_name.name COLLATE "C" + -> Hash Join + Output: metric_name.name, metric_dist.value + Inner Unique: true + Hash Cond: (metric_dist.id = metric_name.id) + -> Custom Scan (DataNodeScan) on public.metric_dist Output: metric_dist.value, metric_dist.id - -> Custom Scan (DataNodeScan) on public.metric_dist - Output: 
metric_dist.value, metric_dist.id - Data node: data_node_1 - Chunks: _dist_hyper_1_52_chunk - Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-02-02 15:02:02-08'::timestamp with time zone)) -(18 rows) + Data node: data_node_1 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) + -> Hash + Output: metric_name.name, metric_name.id + -> Seq Scan on public.metric_name + Output: metric_name.name, metric_name.id +(19 rows) -- An interesting special case is when the remote SQL has a parameter, but it is -- the result of an initplan. 
It's not "parameterized" in the join sense, because @@ -376,27 +498,28 @@ explain (costs off, verbose) select id, max(value) from metric_dist where id = any((select array_agg(id) from metric_name where name like 'cpu%')::int[]) - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by id order by id ; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: metric_dist.id, max(metric_dist.value) Group Key: metric_dist.id InitPlan 1 (returns $0) -> Aggregate Output: array_agg(metric_name.id) - -> Seq Scan on public.metric_name + -> Index Scan using metric_name_name on public.metric_name Output: metric_name.id, metric_name.name + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) Filter: (metric_name.name ~~ 'cpu%'::text) -> Custom Scan (DataNodeScan) on public.metric_dist Output: metric_dist.id, metric_dist.value Data node: data_node_1 - Chunks: _dist_hyper_1_52_chunk - Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-02-02 15:02:02-08'::timestamp with time zone)) AND ((id = ANY ($1::integer[]))) ORDER BY id ASC NULLS LAST -(14 rows) + 
Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) AND ((id = ANY ($1::integer[]))) ORDER BY id ASC NULLS LAST +(15 rows) -- Multiple joins. Test both EC and non-EC (texteq) join in one query. create table metric_location(id int, location text); @@ -405,7 +528,7 @@ analyze metric_location; select id, max(value) from metric_dist natural join metric_location natural join metric_name where name like 'cpu%' and texteq(location, 'Yerevan') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by id ; id | max @@ -417,11 +540,11 @@ explain (costs off, verbose) select id, max(value) from metric_dist natural join metric_location natural join metric_name where name like 'cpu%' and texteq(location, 'Yerevan') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by id ; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: metric_dist.id, 
max(metric_dist.value) Group Key: metric_dist.id @@ -437,21 +560,22 @@ group by id -> Seq Scan on public.metric_location Output: metric_location.id, metric_location.location Filter: texteq(metric_location.location, 'Yerevan'::text) - -> Seq Scan on public.metric_name + -> Index Scan using metric_name_name on public.metric_name Output: metric_name.id, metric_name.name + Index Cond: ((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) Filter: (metric_name.name ~~ 'cpu%'::text) -> Custom Scan (DataNodeScan) on public.metric_dist Output: metric_dist.id, metric_dist.value Data node: data_node_1 - Chunks: _dist_hyper_1_52_chunk - Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-02-02 15:02:02-08'::timestamp with time zone)) AND (($1::integer = id)) -(23 rows) + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_52_chunk + Remote SQL: SELECT id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[3, 16, 20, 37, 52]) AND ((ts >= '2022-02-01 15:02:02-08'::timestamp with time zone)) AND ((ts <= '2022-03-02 15:02:02-08'::timestamp with time zone)) AND (($1::integer = id)) +(24 rows) -- Multiple joins on different variables. Use a table instead of a CTE for saner -- stats. 
create table max_value_times as select distinct on (id) id, ts from metric_dist -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +where ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' order by id, value desc ; analyze max_value_times; @@ -461,24 +585,73 @@ from metric_dist natural join max_value_times natural join metric_name where name like 'cpu%' order by 1 ; - QUERY PLAN -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Nested Loop + QUERY PLAN 
+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Sort Output: metric_dist.id, metric_dist.value + Sort Key: metric_dist.id -> Nested Loop - Output: max_value_times.ts, max_value_times.id, metric_name.id - Join Filter: (max_value_times.id = metric_name.id) - -> Index Scan using metric_name_pkey on public.metric_name - Output: metric_name.id, metric_name.name - Filter: (metric_name.name ~~ 'cpu%'::text) - -> Materialize - Output: max_value_times.ts, max_value_times.id + Output: metric_dist.id, metric_dist.value + -> Hash Join + Output: max_value_times.ts, max_value_times.id, metric_name.id + Inner Unique: true + Hash Cond: (max_value_times.id = metric_name.id) -> Seq Scan on public.max_value_times - Output: max_value_times.ts, max_value_times.id - -> 
Custom Scan (DataNodeScan) on public.metric_dist - Output: metric_dist.id, metric_dist.value, metric_dist.ts - Data node: data_node_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_2_chunk, _dist_hyper_1_3_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_17_chunk, _dist_hyper_1_18_chunk, _dist_hyper_1_19_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_21_chunk, _dist_hyper_1_22_chunk, _dist_hyper_1_23_chunk, _dist_hyper_1_24_chunk, _dist_hyper_1_25_chunk, _dist_hyper_1_26_chunk, _dist_hyper_1_27_chunk, _dist_hyper_1_28_chunk, _dist_hyper_1_29_chunk, _dist_hyper_1_30_chunk, _dist_hyper_1_31_chunk, _dist_hyper_1_32_chunk, _dist_hyper_1_33_chunk, _dist_hyper_1_34_chunk, _dist_hyper_1_35_chunk, _dist_hyper_1_36_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_38_chunk, _dist_hyper_1_39_chunk, _dist_hyper_1_40_chunk, _dist_hyper_1_41_chunk, _dist_hyper_1_42_chunk, _dist_hyper_1_43_chunk, _dist_hyper_1_44_chunk, _dist_hyper_1_45_chunk, _dist_hyper_1_46_chunk, _dist_hyper_1_47_chunk, _dist_hyper_1_48_chunk, _dist_hyper_1_49_chunk, _dist_hyper_1_50_chunk, _dist_hyper_1_51_chunk, _dist_hyper_1_52_chunk, _dist_hyper_1_53_chunk - Remote SQL: SELECT ts, id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53]) AND (($1::timestamp with time zone = ts)) AND (($2::integer = id)) -(17 rows) + Output: max_value_times.id, max_value_times.ts + -> Hash + Output: metric_name.id + -> Index Scan using metric_name_name on public.metric_name + Output: metric_name.id + Index Cond: 
((metric_name.name >= 'cpu'::text) AND (metric_name.name < 'cpv'::text)) + Filter: (metric_name.name ~~ 'cpu%'::text) + -> Custom Scan (DataNodeScan) on public.metric_dist + Output: metric_dist.id, metric_dist.value, metric_dist.ts + Data node: data_node_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_2_chunk, _dist_hyper_1_3_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_17_chunk, _dist_hyper_1_18_chunk, _dist_hyper_1_19_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_21_chunk, _dist_hyper_1_22_chunk, _dist_hyper_1_23_chunk, _dist_hyper_1_24_chunk, _dist_hyper_1_25_chunk, _dist_hyper_1_26_chunk, _dist_hyper_1_27_chunk, _dist_hyper_1_28_chunk, _dist_hyper_1_29_chunk, _dist_hyper_1_30_chunk, _dist_hyper_1_31_chunk, _dist_hyper_1_32_chunk, _dist_hyper_1_33_chunk, _dist_hyper_1_34_chunk, _dist_hyper_1_35_chunk, _dist_hyper_1_36_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_38_chunk, _dist_hyper_1_39_chunk, _dist_hyper_1_40_chunk, _dist_hyper_1_41_chunk, _dist_hyper_1_42_chunk, _dist_hyper_1_43_chunk, _dist_hyper_1_44_chunk, _dist_hyper_1_45_chunk, _dist_hyper_1_46_chunk, _dist_hyper_1_47_chunk, _dist_hyper_1_48_chunk, _dist_hyper_1_49_chunk, _dist_hyper_1_50_chunk, _dist_hyper_1_51_chunk, _dist_hyper_1_52_chunk, _dist_hyper_1_53_chunk + Remote SQL: SELECT ts, id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53]) AND (($1::timestamp with time zone = ts)) AND (($2::integer = id)) +(22 rows) + +-- Two distributed hypertables, each joined to reference and 
all joined together. +-- The query finds the percentage of time points where one metric is higher than +-- another, and also tweaked not to use initplans.Requires hash join. +explain (analyze, verbose, costs off, timing off, summary off) +select count(*) filter (where m1.value > m2.value) / count(*) +from metric_dist m1 +join metric_dist m2 using (ts) +where m1.id in (select id from metric_name where name = 'cpu1') + and m2.id in (select id from metric_name where name = 'cpu3') +; + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Aggregate (actual rows=1 loops=1) + Output: (count(*) FILTER (WHERE (m1.value > m2.value)) / count(*)) + -> Hash Join (actual rows=91 loops=1) + Output: m1.value, m2.value + Hash Cond: (m1.ts = 
m2.ts) + -> Nested Loop (actual rows=50037 loops=1) + Output: m1.value, m1.ts + -> Index Scan using metric_name_name on public.metric_name (actual rows=1 loops=1) + Output: metric_name.id, metric_name.name + Index Cond: (metric_name.name = 'cpu1'::text) + -> Custom Scan (DataNodeScan) on public.metric_dist m1 (actual rows=50037 loops=1) + Output: m1.value, m1.ts, m1.id + Data node: data_node_1 + Fetcher Type: Cursor + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_2_chunk, _dist_hyper_1_3_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_17_chunk, _dist_hyper_1_18_chunk, _dist_hyper_1_19_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_21_chunk, _dist_hyper_1_22_chunk, _dist_hyper_1_23_chunk, _dist_hyper_1_24_chunk, _dist_hyper_1_25_chunk, _dist_hyper_1_26_chunk, _dist_hyper_1_27_chunk, _dist_hyper_1_28_chunk, _dist_hyper_1_29_chunk, _dist_hyper_1_30_chunk, _dist_hyper_1_31_chunk, _dist_hyper_1_32_chunk, _dist_hyper_1_33_chunk, _dist_hyper_1_34_chunk, _dist_hyper_1_35_chunk, _dist_hyper_1_36_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_38_chunk, _dist_hyper_1_39_chunk, _dist_hyper_1_40_chunk, _dist_hyper_1_41_chunk, _dist_hyper_1_42_chunk, _dist_hyper_1_43_chunk, _dist_hyper_1_44_chunk, _dist_hyper_1_45_chunk, _dist_hyper_1_46_chunk, _dist_hyper_1_47_chunk, _dist_hyper_1_48_chunk, _dist_hyper_1_49_chunk, _dist_hyper_1_50_chunk, _dist_hyper_1_51_chunk, _dist_hyper_1_52_chunk, _dist_hyper_1_53_chunk + Remote SQL: SELECT ts, id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, ARRAY[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 
49, 50, 51, 52, 53]) AND (($1::integer = id)) + -> Hash (actual rows=50101 loops=1) + Output: m2.value, m2.ts + Buckets: 65536 (originally 1024) Batches: 1 (originally 1) + -> Nested Loop (actual rows=50101 loops=1) + Output: m2.value, m2.ts + -> Index Scan using metric_name_name on public.metric_name metric_name_1 (actual rows=1 loops=1) + Output: metric_name_1.id, metric_name_1.name + Index Cond: (metric_name_1.name = 'cpu3'::text) + -> Custom Scan (DataNodeScan) on public.metric_dist m2 (actual rows=50101 loops=1) + Output: m2.value, m2.ts, m2.id + Data node: data_node_1 + Fetcher Type: Cursor + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_2_chunk, _dist_hyper_1_3_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_16_chunk, _dist_hyper_1_17_chunk, _dist_hyper_1_18_chunk, _dist_hyper_1_19_chunk, _dist_hyper_1_20_chunk, _dist_hyper_1_21_chunk, _dist_hyper_1_22_chunk, _dist_hyper_1_23_chunk, _dist_hyper_1_24_chunk, _dist_hyper_1_25_chunk, _dist_hyper_1_26_chunk, _dist_hyper_1_27_chunk, _dist_hyper_1_28_chunk, _dist_hyper_1_29_chunk, _dist_hyper_1_30_chunk, _dist_hyper_1_31_chunk, _dist_hyper_1_32_chunk, _dist_hyper_1_33_chunk, _dist_hyper_1_34_chunk, _dist_hyper_1_35_chunk, _dist_hyper_1_36_chunk, _dist_hyper_1_37_chunk, _dist_hyper_1_38_chunk, _dist_hyper_1_39_chunk, _dist_hyper_1_40_chunk, _dist_hyper_1_41_chunk, _dist_hyper_1_42_chunk, _dist_hyper_1_43_chunk, _dist_hyper_1_44_chunk, _dist_hyper_1_45_chunk, _dist_hyper_1_46_chunk, _dist_hyper_1_47_chunk, _dist_hyper_1_48_chunk, _dist_hyper_1_49_chunk, _dist_hyper_1_50_chunk, _dist_hyper_1_51_chunk, _dist_hyper_1_52_chunk, _dist_hyper_1_53_chunk + Remote SQL: SELECT ts, id, value FROM public.metric_dist WHERE _timescaledb_internal.chunks_in(public.metric_dist.*, 
ARRAY[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53]) AND (($1::integer = id)) +(30 rows) diff --git a/tsl/test/expected/dist_partial_agg-12.out b/tsl/test/expected/dist_partial_agg-12.out index 8460e9035..11628afdb 100644 --- a/tsl/test/expected/dist_partial_agg-12.out +++ b/tsl/test/expected/dist_partial_agg-12.out @@ -167,31 +167,35 @@ SET timescaledb.remote_data_fetcher = 'cursor'; FROM :TEST_TABLE GROUP BY :GROUPING, timec ORDER BY :GROUPING, timec; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Custom Scan (AsyncAppend) - Output: location, (min(allnull)), (max(temperature)), ((sum(temperature) + sum(humidity))), (avg(humidity)), (round(stddev((humidity)::integer), 5)), (bit_and(bit_int)), (bit_or(bit_int)), (bool_and(good_life)), (every((temperature > '0'::double precision))), (bool_or(good_life)), (count(*)), (count(temperature)), (count(allnull)), (round((corr(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((temperature)::integer)::double precision, ((humidity)::integer)::double 
precision))::numeric, 5)), (round((covar_samp(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round(stddev((temperature)::integer), 5)), (round(stddev_pop((temperature)::integer), 5)), (round(stddev_samp((temperature)::integer), 5)), (round(variance((temperature)::integer), 5)), (round(var_pop((temperature)::integer), 5)), (round(var_samp((temperature)::integer), 5)), (last(temperature, timec)), (histogram(temperature, '0'::double precision, '100'::double precision, 1)), timec - -> Merge Append - Sort Key: conditions.location, conditions.timec - -> Custom Scan (DataNodeScan) - Output: conditions.location, (min(conditions.allnull)), (max(conditions.temperature)), ((sum(conditions.temperature) + sum(conditions.humidity))), (avg(conditions.humidity)), (round(stddev((conditions.humidity)::integer), 5)), (bit_and(conditions.bit_int)), (bit_or(conditions.bit_int)), 
(bool_and(conditions.good_life)), (every((conditions.temperature > '0'::double precision))), (bool_or(conditions.good_life)), (count(*)), (count(conditions.temperature)), (count(conditions.allnull)), (round((corr(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round(stddev((conditions.temperature)::integer), 5)), (round(stddev_pop((conditions.temperature)::integer), 
5)), (round(stddev_samp((conditions.temperature)::integer), 5)), (round(variance((conditions.temperature)::integer), 5)), (round(var_pop((conditions.temperature)::integer), 5)), (round(var_samp((conditions.temperature)::integer), 5)), (last(conditions.temperature, conditions.timec)), (histogram(conditions.temperature, '0'::double precision, '100'::double precision, 1)), conditions.timec - Relations: Aggregate on (public.conditions) + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Merge Append + Sort Key: conditions.location, conditions.timec + -> GroupAggregate + Output: conditions.location, min(conditions.allnull), max(conditions.temperature), (sum(conditions.temperature) + sum(conditions.humidity)), avg(conditions.humidity), round(stddev((conditions.humidity)::integer), 5), bit_and(conditions.bit_int), bit_or(conditions.bit_int), bool_and(conditions.good_life), every((conditions.temperature > '0'::double precision)), bool_or(conditions.good_life), count(*), count(conditions.temperature), count(conditions.allnull), round((corr(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), 
round((covar_samp(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions.temperature)::integer), 5), round(stddev_pop((conditions.temperature)::integer), 5), round(stddev_samp((conditions.temperature)::integer), 5), round(variance((conditions.temperature)::integer), 5), round(var_pop((conditions.temperature)::integer), 5), round(var_samp((conditions.temperature)::integer), 5), last(conditions.temperature, conditions.timec), histogram(conditions.temperature, '0'::double precision, '100'::double precision, 1), conditions.timec + Group Key: conditions.location, conditions.timec + -> Custom Scan (DataNodeScan) on public.conditions + Output: 
conditions.location, conditions.timec, conditions.allnull, conditions.temperature, conditions.humidity, conditions.bit_int, conditions.good_life Data node: db_dist_partial_agg_1 Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_2_chunk, _dist_hyper_1_3_chunk, _dist_hyper_1_4_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST - -> Custom 
Scan (DataNodeScan) - Output: conditions_1.location, (min(conditions_1.allnull)), (max(conditions_1.temperature)), ((sum(conditions_1.temperature) + sum(conditions_1.humidity))), (avg(conditions_1.humidity)), (round(stddev((conditions_1.humidity)::integer), 5)), (bit_and(conditions_1.bit_int)), (bit_or(conditions_1.bit_int)), (bool_and(conditions_1.good_life)), (every((conditions_1.temperature > '0'::double precision))), (bool_or(conditions_1.good_life)), (count(*)), (count(conditions_1.temperature)), (count(conditions_1.allnull)), (round((corr(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), 
(round((regr_sxy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round(stddev((conditions_1.temperature)::integer), 5)), (round(stddev_pop((conditions_1.temperature)::integer), 5)), (round(stddev_samp((conditions_1.temperature)::integer), 5)), (round(variance((conditions_1.temperature)::integer), 5)), (round(var_pop((conditions_1.temperature)::integer), 5)), (round(var_samp((conditions_1.temperature)::integer), 5)), (last(conditions_1.temperature, conditions_1.timec)), (histogram(conditions_1.temperature, '0'::double precision, '100'::double precision, 1)), conditions_1.timec - Relations: Aggregate on (public.conditions) + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST + -> GroupAggregate + Output: conditions_1.location, min(conditions_1.allnull), max(conditions_1.temperature), (sum(conditions_1.temperature) + sum(conditions_1.humidity)), avg(conditions_1.humidity), round(stddev((conditions_1.humidity)::integer), 5), bit_and(conditions_1.bit_int), bit_or(conditions_1.bit_int), bool_and(conditions_1.good_life), every((conditions_1.temperature > '0'::double precision)), bool_or(conditions_1.good_life), count(*), count(conditions_1.temperature), count(conditions_1.allnull), round((corr(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions_1.temperature)::integer)::double precision, 
((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions_1.temperature)::integer), 5), round(stddev_pop((conditions_1.temperature)::integer), 5), round(stddev_samp((conditions_1.temperature)::integer), 5), round(variance((conditions_1.temperature)::integer), 5), round(var_pop((conditions_1.temperature)::integer), 5), round(var_samp((conditions_1.temperature)::integer), 5), last(conditions_1.temperature, conditions_1.timec), histogram(conditions_1.temperature, '0'::double precision, '100'::double precision, 1), conditions_1.timec + Group Key: conditions_1.location, conditions_1.timec + -> Custom Scan (DataNodeScan) on public.conditions conditions_1 + Output: 
conditions_1.location, conditions_1.timec, conditions_1.allnull, conditions_1.temperature, conditions_1.humidity, conditions_1.bit_int, conditions_1.good_life Data node: db_dist_partial_agg_2 Chunks: _dist_hyper_1_9_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_12_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS 
LAST - -> Custom Scan (DataNodeScan) - Output: conditions_2.location, (min(conditions_2.allnull)), (max(conditions_2.temperature)), ((sum(conditions_2.temperature) + sum(conditions_2.humidity))), (avg(conditions_2.humidity)), (round(stddev((conditions_2.humidity)::integer), 5)), (bit_and(conditions_2.bit_int)), (bit_or(conditions_2.bit_int)), (bool_and(conditions_2.good_life)), (every((conditions_2.temperature > '0'::double precision))), (bool_or(conditions_2.good_life)), (count(*)), (count(conditions_2.temperature)), (count(conditions_2.allnull)), (round((corr(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 
5)), (round((regr_sxy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round(stddev((conditions_2.temperature)::integer), 5)), (round(stddev_pop((conditions_2.temperature)::integer), 5)), (round(stddev_samp((conditions_2.temperature)::integer), 5)), (round(variance((conditions_2.temperature)::integer), 5)), (round(var_pop((conditions_2.temperature)::integer), 5)), (round(var_samp((conditions_2.temperature)::integer), 5)), (last(conditions_2.temperature, conditions_2.timec)), (histogram(conditions_2.temperature, '0'::double precision, '100'::double precision, 1)), conditions_2.timec - Relations: Aggregate on (public.conditions) + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST + -> GroupAggregate + Output: conditions_2.location, min(conditions_2.allnull), max(conditions_2.temperature), (sum(conditions_2.temperature) + sum(conditions_2.humidity)), avg(conditions_2.humidity), round(stddev((conditions_2.humidity)::integer), 5), bit_and(conditions_2.bit_int), bit_or(conditions_2.bit_int), bool_and(conditions_2.good_life), every((conditions_2.temperature > '0'::double precision)), bool_or(conditions_2.good_life), count(*), count(conditions_2.temperature), count(conditions_2.allnull), round((corr(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions_2.temperature)::integer)::double precision, 
((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions_2.temperature)::integer), 5), round(stddev_pop((conditions_2.temperature)::integer), 5), round(stddev_samp((conditions_2.temperature)::integer), 5), round(variance((conditions_2.temperature)::integer), 5), round(var_pop((conditions_2.temperature)::integer), 5), round(var_samp((conditions_2.temperature)::integer), 5), last(conditions_2.temperature, conditions_2.timec), histogram(conditions_2.temperature, '0'::double precision, '100'::double precision, 1), conditions_2.timec + Group Key: conditions_2.location, conditions_2.timec + -> Custom Scan (DataNodeScan) on public.conditions conditions_2 + Output: 
conditions_2.location, conditions_2.timec, conditions_2.allnull, conditions_2.temperature, conditions_2.humidity, conditions_2.bit_int, conditions_2.good_life Data node: db_dist_partial_agg_3 Chunks: _dist_hyper_1_5_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_8_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST 
-(22 rows) + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST +(26 rows) -- Aggregates on custom types are not yet pushed down :PREFIX SELECT :GROUPING, diff --git a/tsl/test/expected/dist_partial_agg-13.out b/tsl/test/expected/dist_partial_agg-13.out index 8460e9035..11628afdb 100644 --- a/tsl/test/expected/dist_partial_agg-13.out +++ b/tsl/test/expected/dist_partial_agg-13.out @@ -167,31 +167,35 @@ SET timescaledb.remote_data_fetcher = 'cursor'; FROM :TEST_TABLE GROUP BY :GROUPING, timec ORDER BY :GROUPING, timec; - QUERY PLAN -----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ - Custom Scan (AsyncAppend) - Output: location, (min(allnull)), (max(temperature)), ((sum(temperature) + sum(humidity))), (avg(humidity)), (round(stddev((humidity)::integer), 5)), (bit_and(bit_int)), (bit_or(bit_int)), (bool_and(good_life)), (every((temperature > '0'::double precision))), (bool_or(good_life)), (count(*)), (count(temperature)), (count(allnull)), (round((corr(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), 
(round((covar_pop(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round(stddev((temperature)::integer), 5)), (round(stddev_pop((temperature)::integer), 5)), (round(stddev_samp((temperature)::integer), 5)), (round(variance((temperature)::integer), 5)), (round(var_pop((temperature)::integer), 5)), (round(var_samp((temperature)::integer), 5)), (last(temperature, timec)), (histogram(temperature, '0'::double precision, '100'::double precision, 1)), timec - -> Merge Append - Sort Key: conditions.location, conditions.timec - -> Custom Scan (DataNodeScan) - Output: conditions.location, (min(conditions.allnull)), (max(conditions.temperature)), ((sum(conditions.temperature) + sum(conditions.humidity))), (avg(conditions.humidity)), 
(round(stddev((conditions.humidity)::integer), 5)), (bit_and(conditions.bit_int)), (bit_or(conditions.bit_int)), (bool_and(conditions.good_life)), (every((conditions.temperature > '0'::double precision))), (bool_or(conditions.good_life)), (count(*)), (count(conditions.temperature)), (count(conditions.allnull)), (round((corr(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 
5)), (round(stddev((conditions.temperature)::integer), 5)), (round(stddev_pop((conditions.temperature)::integer), 5)), (round(stddev_samp((conditions.temperature)::integer), 5)), (round(variance((conditions.temperature)::integer), 5)), (round(var_pop((conditions.temperature)::integer), 5)), (round(var_samp((conditions.temperature)::integer), 5)), (last(conditions.temperature, conditions.timec)), (histogram(conditions.temperature, '0'::double precision, '100'::double precision, 1)), conditions.timec - Relations: Aggregate on (public.conditions) + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Merge Append + Sort Key: conditions.location, conditions.timec + -> GroupAggregate + Output: conditions.location, min(conditions.allnull), max(conditions.temperature), (sum(conditions.temperature) + sum(conditions.humidity)), avg(conditions.humidity), round(stddev((conditions.humidity)::integer), 5), bit_and(conditions.bit_int), bit_or(conditions.bit_int), bool_and(conditions.good_life), every((conditions.temperature > '0'::double precision)), bool_or(conditions.good_life), count(*), count(conditions.temperature), count(conditions.allnull), round((corr(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), 
round((covar_pop(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions.temperature)::integer), 5), round(stddev_pop((conditions.temperature)::integer), 5), round(stddev_samp((conditions.temperature)::integer), 5), round(variance((conditions.temperature)::integer), 5), round(var_pop((conditions.temperature)::integer), 5), round(var_samp((conditions.temperature)::integer), 5), last(conditions.temperature, conditions.timec), histogram(conditions.temperature, '0'::double precision, '100'::double precision, 1), 
conditions.timec + Group Key: conditions.location, conditions.timec + -> Custom Scan (DataNodeScan) on public.conditions + Output: conditions.location, conditions.timec, conditions.allnull, conditions.temperature, conditions.humidity, conditions.bit_int, conditions.good_life Data node: db_dist_partial_agg_1 Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_2_chunk, _dist_hyper_1_3_chunk, _dist_hyper_1_4_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM public.conditions WHERE 
_timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: conditions_1.location, (min(conditions_1.allnull)), (max(conditions_1.temperature)), ((sum(conditions_1.temperature) + sum(conditions_1.humidity))), (avg(conditions_1.humidity)), (round(stddev((conditions_1.humidity)::integer), 5)), (bit_and(conditions_1.bit_int)), (bit_or(conditions_1.bit_int)), (bool_and(conditions_1.good_life)), (every((conditions_1.temperature > '0'::double precision))), (bool_or(conditions_1.good_life)), (count(*)), (count(conditions_1.temperature)), (count(conditions_1.allnull)), (round((corr(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), 
(round((regr_sxx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round(stddev((conditions_1.temperature)::integer), 5)), (round(stddev_pop((conditions_1.temperature)::integer), 5)), (round(stddev_samp((conditions_1.temperature)::integer), 5)), (round(variance((conditions_1.temperature)::integer), 5)), (round(var_pop((conditions_1.temperature)::integer), 5)), (round(var_samp((conditions_1.temperature)::integer), 5)), (last(conditions_1.temperature, conditions_1.timec)), (histogram(conditions_1.temperature, '0'::double precision, '100'::double precision, 1)), conditions_1.timec - Relations: Aggregate on (public.conditions) + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST + -> GroupAggregate + Output: conditions_1.location, min(conditions_1.allnull), max(conditions_1.temperature), (sum(conditions_1.temperature) + sum(conditions_1.humidity)), avg(conditions_1.humidity), round(stddev((conditions_1.humidity)::integer), 5), bit_and(conditions_1.bit_int), bit_or(conditions_1.bit_int), bool_and(conditions_1.good_life), every((conditions_1.temperature > '0'::double precision)), bool_or(conditions_1.good_life), count(*), count(conditions_1.temperature), count(conditions_1.allnull), round((corr(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions_1.temperature)::integer)::double precision, 
((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions_1.temperature)::integer), 5), round(stddev_pop((conditions_1.temperature)::integer), 5), round(stddev_samp((conditions_1.temperature)::integer), 5), round(variance((conditions_1.temperature)::integer), 5), round(var_pop((conditions_1.temperature)::integer), 5), round(var_samp((conditions_1.temperature)::integer), 5), last(conditions_1.temperature, conditions_1.timec), histogram(conditions_1.temperature, '0'::double precision, '100'::double precision, 1), 
conditions_1.timec + Group Key: conditions_1.location, conditions_1.timec + -> Custom Scan (DataNodeScan) on public.conditions conditions_1 + Output: conditions_1.location, conditions_1.timec, conditions_1.allnull, conditions_1.temperature, conditions_1.humidity, conditions_1.bit_int, conditions_1.good_life Data node: db_dist_partial_agg_2 Chunks: _dist_hyper_1_9_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_12_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM 
public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: conditions_2.location, (min(conditions_2.allnull)), (max(conditions_2.temperature)), ((sum(conditions_2.temperature) + sum(conditions_2.humidity))), (avg(conditions_2.humidity)), (round(stddev((conditions_2.humidity)::integer), 5)), (bit_and(conditions_2.bit_int)), (bit_or(conditions_2.bit_int)), (bool_and(conditions_2.good_life)), (every((conditions_2.temperature > '0'::double precision))), (bool_or(conditions_2.good_life)), (count(*)), (count(conditions_2.temperature)), (count(conditions_2.allnull)), (round((corr(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double 
precision))::numeric, 5)), (round((regr_sxx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round(stddev((conditions_2.temperature)::integer), 5)), (round(stddev_pop((conditions_2.temperature)::integer), 5)), (round(stddev_samp((conditions_2.temperature)::integer), 5)), (round(variance((conditions_2.temperature)::integer), 5)), (round(var_pop((conditions_2.temperature)::integer), 5)), (round(var_samp((conditions_2.temperature)::integer), 5)), (last(conditions_2.temperature, conditions_2.timec)), (histogram(conditions_2.temperature, '0'::double precision, '100'::double precision, 1)), conditions_2.timec - Relations: Aggregate on (public.conditions) + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST + -> GroupAggregate + Output: conditions_2.location, min(conditions_2.allnull), max(conditions_2.temperature), (sum(conditions_2.temperature) + sum(conditions_2.humidity)), avg(conditions_2.humidity), round(stddev((conditions_2.humidity)::integer), 5), bit_and(conditions_2.bit_int), bit_or(conditions_2.bit_int), bool_and(conditions_2.good_life), every((conditions_2.temperature > '0'::double precision)), bool_or(conditions_2.good_life), count(*), count(conditions_2.temperature), count(conditions_2.allnull), round((corr(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions_2.temperature)::integer)::double precision, 
((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions_2.temperature)::integer), 5), round(stddev_pop((conditions_2.temperature)::integer), 5), round(stddev_samp((conditions_2.temperature)::integer), 5), round(variance((conditions_2.temperature)::integer), 5), round(var_pop((conditions_2.temperature)::integer), 5), round(var_samp((conditions_2.temperature)::integer), 5), last(conditions_2.temperature, conditions_2.timec), histogram(conditions_2.temperature, '0'::double precision, '100'::double precision, 1), 
conditions_2.timec + Group Key: conditions_2.location, conditions_2.timec + -> Custom Scan (DataNodeScan) on public.conditions conditions_2 + Output: conditions_2.location, conditions_2.timec, conditions_2.allnull, conditions_2.temperature, conditions_2.humidity, conditions_2.bit_int, conditions_2.good_life Data node: db_dist_partial_agg_3 Chunks: _dist_hyper_1_5_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_8_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM 
public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST -(22 rows) + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST +(26 rows) -- Aggregates on custom types are not yet pushed down :PREFIX SELECT :GROUPING, diff --git a/tsl/test/expected/dist_partial_agg-14.out b/tsl/test/expected/dist_partial_agg-14.out index 8460e9035..11628afdb 100644 --- a/tsl/test/expected/dist_partial_agg-14.out +++ b/tsl/test/expected/dist_partial_agg-14.out @@ -167,31 +167,35 @@ SET timescaledb.remote_data_fetcher = 'cursor'; FROM :TEST_TABLE GROUP BY :GROUPING, timec ORDER BY :GROUPING, timec; - QUERY PLAN -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Custom Scan (AsyncAppend) - Output: location, (min(allnull)), (max(temperature)), ((sum(temperature) + sum(humidity))), (avg(humidity)), (round(stddev((humidity)::integer), 5)), (bit_and(bit_int)), (bit_or(bit_int)), (bool_and(good_life)), (every((temperature > '0'::double precision))), (bool_or(good_life)), 
(count(*)), (count(temperature)), (count(allnull)), (round((corr(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round(stddev((temperature)::integer), 5)), (round(stddev_pop((temperature)::integer), 5)), (round(stddev_samp((temperature)::integer), 5)), (round(variance((temperature)::integer), 5)), (round(var_pop((temperature)::integer), 5)), (round(var_samp((temperature)::integer), 5)), (last(temperature, timec)), (histogram(temperature, '0'::double precision, '100'::double precision, 1)), timec - -> Merge Append - Sort Key: conditions.location, conditions.timec - -> Custom Scan (DataNodeScan) - Output: conditions.location, 
(min(conditions.allnull)), (max(conditions.temperature)), ((sum(conditions.temperature) + sum(conditions.humidity))), (avg(conditions.humidity)), (round(stddev((conditions.humidity)::integer), 5)), (bit_and(conditions.bit_int)), (bit_or(conditions.bit_int)), (bool_and(conditions.good_life)), (every((conditions.temperature > '0'::double precision))), (bool_or(conditions.good_life)), (count(*)), (count(conditions.temperature)), (count(conditions.allnull)), (round((corr(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double 
precision))::numeric, 5)), (round((regr_syy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round(stddev((conditions.temperature)::integer), 5)), (round(stddev_pop((conditions.temperature)::integer), 5)), (round(stddev_samp((conditions.temperature)::integer), 5)), (round(variance((conditions.temperature)::integer), 5)), (round(var_pop((conditions.temperature)::integer), 5)), (round(var_samp((conditions.temperature)::integer), 5)), (last(conditions.temperature, conditions.timec)), (histogram(conditions.temperature, '0'::double precision, '100'::double precision, 1)), conditions.timec - Relations: Aggregate on (public.conditions) + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Merge Append + Sort Key: conditions.location, conditions.timec + -> GroupAggregate + Output: conditions.location, min(conditions.allnull), max(conditions.temperature), (sum(conditions.temperature) + sum(conditions.humidity)), avg(conditions.humidity), round(stddev((conditions.humidity)::integer), 5), bit_and(conditions.bit_int), bit_or(conditions.bit_int), bool_and(conditions.good_life), every((conditions.temperature > '0'::double precision)), bool_or(conditions.good_life), count(*), count(conditions.temperature), count(conditions.allnull), 
round((corr(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions.temperature)::integer), 5), round(stddev_pop((conditions.temperature)::integer), 5), round(stddev_samp((conditions.temperature)::integer), 5), round(variance((conditions.temperature)::integer), 5), round(var_pop((conditions.temperature)::integer), 5), round(var_samp((conditions.temperature)::integer), 5), 
last(conditions.temperature, conditions.timec), histogram(conditions.temperature, '0'::double precision, '100'::double precision, 1), conditions.timec + Group Key: conditions.location, conditions.timec + -> Custom Scan (DataNodeScan) on public.conditions + Output: conditions.location, conditions.timec, conditions.allnull, conditions.temperature, conditions.humidity, conditions.bit_int, conditions.good_life Data node: db_dist_partial_agg_1 Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_2_chunk, _dist_hyper_1_3_chunk, _dist_hyper_1_4_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), 
public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: conditions_1.location, (min(conditions_1.allnull)), (max(conditions_1.temperature)), ((sum(conditions_1.temperature) + sum(conditions_1.humidity))), (avg(conditions_1.humidity)), (round(stddev((conditions_1.humidity)::integer), 5)), (bit_and(conditions_1.bit_int)), (bit_or(conditions_1.bit_int)), (bool_and(conditions_1.good_life)), (every((conditions_1.temperature > '0'::double precision))), (bool_or(conditions_1.good_life)), (count(*)), (count(conditions_1.temperature)), (count(conditions_1.allnull)), (round((corr(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), 
(round((regr_slope(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round(stddev((conditions_1.temperature)::integer), 5)), (round(stddev_pop((conditions_1.temperature)::integer), 5)), (round(stddev_samp((conditions_1.temperature)::integer), 5)), (round(variance((conditions_1.temperature)::integer), 5)), (round(var_pop((conditions_1.temperature)::integer), 5)), (round(var_samp((conditions_1.temperature)::integer), 5)), (last(conditions_1.temperature, conditions_1.timec)), (histogram(conditions_1.temperature, '0'::double precision, '100'::double precision, 1)), conditions_1.timec - Relations: Aggregate on (public.conditions) + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST + -> GroupAggregate + Output: conditions_1.location, min(conditions_1.allnull), max(conditions_1.temperature), (sum(conditions_1.temperature) + sum(conditions_1.humidity)), avg(conditions_1.humidity), round(stddev((conditions_1.humidity)::integer), 5), bit_and(conditions_1.bit_int), bit_or(conditions_1.bit_int), bool_and(conditions_1.good_life), every((conditions_1.temperature > '0'::double precision)), bool_or(conditions_1.good_life), count(*), count(conditions_1.temperature), count(conditions_1.allnull), round((corr(((conditions_1.temperature)::integer)::double precision, 
((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions_1.temperature)::integer), 5), round(stddev_pop((conditions_1.temperature)::integer), 5), round(stddev_samp((conditions_1.temperature)::integer), 5), round(variance((conditions_1.temperature)::integer), 5), round(var_pop((conditions_1.temperature)::integer), 5), round(var_samp((conditions_1.temperature)::integer), 5), 
last(conditions_1.temperature, conditions_1.timec), histogram(conditions_1.temperature, '0'::double precision, '100'::double precision, 1), conditions_1.timec + Group Key: conditions_1.location, conditions_1.timec + -> Custom Scan (DataNodeScan) on public.conditions conditions_1 + Output: conditions_1.location, conditions_1.timec, conditions_1.allnull, conditions_1.temperature, conditions_1.humidity, conditions_1.bit_int, conditions_1.good_life Data node: db_dist_partial_agg_2 Chunks: _dist_hyper_1_9_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_12_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), 
round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: conditions_2.location, (min(conditions_2.allnull)), (max(conditions_2.temperature)), ((sum(conditions_2.temperature) + sum(conditions_2.humidity))), (avg(conditions_2.humidity)), (round(stddev((conditions_2.humidity)::integer), 5)), (bit_and(conditions_2.bit_int)), (bit_or(conditions_2.bit_int)), (bool_and(conditions_2.good_life)), (every((conditions_2.temperature > '0'::double precision))), (bool_or(conditions_2.good_life)), (count(*)), (count(conditions_2.temperature)), (count(conditions_2.allnull)), (round((corr(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions_2.temperature)::integer)::double precision, 
((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round(stddev((conditions_2.temperature)::integer), 5)), (round(stddev_pop((conditions_2.temperature)::integer), 5)), (round(stddev_samp((conditions_2.temperature)::integer), 5)), (round(variance((conditions_2.temperature)::integer), 5)), (round(var_pop((conditions_2.temperature)::integer), 5)), (round(var_samp((conditions_2.temperature)::integer), 5)), (last(conditions_2.temperature, conditions_2.timec)), (histogram(conditions_2.temperature, '0'::double precision, '100'::double precision, 1)), conditions_2.timec - Relations: Aggregate on (public.conditions) + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST + -> GroupAggregate + Output: conditions_2.location, min(conditions_2.allnull), max(conditions_2.temperature), (sum(conditions_2.temperature) + sum(conditions_2.humidity)), avg(conditions_2.humidity), round(stddev((conditions_2.humidity)::integer), 5), bit_and(conditions_2.bit_int), bit_or(conditions_2.bit_int), bool_and(conditions_2.good_life), every((conditions_2.temperature > '0'::double precision)), bool_or(conditions_2.good_life), count(*), count(conditions_2.temperature), count(conditions_2.allnull), 
round((corr(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions_2.temperature)::integer), 5), round(stddev_pop((conditions_2.temperature)::integer), 5), round(stddev_samp((conditions_2.temperature)::integer), 5), round(variance((conditions_2.temperature)::integer), 5), round(var_pop((conditions_2.temperature)::integer), 
5), round(var_samp((conditions_2.temperature)::integer), 5), last(conditions_2.temperature, conditions_2.timec), histogram(conditions_2.temperature, '0'::double precision, '100'::double precision, 1), conditions_2.timec + Group Key: conditions_2.location, conditions_2.timec + -> Custom Scan (DataNodeScan) on public.conditions conditions_2 + Output: conditions_2.location, conditions_2.timec, conditions_2.allnull, conditions_2.temperature, conditions_2.humidity, conditions_2.bit_int, conditions_2.good_life Data node: db_dist_partial_agg_3 Chunks: _dist_hyper_1_5_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_8_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), 
round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST -(22 rows) + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST +(26 rows) -- Aggregates on custom types are not yet pushed down :PREFIX SELECT :GROUPING, diff --git a/tsl/test/expected/dist_partial_agg-15.out b/tsl/test/expected/dist_partial_agg-15.out index f6955fc00..d8cbaee4a 100644 --- a/tsl/test/expected/dist_partial_agg-15.out +++ b/tsl/test/expected/dist_partial_agg-15.out @@ -167,31 +167,41 @@ SET timescaledb.remote_data_fetcher = 'cursor'; FROM :TEST_TABLE GROUP BY :GROUPING, timec ORDER BY :GROUPING, timec; - QUERY PLAN 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Custom Scan (AsyncAppend) - Output: location, (min(allnull)), (max(temperature)), ((sum(temperature) + sum(humidity))), (avg(humidity)), (round(stddev((humidity)::integer), 5)), (bit_and(bit_int)), (bit_or(bit_int)), (bool_and(good_life)), (every((temperature > '0'::double precision))), (bool_or(good_life)), (count(*)), (count(temperature)), (count(allnull)), (round((corr(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), 
(round((regr_r2(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((temperature)::integer)::double precision, ((humidity)::integer)::double precision))::numeric, 5)), (round(stddev((temperature)::integer), 5)), (round(stddev_pop((temperature)::integer), 5)), (round(stddev_samp((temperature)::integer), 5)), (round(variance((temperature)::integer), 5)), (round(var_pop((temperature)::integer), 5)), (round(var_samp((temperature)::integer), 5)), (last(temperature, timec)), (histogram(temperature, '0'::double precision, '100'::double precision, 1)), timec - -> Merge Append - Sort Key: conditions.location, conditions.timec - -> Custom Scan (DataNodeScan) - Output: conditions.location, (min(conditions.allnull)), (max(conditions.temperature)), ((sum(conditions.temperature) + sum(conditions.humidity))), (avg(conditions.humidity)), (round(stddev((conditions.humidity)::integer), 5)), (bit_and(conditions.bit_int)), (bit_or(conditions.bit_int)), (bool_and(conditions.good_life)), (every((conditions.temperature > '0'::double precision))), (bool_or(conditions.good_life)), (count(*)), (count(conditions.temperature)), (count(conditions.allnull)), (round((corr(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), 
(round((regr_avgx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5)), (round(stddev((conditions.temperature)::integer), 5)), (round(stddev_pop((conditions.temperature)::integer), 5)), (round(stddev_samp((conditions.temperature)::integer), 5)), (round(variance((conditions.temperature)::integer), 5)), (round(var_pop((conditions.temperature)::integer), 5)), (round(var_samp((conditions.temperature)::integer), 5)), (last(conditions.temperature, conditions.timec)), (histogram(conditions.temperature, '0'::double precision, '100'::double precision, 1)), conditions.timec - Relations: Aggregate on (public.conditions) - Data node: db_dist_partial_agg_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_2_chunk, _dist_hyper_1_3_chunk, _dist_hyper_1_4_chunk - Remote SQL: SELECT location, min(allnull), 
max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: conditions_1.location, (min(conditions_1.allnull)), (max(conditions_1.temperature)), ((sum(conditions_1.temperature) + sum(conditions_1.humidity))), (avg(conditions_1.humidity)), (round(stddev((conditions_1.humidity)::integer), 5)), (bit_and(conditions_1.bit_int)), 
(bit_or(conditions_1.bit_int)), (bool_and(conditions_1.good_life)), (every((conditions_1.temperature > '0'::double precision))), (bool_or(conditions_1.good_life)), (count(*)), (count(conditions_1.temperature)), (count(conditions_1.allnull)), (round((corr(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_r2(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5)), 
(round(stddev((conditions_1.temperature)::integer), 5)), (round(stddev_pop((conditions_1.temperature)::integer), 5)), (round(stddev_samp((conditions_1.temperature)::integer), 5)), (round(variance((conditions_1.temperature)::integer), 5)), (round(var_pop((conditions_1.temperature)::integer), 5)), (round(var_samp((conditions_1.temperature)::integer), 5)), (last(conditions_1.temperature, conditions_1.timec)), (histogram(conditions_1.temperature, '0'::double precision, '100'::double precision, 1)), conditions_1.timec - Relations: Aggregate on (public.conditions) - Data node: db_dist_partial_agg_2 - Chunks: _dist_hyper_1_9_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_12_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), 
round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: conditions_2.location, (min(conditions_2.allnull)), (max(conditions_2.temperature)), ((sum(conditions_2.temperature) + sum(conditions_2.humidity))), (avg(conditions_2.humidity)), (round(stddev((conditions_2.humidity)::integer), 5)), (bit_and(conditions_2.bit_int)), (bit_or(conditions_2.bit_int)), (bool_and(conditions_2.good_life)), (every((conditions_2.temperature > '0'::double precision))), (bool_or(conditions_2.good_life)), (count(*)), (count(conditions_2.temperature)), (count(conditions_2.allnull)), (round((corr(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((covar_pop(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((covar_samp(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_avgy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_count(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_intercept(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), 
(round((regr_r2(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_slope(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_sxy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round((regr_syy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5)), (round(stddev((conditions_2.temperature)::integer), 5)), (round(stddev_pop((conditions_2.temperature)::integer), 5)), (round(stddev_samp((conditions_2.temperature)::integer), 5)), (round(variance((conditions_2.temperature)::integer), 5)), (round(var_pop((conditions_2.temperature)::integer), 5)), (round(var_samp((conditions_2.temperature)::integer), 5)), (last(conditions_2.temperature, conditions_2.timec)), (histogram(conditions_2.temperature, '0'::double precision, '100'::double precision, 1)), conditions_2.timec - Relations: Aggregate on (public.conditions) - Data node: db_dist_partial_agg_3 - Chunks: _dist_hyper_1_5_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_8_chunk - Remote SQL: SELECT location, min(allnull), max(temperature), (sum(temperature) + sum(humidity)), avg(humidity), round(stddev(humidity::integer), 5), bit_and(bit_int), bit_or(bit_int), bool_and(good_life), every((temperature > 0::double precision)), bool_or(good_life), count(*), count(temperature), count(allnull), round(corr(temperature::integer, humidity::integer)::numeric, 5), round(covar_pop(temperature::integer, humidity::integer)::numeric, 5), round(covar_samp(temperature::integer, humidity::integer)::numeric, 5), round(regr_avgx(temperature::integer, 
humidity::integer)::numeric, 5), round(regr_avgy(temperature::integer, humidity::integer)::numeric, 5), round(regr_count(temperature::integer, humidity::integer)::numeric, 5), round(regr_intercept(temperature::integer, humidity::integer)::numeric, 5), round(regr_r2(temperature::integer, humidity::integer)::numeric, 5), round(regr_slope(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxx(temperature::integer, humidity::integer)::numeric, 5), round(regr_sxy(temperature::integer, humidity::integer)::numeric, 5), round(regr_syy(temperature::integer, humidity::integer)::numeric, 5), round(stddev(temperature::integer), 5), round(stddev_pop(temperature::integer), 5), round(stddev_samp(temperature::integer), 5), round(variance(temperature::integer), 5), round(var_pop(temperature::integer), 5), round(var_samp(temperature::integer), 5), public.last(temperature, timec), public.histogram(temperature, 0::double precision, 100::double precision, 1), timec FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) GROUP BY 1, 35 ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST -(22 rows) + QUERY PLAN 
+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Merge Append + Sort Key: conditions.location, conditions.timec + -> GroupAggregate + Output: conditions.location, min(conditions.allnull), max(conditions.temperature), (sum(conditions.temperature) + sum(conditions.humidity)), avg(conditions.humidity), round(stddev((conditions.humidity)::integer), 5), bit_and(conditions.bit_int), bit_or(conditions.bit_int), bool_and(conditions.good_life), every((conditions.temperature > '0'::double precision)), bool_or(conditions.good_life), count(*), count(conditions.temperature), count(conditions.allnull), round((corr(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), 
round((regr_count(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions.temperature)::integer)::double precision, ((conditions.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions.temperature)::integer), 5), round(stddev_pop((conditions.temperature)::integer), 5), round(stddev_samp((conditions.temperature)::integer), 5), round(variance((conditions.temperature)::integer), 5), round(var_pop((conditions.temperature)::integer), 5), round(var_samp((conditions.temperature)::integer), 5), last(conditions.temperature, conditions.timec), histogram(conditions.temperature, '0'::double precision, '100'::double precision, 1), conditions.timec + Group Key: conditions.location, conditions.timec + -> Result + Output: conditions.location, conditions.timec, conditions.allnull, conditions.temperature, conditions.humidity, conditions.bit_int, conditions.good_life + -> Custom Scan (DataNodeScan) on public.conditions + Output: conditions.location, conditions.allnull, conditions.temperature, conditions.humidity, conditions.bit_int, conditions.good_life, conditions.timec + Data node: db_dist_partial_agg_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_2_chunk, _dist_hyper_1_3_chunk, 
_dist_hyper_1_4_chunk + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST + -> GroupAggregate + Output: conditions_1.location, min(conditions_1.allnull), max(conditions_1.temperature), (sum(conditions_1.temperature) + sum(conditions_1.humidity)), avg(conditions_1.humidity), round(stddev((conditions_1.humidity)::integer), 5), bit_and(conditions_1.bit_int), bit_or(conditions_1.bit_int), bool_and(conditions_1.good_life), every((conditions_1.temperature > '0'::double precision)), bool_or(conditions_1.good_life), count(*), count(conditions_1.temperature), count(conditions_1.allnull), round((corr(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions_1.temperature)::integer)::double precision, 
((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round((regr_syy(((conditions_1.temperature)::integer)::double precision, ((conditions_1.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions_1.temperature)::integer), 5), round(stddev_pop((conditions_1.temperature)::integer), 5), round(stddev_samp((conditions_1.temperature)::integer), 5), round(variance((conditions_1.temperature)::integer), 5), round(var_pop((conditions_1.temperature)::integer), 5), round(var_samp((conditions_1.temperature)::integer), 5), last(conditions_1.temperature, conditions_1.timec), histogram(conditions_1.temperature, '0'::double precision, '100'::double precision, 1), conditions_1.timec + Group Key: conditions_1.location, conditions_1.timec + -> Result + Output: conditions_1.location, conditions_1.timec, conditions_1.allnull, conditions_1.temperature, conditions_1.humidity, conditions_1.bit_int, conditions_1.good_life + -> Custom Scan (DataNodeScan) on public.conditions conditions_1 + Output: conditions_1.location, conditions_1.allnull, conditions_1.temperature, conditions_1.humidity, conditions_1.bit_int, conditions_1.good_life, conditions_1.timec + Data node: db_dist_partial_agg_2 + Chunks: _dist_hyper_1_9_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_12_chunk + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST + -> GroupAggregate + Output: conditions_2.location, min(conditions_2.allnull), max(conditions_2.temperature), (sum(conditions_2.temperature) + 
sum(conditions_2.humidity)), avg(conditions_2.humidity), round(stddev((conditions_2.humidity)::integer), 5), bit_and(conditions_2.bit_int), bit_or(conditions_2.bit_int), bool_and(conditions_2.good_life), every((conditions_2.temperature > '0'::double precision)), bool_or(conditions_2.good_life), count(*), count(conditions_2.temperature), count(conditions_2.allnull), round((corr(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((covar_pop(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((covar_samp(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_avgx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_avgy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_count(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_intercept(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_r2(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_slope(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_sxx(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round((regr_sxy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), 
round((regr_syy(((conditions_2.temperature)::integer)::double precision, ((conditions_2.humidity)::integer)::double precision))::numeric, 5), round(stddev((conditions_2.temperature)::integer), 5), round(stddev_pop((conditions_2.temperature)::integer), 5), round(stddev_samp((conditions_2.temperature)::integer), 5), round(variance((conditions_2.temperature)::integer), 5), round(var_pop((conditions_2.temperature)::integer), 5), round(var_samp((conditions_2.temperature)::integer), 5), last(conditions_2.temperature, conditions_2.timec), histogram(conditions_2.temperature, '0'::double precision, '100'::double precision, 1), conditions_2.timec + Group Key: conditions_2.location, conditions_2.timec + -> Result + Output: conditions_2.location, conditions_2.timec, conditions_2.allnull, conditions_2.temperature, conditions_2.humidity, conditions_2.bit_int, conditions_2.good_life + -> Custom Scan (DataNodeScan) on public.conditions conditions_2 + Output: conditions_2.location, conditions_2.allnull, conditions_2.temperature, conditions_2.humidity, conditions_2.bit_int, conditions_2.good_life, conditions_2.timec + Data node: db_dist_partial_agg_3 + Chunks: _dist_hyper_1_5_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_8_chunk + Remote SQL: SELECT timec, location, temperature, humidity, allnull, bit_int, good_life FROM public.conditions WHERE _timescaledb_internal.chunks_in(public.conditions.*, ARRAY[1, 2, 3, 4]) ORDER BY location ASC NULLS LAST, timec ASC NULLS LAST +(32 rows) -- Aggregates on custom types are not yet pushed down :PREFIX SELECT :GROUPING, diff --git a/tsl/test/expected/dist_query-12.out b/tsl/test/expected/dist_query-12.out index e06eef36b..383a9f122 100644 --- a/tsl/test/expected/dist_query-12.out +++ b/tsl/test/expected/dist_query-12.out @@ -194,35 +194,36 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: "time", avg(temp) Group Key: "time" - -> Custom Scan (AsyncAppend) + -> Sort Output: "time", (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper."time", (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Partial GroupAggregate - Output: hyper_2."time", PARTIAL avg(hyper_2.temp) - Group Key: hyper_2."time" - -> Custom Scan (DataNodeScan) on 
public.hyper hyper_2 - Output: hyper_2."time", hyper_2.temp + Sort Key: "time" + -> Custom Scan (AsyncAppend) + Output: "time", (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper."time", (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST -(26 rows) + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time only (partial 
aggregation) @@ -234,35 +235,36 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")) - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT 
public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Partial GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), PARTIAL avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time") - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.temp + Sort Key: (time_bucket('@ 2 days'::interval, "time")) + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + 
Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST -(26 rows) + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -274,8 +276,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -291,15 +293,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= 
'2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### Grouping on time and device (full aggregation) @@ -311,8 +311,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN 
+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: (time_bucket('@ 2 days'::interval, "time")), device, (avg(temp)) -> Append @@ -328,15 +328,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### Grouping on time and device (full aggregation) @@ -348,8 +346,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: (date_trunc('month'::text, "time")), device, (avg(temp)) -> Append @@ -365,15 +363,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: (date_trunc('month'::text, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: date_trunc('month'::text, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: date_trunc('month'::text, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp 
FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### Grouping on time and device (full aggregation) @@ -386,8 +382,8 @@ GROUP BY 1,2 HAVING device > 4 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: (time_bucket('@ 2 days'::interval, "time")), device, (avg(temp)) -> Append @@ -403,15 +399,13 @@ HAVING device > 4 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT public.time_bucket('@ 2 
days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 +(21 rows) EXPLAIN (verbose, costs off) SELECT time_bucket('2 days', time) AS time, device, avg(temp) @@ -456,8 +450,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: device, (avg(temp)) -> Append @@ -473,15 +467,13 @@ GROUP BY 1 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> GroupAggregate - Output: hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with 
time zone)) GROUP BY 1 +(21 rows) ######### No push down on some functions @@ -532,30 +524,34 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp), sum((hyper.temp * (((random() <= '1'::double precision))::integer)::double precision)) - Group Key: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY 
public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(21 rows) + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Append + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp), sum((hyper.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device, hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device, avg(hyper_1.temp), 
sum((hyper_1.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp), sum((hyper_2.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(25 rows) ######### No push down on some functions @@ -570,29 +566,37 @@ HAVING avg(temp) * custom_sum(device) > 0.8 QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp) - Group Key: hyper."time", hyper.device - Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp + Custom Scan (AsyncAppend) + Output: "time", device, (avg(temp)) + -> Append + -> GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp) + Group Key: hyper."time", hyper.device + Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1."time", hyper_1.device, avg(hyper_1.temp) + Group Key: hyper_1."time", hyper_1.device + Filter: ((avg(hyper_1.temp) * (custom_sum(hyper_1.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_2 
Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp + -> GroupAggregate + Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) + Group Key: hyper_2."time", hyper_2.device + Filter: ((avg(hyper_2.temp) * (custom_sum(hyper_2.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(22 rows) +(30 rows) ######### No push down on some functions @@ -606,28 +610,34 @@ GROUP BY 1,2 QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp), custom_sum(hyper.device) - Group Key: hyper."time", hyper.device - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp + Custom Scan (AsyncAppend) + Output: "time", 
device, (avg(temp)), (custom_sum(device)) + -> Append + -> GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp), custom_sum(hyper.device) + Group Key: hyper."time", hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1."time", hyper_1.device, avg(hyper_1.temp), custom_sum(hyper_1.device) + Group Key: hyper_1."time", hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp + -> GroupAggregate + Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp), custom_sum(hyper_2.device) + Group Key: hyper_2."time", hyper_2.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(21 rows) +(27 rows) ######### Constification and runtime push down of time-related functions @@ -637,8 +647,8 @@ GROUP BY 1,2 (1 row) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -654,18 +664,16 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device 
ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -681,23 +689,21 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) tsl_override_current_timestamptz ---------------------------------- (1 row) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -713,15 +719,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, 
avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### LIMIT push down cases @@ -1040,69 +1044,65 @@ FROM hyper INNER JOIN top_n USING (device) WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY 
public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, (avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) 
AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) 
AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(56 rows) ######### CTEs/Sub-queries @@ -1114,41 +1114,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT 
"time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time 
zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC 
NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -1269,15 +1268,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### Grouping on time and device (full aggregation) @@ -1307,15 +1304,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### Grouping on time and device (full aggregation) @@ -1345,15 +1340,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: (date_trunc('month'::text, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: date_trunc('month'::text, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: date_trunc('month'::text, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### Grouping on time and device (full aggregation) @@ -1548,34 +1541,32 @@ GROUP BY 1,2 HAVING avg(temp) * custom_sum(device) > 0.8 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ - Sort - Output: hyper."time", hyper.device, (avg(hyper.temp)) - Sort Key: hyper."time", hyper.device - -> HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp) - Group Key: hyper."time", hyper.device - Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" 
<= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(25 rows) + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp) + Group Key: hyper."time", hyper.device + Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: hyper_1."time", hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT 
"time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(23 rows) ######### No push down on some functions @@ -1639,15 +1630,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: 
Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ @@ -1667,15 +1656,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: 
db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) tsl_override_current_timestamptz ---------------------------------- @@ -1700,15 +1687,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY 
"time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### LIMIT push down cases @@ -2035,69 +2020,65 @@ FROM hyper INNER JOIN top_n USING (device) WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time 
zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, 
ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, (avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: 
_dist_hyper_1_1_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: 
_dist_hyper_1_2_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(56 rows) ######### CTEs/Sub-queries @@ -2109,41 +2090,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + 
-> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) 
ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp 
with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -2816,41 +2796,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp 
with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, 
"time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -2875,33 +2854,40 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: hyper."time", (avg(hyper.temp)) - -> GroupAggregate - Output: hyper."time", avg(hyper.temp) - Group Key: hyper."time" + Output: "time", (avg(temp)) + -> Finalize GroupAggregate + Output: "time", avg(temp) + Group Key: "time" -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.temp + Output: "time", (PARTIAL avg(temp)) -> Merge Append - Sort Key: hyper_1."time" - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST - -> Custom Scan (DataNodeScan) on 
public.hyper hyper_3 - Output: hyper_3."time", hyper_3.temp + Sort Key: hyper."time" + -> Partial GroupAggregate + Output: hyper."time", PARTIAL avg(hyper.temp) + Group Key: hyper."time" + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST + -> Partial GroupAggregate + Output: hyper_1."time", PARTIAL avg(hyper_1.temp) + Group Key: hyper_1."time" + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST -(24 rows) + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY "time" ASC NULLS LAST +(31 rows) 
######### Grouping on time only (partial aggregation) @@ -2913,33 +2899,38 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit Output: (time_bucket('@ 2 days'::interval, hyper."time")), (avg(hyper.temp)) - -> GroupAggregate + -> Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, hyper."time")), avg(hyper.temp) Group Key: (time_bucket('@ 2 days'::interval, hyper."time")) - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.temp - -> Merge Append - Sort Key: (time_bucket('@ 2 days'::interval, hyper_1."time")) - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.temp + -> Merge Append + Sort Key: (time_bucket('@ 2 days'::interval, hyper."time")) + -> Partial GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper."time")), PARTIAL avg(hyper.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper."time") + -> Custom Scan (DataNodeScan) on public.hyper + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk 
Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.temp + -> Partial GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), PARTIAL avg(hyper_1.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time") + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), 
_timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST +(29 rows) ######### Grouping on time and device (full aggregation) @@ -3066,33 +3057,35 @@ GROUP BY 1,2 HAVING device > 4 ORDER BY 1,2 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, (avg(hyper.temp)) - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, hyper.temp - -> Merge Append - Sort Key: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), 
hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Merge Append + Sort Key: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: time_bucket('@ 2 
days'::interval, hyper."time"), hyper.device, hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device, avg(hyper_1.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(26 rows) EXPLAIN (verbose, costs off) SELECT 
time_bucket('2 days', time) AS time, device, avg(temp) @@ -3140,33 +3133,37 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: hyper.device, (avg(hyper.temp)) - -> GroupAggregate - Output: hyper.device, avg(hyper.temp) - Group Key: hyper.device - -> Custom Scan (AsyncAppend) - Output: hyper.device, hyper.temp - -> Merge Append - Sort Key: hyper_1.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1.device, hyper_1.temp + Output: device, (avg(temp)) + -> Custom Scan (AsyncAppend) + Output: device, (avg(temp)) + -> Merge Append + Sort Key: hyper.device + -> GroupAggregate + Output: hyper.device, avg(hyper.temp) + Group Key: hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1.device, avg(hyper_1.temp) + Group Key: hyper_1.device + -> 
Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(28 rows) ######### No push down on some functions @@ -3747,69 +3744,65 @@ FROM hyper INNER JOIN top_n USING (device) WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY 
public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, (avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) 
AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) 
AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(56 rows) ######### CTEs/Sub-queries @@ -3821,41 +3814,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT 
"time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time 
zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC 
NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -3880,33 +3872,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: "time", avg(temp) Group Key: "time" - -> Custom Scan (AsyncAppend) + -> Sort Output: "time", (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper."time", (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND 
(("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: "time" + -> Custom Scan (AsyncAppend) + Output: "time", (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper."time", (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data 
node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time only (partial aggregation) @@ -3918,33 +3913,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")) - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 
days'::interval, "time")), (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: 
(time_bucket('@ 2 days'::interval, "time")) + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND 
(("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -4148,33 +4146,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: device, avg(temp) Group Key: device - -> Custom Scan (AsyncAppend) + -> Sort Output: device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper.device, (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1.device, (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_2.device, (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: device + -> Custom Scan (AsyncAppend) + Output: device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper.device, (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1.device, (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: 
hyper_2.device, (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### No push down on some functions @@ -4779,10 +4780,10 @@ ORDER BY 1,2 -> Sort Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp Inner Unique: true - Hash Cond: (hyper.device = top_n.device) + Join Filter: (hyper.device = top_n.device) -> Custom Scan (AsyncAppend) Output: hyper."time", hyper.device, hyper.temp -> Append @@ -4801,7 +4802,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + -> Materialize Output: top_n.device -> Subquery Scan on top_n Output: top_n.device @@ -4847,41 +4848,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - 
Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 
min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= 
'2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper1d @@ -5016,33 +5016,36 @@ WHERE time >= '2019-01-01' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), device, avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL 
avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")), device + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT 
public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -5054,33 +5057,36 @@ WHERE time >= '2019-01-01' GROUP BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: (date_trunc('month'::text, "time")), device, avg(temp) Group Key: (date_trunc('month'::text, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (date_trunc('month'::text, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d."time")), hyper1d.device, (PARTIAL 
avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 -(24 rows) + Sort Key: (date_trunc('month'::text, "time")), device + -> Custom Scan (AsyncAppend) + Output: (date_trunc('month'::text, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM 
public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -5093,33 +5099,36 @@ GROUP BY 1,2 HAVING device > 4 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN 
+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), device, avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: 
_dist_hyper_2_19_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")), device + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, 
_timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 +(27 rows) EXPLAIN (verbose, costs off) SELECT time_bucket('2 days', time) AS time, device, avg(temp) @@ -5790,10 +5799,10 @@ ORDER BY 1,2 -> Sort Output: (time_bucket('@ 1 min'::interval, hyper1d."time")), hyper1d.device, hyper1d.temp Sort Key: (time_bucket('@ 1 min'::interval, hyper1d."time")), hyper1d.device - -> Hash Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, hyper1d."time"), hyper1d.device, hyper1d.temp Inner Unique: true - Hash Cond: (hyper1d.device = top_n.device) + Join Filter: (hyper1d.device = top_n.device) -> Custom Scan (AsyncAppend) Output: hyper1d."time", hyper1d.device, hyper1d.temp -> Append @@ -5812,7 +5821,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_2_19_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + -> Materialize Output: top_n.device -> Subquery Scan on top_n Output: top_n.device @@ -5821,12 +5830,13 @@ ORDER BY 1,2 -> Sort Output: device, (avg(temp)) Sort Key: (avg(temp)) DESC - -> Finalize HashAggregate + -> Finalize GroupAggregate Output: device, avg(temp) Group Key: device -> Custom Scan (AsyncAppend) Output: device, (PARTIAL avg(temp)) - -> Append + -> Merge Append + Sort Key: hyper1d_4.device -> Custom Scan (DataNodeScan) Output: hyper1d_4.device, (PARTIAL avg(hyper1d_4.temp)) Relations: Aggregate on (public.hyper1d) @@ -5845,7 +5855,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_2_19_chunk Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE 
_timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST -(61 rows) +(62 rows) ######### CTEs/Sub-queries @@ -5857,41 +5867,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: 
SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with 
time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC 
NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: reference diff --git a/tsl/test/expected/dist_query-13.out b/tsl/test/expected/dist_query-13.out index e06eef36b..383a9f122 100644 --- a/tsl/test/expected/dist_query-13.out +++ b/tsl/test/expected/dist_query-13.out @@ -194,35 +194,36 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: "time", avg(temp) Group Key: "time" - -> Custom Scan (AsyncAppend) + -> Sort Output: "time", (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper."time", (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: 
_dist_hyper_1_1_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Partial GroupAggregate - Output: hyper_2."time", PARTIAL avg(hyper_2.temp) - Group Key: hyper_2."time" - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.temp + Sort Key: "time" + -> Custom Scan (AsyncAppend) + Output: "time", (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper."time", (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST -(26 rows) + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time only (partial aggregation) @@ -234,35 +235,36 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), avg(temp) Group Key: (time_bucket('@ 2 days'::interval, 
"time")) - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Partial GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), PARTIAL avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time") - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.temp + Sort Key: (time_bucket('@ 2 days'::interval, "time")) + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: 
SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST -(26 rows) + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -274,8 +276,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -291,15 +293,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### Grouping on time and device (full aggregation) @@ -311,8 +311,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: (time_bucket('@ 2 days'::interval, "time")), device, (avg(temp)) -> Append @@ -328,15 +328,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 
- Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### Grouping on time and device (full aggregation) @@ -348,8 +346,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: (date_trunc('month'::text, "time")), device, (avg(temp)) -> Append @@ -365,15 +363,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: 
_dist_hyper_1_2_chunk Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: (date_trunc('month'::text, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: date_trunc('month'::text, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: date_trunc('month'::text, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### Grouping on time and device (full aggregation) @@ -386,8 +382,8 @@ GROUP BY 1,2 HAVING device > 4 - QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: (time_bucket('@ 2 days'::interval, "time")), device, (avg(temp)) -> Append @@ -403,15 +399,13 @@ HAVING device > 4 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) 
ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 +(21 rows) EXPLAIN (verbose, costs off) SELECT time_bucket('2 days', time) AS time, device, avg(temp) @@ -456,8 +450,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: device, (avg(temp)) -> Append @@ -473,15 +467,13 @@ GROUP BY 1 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> GroupAggregate - Output: hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2.device 
- -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(21 rows) ######### No push down on some functions @@ -532,30 +524,34 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp), sum((hyper.temp * (((random() <= '1'::double precision))::integer)::double precision)) - Group Key: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp - Data node: 
db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(21 rows) + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Append + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, 
hyper."time")), hyper.device, avg(hyper.temp), sum((hyper.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device, hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device, avg(hyper_1.temp), sum((hyper_1.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp), sum((hyper_2.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: time_bucket('@ 2 days'::interval, 
hyper_2."time"), hyper_2.device, hyper_2.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(25 rows) ######### No push down on some functions @@ -570,29 +566,37 @@ HAVING avg(temp) * custom_sum(device) > 0.8 QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp) - Group Key: hyper."time", hyper.device - Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp + Custom Scan (AsyncAppend) + Output: "time", device, (avg(temp)) + -> Append + -> GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp) + Group Key: hyper."time", hyper.device + Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= 
'2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1."time", hyper_1.device, avg(hyper_1.temp) + Group Key: hyper_1."time", hyper_1.device + Filter: ((avg(hyper_1.temp) * (custom_sum(hyper_1.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp + -> GroupAggregate + Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) + Group Key: hyper_2."time", hyper_2.device + Filter: ((avg(hyper_2.temp) * (custom_sum(hyper_2.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(22 rows) +(30 rows) ######### No push down on some functions @@ -606,28 +610,34 @@ GROUP BY 1,2 QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp), custom_sum(hyper.device) - Group Key: hyper."time", hyper.device - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp + Custom Scan (AsyncAppend) + Output: "time", device, (avg(temp)), (custom_sum(device)) + -> Append + -> GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp), custom_sum(hyper.device) + Group Key: hyper."time", hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1."time", hyper_1.device, avg(hyper_1.temp), custom_sum(hyper_1.device) + Group Key: hyper_1."time", hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time 
zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp + -> GroupAggregate + Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp), custom_sum(hyper_2.device) + Group Key: hyper_2."time", hyper_2.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(21 rows) +(27 rows) ######### Constification and runtime push down of time-related functions @@ -637,8 +647,8 @@ GROUP BY 1,2 (1 row) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -654,18 +664,16 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -681,23 +689,21 @@ GROUP BY 
1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) tsl_override_current_timestamptz ---------------------------------- (1 row) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN 
+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -713,15 +719,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### LIMIT push down cases @@ -1040,69 +1044,65 @@ FROM hyper INNER JOIN top_n USING (device) WHERE time BETWEEN '2019-01-01' AND '2019-01-01 
15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) 
on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") 
ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, (avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) 
AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) 
AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(56 rows) ######### CTEs/Sub-queries @@ -1114,41 +1114,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND 
(h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 
min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, 
h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -1269,15 +1268,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND 
(("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### Grouping on time and device (full aggregation) @@ -1307,15 +1304,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST 
-(24 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### Grouping on time and device (full aggregation) @@ -1345,15 +1340,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: (date_trunc('month'::text, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: date_trunc('month'::text, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: date_trunc('month'::text, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: 
(date_trunc('month'::text, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### Grouping on time and device (full aggregation) @@ -1548,34 +1541,32 @@ GROUP BY 1,2 HAVING avg(temp) * custom_sum(device) > 0.8 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ - Sort - Output: hyper."time", hyper.device, (avg(hyper.temp)) - Sort Key: hyper."time", hyper.device - -> HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp) - Group Key: hyper."time", hyper.device - Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom 
Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(25 rows) + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp) + Group Key: hyper."time", hyper.device + Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: hyper_1."time", hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= 
'2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(23 rows) ######### No push down on some functions @@ -1639,15 +1630,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ @@ -1667,15 +1656,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, 
ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) tsl_override_current_timestamptz ---------------------------------- @@ -1700,15 +1687,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on 
(public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### LIMIT push down cases @@ -2035,69 +2020,65 @@ FROM hyper INNER JOIN top_n USING (device) WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> 
Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, 
hyper_1."time")), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, 
(avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(56 rows) ######### CTEs/Sub-queries @@ -2109,41 +2090,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp 
with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, 
"time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -2816,41 +2796,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' 
GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan 
(DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", 
h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time 
zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -2875,33 +2854,40 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: hyper."time", (avg(hyper.temp)) - -> GroupAggregate - Output: hyper."time", avg(hyper.temp) - Group Key: hyper."time" + Output: "time", (avg(temp)) + -> Finalize GroupAggregate + Output: "time", avg(temp) + Group Key: "time" -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.temp + Output: "time", (PARTIAL avg(temp)) -> Merge Append - Sort Key: hyper_1."time" - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - 
Output: hyper_2."time", hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.temp + Sort Key: hyper."time" + -> Partial GroupAggregate + Output: hyper."time", PARTIAL avg(hyper.temp) + Group Key: hyper."time" + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST + -> Partial GroupAggregate + Output: hyper_1."time", PARTIAL avg(hyper_1.temp) + Group Key: hyper_1."time" + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 
15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST -(24 rows) + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY "time" ASC NULLS LAST +(31 rows) ######### Grouping on time only (partial aggregation) @@ -2913,33 +2899,38 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit Output: (time_bucket('@ 2 days'::interval, hyper."time")), (avg(hyper.temp)) - -> GroupAggregate + -> Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, hyper."time")), avg(hyper.temp) Group Key: (time_bucket('@ 2 days'::interval, hyper."time")) - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.temp - -> Merge Append - Sort Key: (time_bucket('@ 2 days'::interval, hyper_1."time")) - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.temp 
+ -> Merge Append + Sort Key: (time_bucket('@ 2 days'::interval, hyper."time")) + -> Partial GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper."time")), PARTIAL avg(hyper.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper."time") + -> Custom Scan (DataNodeScan) on public.hyper + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.temp + -> Partial GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), PARTIAL avg(hyper_1.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time") + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp 
with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST +(29 rows) ######### Grouping on time and device (full aggregation) @@ -3066,33 +3057,35 @@ GROUP BY 1,2 HAVING device > 4 ORDER BY 1,2 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, (avg(hyper.temp)) - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: 
(time_bucket('@ 2 days'::interval, hyper."time")), hyper.device - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, hyper.temp - -> Merge Append - Sort Key: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS 
LAST, device ASC NULLS LAST -(24 rows) + -> Merge Append + Sort Key: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device, hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device, avg(hyper_1.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM 
public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(26 rows) EXPLAIN (verbose, costs off) SELECT time_bucket('2 days', time) AS time, device, avg(temp) @@ -3140,33 +3133,37 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: hyper.device, (avg(hyper.temp)) - -> GroupAggregate - Output: hyper.device, avg(hyper.temp) - Group Key: hyper.device - -> Custom Scan (AsyncAppend) - Output: hyper.device, hyper.temp - -> Merge Append - Sort Key: hyper_1.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1.device, hyper_1.temp + Output: device, (avg(temp)) + -> Custom Scan (AsyncAppend) + Output: device, (avg(temp)) + -> Merge Append + Sort Key: hyper.device + -> GroupAggregate + Output: hyper.device, avg(hyper.temp) + Group Key: hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT device, temp FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1.device, avg(hyper_1.temp) + Group Key: hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(28 rows) ######### No push down on some functions @@ -3747,69 +3744,65 @@ FROM hyper INNER JOIN top_n USING (device) WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: 
hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS 
LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, (avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 
15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 
15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(56 rows) ######### CTEs/Sub-queries @@ -3821,41 +3814,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> 
Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> 
Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: 
db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -3880,33 +3872,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: "time", avg(temp) Group Key: "time" - -> Custom Scan (AsyncAppend) + -> Sort Output: "time", (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper."time", (PARTIAL avg(hyper.temp)) - Relations: Aggregate on 
(public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: "time" + -> Custom Scan (AsyncAppend) + Output: "time", (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper."time", (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, 
_dist_hyper_1_17_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time only (partial aggregation) @@ -3918,33 +3913,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN 
+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")) - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, 
hyper_2."time")), (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")) + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with 
time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -4148,33 +4146,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: device, avg(temp) Group Key: device - -> Custom Scan (AsyncAppend) + -> Sort Output: device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper.device, (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 
3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1.device, (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_2.device, (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: device + -> Custom Scan (AsyncAppend) + Output: device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper.device, (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1.device, (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on 
(public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### No push down on some functions @@ -4779,10 +4780,10 @@ ORDER BY 1,2 -> Sort Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp Inner Unique: true - Hash Cond: (hyper.device = top_n.device) + Join Filter: (hyper.device = top_n.device) -> Custom Scan (AsyncAppend) Output: hyper."time", hyper.device, hyper.temp -> Append @@ -4801,7 +4802,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST 
- -> Hash + -> Materialize Output: top_n.device -> Subquery Scan on top_n Output: top_n.device @@ -4847,41 +4848,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= 
'2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY 
public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", 
h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper1d @@ -5016,33 +5016,36 @@ WHERE time >= '2019-01-01' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), device, avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM 
public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")), device + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + 
-> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -5054,33 +5057,36 @@ WHERE time >= '2019-01-01' GROUP BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: (date_trunc('month'::text, "time")), device, avg(temp) Group Key: 
(date_trunc('month'::text, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (date_trunc('month'::text, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 -(24 rows) + Sort Key: (date_trunc('month'::text, "time")), device + -> Custom Scan (AsyncAppend) + Output: (date_trunc('month'::text, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, 
hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -5093,33 +5099,36 @@ GROUP BY 1,2 HAVING device > 4 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY 
PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), device, avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: 
_dist_hyper_2_19_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")), device + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, 
_timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 +(27 rows) EXPLAIN (verbose, costs off) SELECT time_bucket('2 days', time) AS time, device, avg(temp) @@ -5790,10 +5799,10 @@ ORDER BY 1,2 -> Sort Output: (time_bucket('@ 1 min'::interval, hyper1d."time")), hyper1d.device, hyper1d.temp Sort Key: (time_bucket('@ 1 min'::interval, hyper1d."time")), hyper1d.device - -> Hash Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, hyper1d."time"), hyper1d.device, hyper1d.temp Inner Unique: true - Hash Cond: (hyper1d.device = top_n.device) + Join Filter: (hyper1d.device = top_n.device) -> Custom Scan (AsyncAppend) Output: hyper1d."time", hyper1d.device, hyper1d.temp -> Append @@ -5812,7 +5821,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_2_19_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + -> Materialize Output: top_n.device -> Subquery Scan on top_n Output: top_n.device @@ -5821,12 +5830,13 @@ ORDER BY 1,2 -> Sort Output: device, (avg(temp)) Sort Key: (avg(temp)) DESC - -> Finalize HashAggregate + -> Finalize GroupAggregate Output: device, avg(temp) Group Key: device -> Custom Scan (AsyncAppend) Output: device, (PARTIAL avg(temp)) - -> Append + -> Merge Append + Sort Key: hyper1d_4.device -> Custom Scan (DataNodeScan) Output: hyper1d_4.device, (PARTIAL avg(hyper1d_4.temp)) Relations: Aggregate on (public.hyper1d) @@ -5845,7 +5855,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_2_19_chunk Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE 
_timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST -(61 rows) +(62 rows) ######### CTEs/Sub-queries @@ -5857,41 +5867,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: 
SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with 
time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC 
NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: reference diff --git a/tsl/test/expected/dist_query-14.out b/tsl/test/expected/dist_query-14.out index e06eef36b..383a9f122 100644 --- a/tsl/test/expected/dist_query-14.out +++ b/tsl/test/expected/dist_query-14.out @@ -194,35 +194,36 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: "time", avg(temp) Group Key: "time" - -> Custom Scan (AsyncAppend) + -> Sort Output: "time", (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper."time", (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: 
_dist_hyper_1_1_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Partial GroupAggregate - Output: hyper_2."time", PARTIAL avg(hyper_2.temp) - Group Key: hyper_2."time" - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.temp + Sort Key: "time" + -> Custom Scan (AsyncAppend) + Output: "time", (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper."time", (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST -(26 rows) + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time only (partial aggregation) @@ -234,35 +235,36 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), avg(temp) Group Key: (time_bucket('@ 2 days'::interval, 
"time")) - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Partial GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), PARTIAL avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time") - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.temp + Sort Key: (time_bucket('@ 2 days'::interval, "time")) + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: 
SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST -(26 rows) + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -274,8 +276,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -291,15 +293,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### Grouping on time and device (full aggregation) @@ -311,8 +311,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: (time_bucket('@ 2 days'::interval, "time")), device, (avg(temp)) -> Append @@ -328,15 +328,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 
- Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### Grouping on time and device (full aggregation) @@ -348,8 +346,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: (date_trunc('month'::text, "time")), device, (avg(temp)) -> Append @@ -365,15 +363,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: 
_dist_hyper_1_2_chunk Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: (date_trunc('month'::text, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: date_trunc('month'::text, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: date_trunc('month'::text, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### Grouping on time and device (full aggregation) @@ -386,8 +382,8 @@ GROUP BY 1,2 HAVING device > 4 - QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: (time_bucket('@ 2 days'::interval, "time")), device, (avg(temp)) -> Append @@ -403,15 +399,13 @@ HAVING device > 4 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) 
ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 +(21 rows) EXPLAIN (verbose, costs off) SELECT time_bucket('2 days', time) AS time, device, avg(temp) @@ -456,8 +450,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: device, (avg(temp)) -> Append @@ -473,15 +467,13 @@ GROUP BY 1 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> GroupAggregate - Output: hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2.device 
- -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(21 rows) ######### No push down on some functions @@ -532,30 +524,34 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp), sum((hyper.temp * (((random() <= '1'::double precision))::integer)::double precision)) - Group Key: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp - Data node: 
db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(21 rows) + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Append + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, 
hyper."time")), hyper.device, avg(hyper.temp), sum((hyper.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device, hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device, avg(hyper_1.temp), sum((hyper_1.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp), sum((hyper_2.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: time_bucket('@ 2 days'::interval, 
hyper_2."time"), hyper_2.device, hyper_2.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(25 rows) ######### No push down on some functions @@ -570,29 +566,37 @@ HAVING avg(temp) * custom_sum(device) > 0.8 QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp) - Group Key: hyper."time", hyper.device - Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp + Custom Scan (AsyncAppend) + Output: "time", device, (avg(temp)) + -> Append + -> GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp) + Group Key: hyper."time", hyper.device + Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= 
'2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1."time", hyper_1.device, avg(hyper_1.temp) + Group Key: hyper_1."time", hyper_1.device + Filter: ((avg(hyper_1.temp) * (custom_sum(hyper_1.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp + -> GroupAggregate + Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) + Group Key: hyper_2."time", hyper_2.device + Filter: ((avg(hyper_2.temp) * (custom_sum(hyper_2.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(22 rows) +(30 rows) ######### No push down on some functions @@ -606,28 +610,34 @@ GROUP BY 1,2 QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp), custom_sum(hyper.device) - Group Key: hyper."time", hyper.device - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp + Custom Scan (AsyncAppend) + Output: "time", device, (avg(temp)), (custom_sum(device)) + -> Append + -> GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp), custom_sum(hyper.device) + Group Key: hyper."time", hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1."time", hyper_1.device, avg(hyper_1.temp), custom_sum(hyper_1.device) + Group Key: hyper_1."time", hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time 
zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp + -> GroupAggregate + Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp), custom_sum(hyper_2.device) + Group Key: hyper_2."time", hyper_2.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(21 rows) +(27 rows) ######### Constification and runtime push down of time-related functions @@ -637,8 +647,8 @@ GROUP BY 1,2 (1 row) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -654,18 +664,16 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -681,23 +689,21 @@ GROUP BY 
1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) tsl_override_current_timestamptz ---------------------------------- (1 row) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN 
+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -713,15 +719,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### LIMIT push down cases @@ -1040,69 +1044,65 @@ FROM hyper INNER JOIN top_n USING (device) WHERE time BETWEEN '2019-01-01' AND '2019-01-01 
15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) 
on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") 
ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, (avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) 
AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) 
AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(56 rows) ######### CTEs/Sub-queries @@ -1114,41 +1114,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND 
(h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 
min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, 
h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -1269,15 +1268,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND 
(("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### Grouping on time and device (full aggregation) @@ -1307,15 +1304,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST 
-(24 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### Grouping on time and device (full aggregation) @@ -1345,15 +1340,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: (date_trunc('month'::text, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: date_trunc('month'::text, hyper_2."time"), hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: date_trunc('month'::text, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: 
(date_trunc('month'::text, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY date_trunc('month'::text, "time") ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### Grouping on time and device (full aggregation) @@ -1548,34 +1541,32 @@ GROUP BY 1,2 HAVING avg(temp) * custom_sum(device) > 0.8 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ - Sort - Output: hyper."time", hyper.device, (avg(hyper.temp)) - Sort Key: hyper."time", hyper.device - -> HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp) - Group Key: hyper."time", hyper.device - Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom 
Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(25 rows) + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp) + Group Key: hyper."time", hyper.device + Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: hyper_1."time", hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= 
'2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(23 rows) ######### No push down on some functions @@ -1639,15 +1630,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ @@ -1667,15 +1656,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, 
ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) tsl_override_current_timestamptz ---------------------------------- @@ -1700,15 +1687,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on 
(public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### LIMIT push down cases @@ -2035,69 +2020,65 @@ FROM hyper INNER JOIN top_n USING (device) WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> 
Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, 
hyper_1."time")), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, 
(avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(56 rows) ######### CTEs/Sub-queries @@ -2109,41 +2090,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp 
with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, 
"time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -2816,41 +2796,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' 
GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan 
(DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", 
h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time 
zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -2875,33 +2854,40 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: hyper."time", (avg(hyper.temp)) - -> GroupAggregate - Output: hyper."time", avg(hyper.temp) - Group Key: hyper."time" + Output: "time", (avg(temp)) + -> Finalize GroupAggregate + Output: "time", avg(temp) + Group Key: "time" -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.temp + Output: "time", (PARTIAL avg(temp)) -> Merge Append - Sort Key: hyper_1."time" - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - 
Output: hyper_2."time", hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.temp + Sort Key: hyper."time" + -> Partial GroupAggregate + Output: hyper."time", PARTIAL avg(hyper.temp) + Group Key: hyper."time" + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST + -> Partial GroupAggregate + Output: hyper_1."time", PARTIAL avg(hyper_1.temp) + Group Key: hyper_1."time" + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 
15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST -(24 rows) + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY "time" ASC NULLS LAST +(31 rows) ######### Grouping on time only (partial aggregation) @@ -2913,33 +2899,38 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit Output: (time_bucket('@ 2 days'::interval, hyper."time")), (avg(hyper.temp)) - -> GroupAggregate + -> Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, hyper."time")), avg(hyper.temp) Group Key: (time_bucket('@ 2 days'::interval, hyper."time")) - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.temp - -> Merge Append - Sort Key: (time_bucket('@ 2 days'::interval, hyper_1."time")) - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.temp 
+ -> Merge Append + Sort Key: (time_bucket('@ 2 days'::interval, hyper."time")) + -> Partial GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper."time")), PARTIAL avg(hyper.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper."time") + -> Custom Scan (DataNodeScan) on public.hyper + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.temp + -> Partial GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), PARTIAL avg(hyper_1.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time") + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp 
with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST +(29 rows) ######### Grouping on time and device (full aggregation) @@ -3066,33 +3057,35 @@ GROUP BY 1,2 HAVING device > 4 ORDER BY 1,2 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, (avg(hyper.temp)) - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: 
(time_bucket('@ 2 days'::interval, hyper."time")), hyper.device - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, hyper.temp - -> Merge Append - Sort Key: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS 
LAST, device ASC NULLS LAST -(24 rows) + -> Merge Append + Sort Key: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device, hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device, avg(hyper_1.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM 
public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(26 rows) EXPLAIN (verbose, costs off) SELECT time_bucket('2 days', time) AS time, device, avg(temp) @@ -3140,33 +3133,37 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: hyper.device, (avg(hyper.temp)) - -> GroupAggregate - Output: hyper.device, avg(hyper.temp) - Group Key: hyper.device - -> Custom Scan (AsyncAppend) - Output: hyper.device, hyper.temp - -> Merge Append - Sort Key: hyper_1.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1.device, hyper_1.temp + Output: device, (avg(temp)) + -> Custom Scan (AsyncAppend) + Output: device, (avg(temp)) + -> Merge Append + Sort Key: hyper.device + -> GroupAggregate + Output: hyper.device, avg(hyper.temp) + Group Key: hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT device, temp FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1.device, avg(hyper_1.temp) + Group Key: hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(28 rows) ######### No push down on some functions @@ -3747,69 +3744,65 @@ FROM hyper INNER JOIN top_n USING (device) WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: 
hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")), hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS 
LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, (avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 
15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 
15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(56 rows) ######### CTEs/Sub-queries @@ -3821,41 +3814,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> 
Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> 
Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: 
db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -3880,33 +3872,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: "time", avg(temp) Group Key: "time" - -> Custom Scan (AsyncAppend) + -> Sort Output: "time", (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper."time", (PARTIAL avg(hyper.temp)) - Relations: Aggregate on 
(public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: "time" + -> Custom Scan (AsyncAppend) + Output: "time", (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper."time", (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, 
_dist_hyper_1_17_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time only (partial aggregation) @@ -3918,33 +3913,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN 
+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")) - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, 
hyper_2."time")), (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")) + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with 
time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -4148,33 +4146,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: device, avg(temp) Group Key: device - -> Custom Scan (AsyncAppend) + -> Sort Output: device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper.device, (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 
3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1.device, (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_2.device, (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: device + -> Custom Scan (AsyncAppend) + Output: device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper.device, (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1.device, (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on 
(public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### No push down on some functions @@ -4779,10 +4780,10 @@ ORDER BY 1,2 -> Sort Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp Inner Unique: true - Hash Cond: (hyper.device = top_n.device) + Join Filter: (hyper.device = top_n.device) -> Custom Scan (AsyncAppend) Output: hyper."time", hyper.device, hyper.temp -> Append @@ -4801,7 +4802,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST 
- -> Hash + -> Materialize Output: top_n.device -> Subquery Scan on top_n Output: top_n.device @@ -4847,41 +4848,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= 
'2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY 
public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", 
h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper1d @@ -5016,33 +5016,36 @@ WHERE time >= '2019-01-01' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), device, avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM 
public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")), device + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + 
-> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -5054,33 +5057,36 @@ WHERE time >= '2019-01-01' GROUP BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: (date_trunc('month'::text, "time")), device, avg(temp) Group Key: 
(date_trunc('month'::text, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (date_trunc('month'::text, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 -(24 rows) + Sort Key: (date_trunc('month'::text, "time")), device + -> Custom Scan (AsyncAppend) + Output: (date_trunc('month'::text, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, 
hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -5093,33 +5099,36 @@ GROUP BY 1,2 HAVING device > 4 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY 
PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), device, avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: 
_dist_hyper_2_19_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")), device + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, 
_timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 +(27 rows) EXPLAIN (verbose, costs off) SELECT time_bucket('2 days', time) AS time, device, avg(temp) @@ -5790,10 +5799,10 @@ ORDER BY 1,2 -> Sort Output: (time_bucket('@ 1 min'::interval, hyper1d."time")), hyper1d.device, hyper1d.temp Sort Key: (time_bucket('@ 1 min'::interval, hyper1d."time")), hyper1d.device - -> Hash Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, hyper1d."time"), hyper1d.device, hyper1d.temp Inner Unique: true - Hash Cond: (hyper1d.device = top_n.device) + Join Filter: (hyper1d.device = top_n.device) -> Custom Scan (AsyncAppend) Output: hyper1d."time", hyper1d.device, hyper1d.temp -> Append @@ -5812,7 +5821,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_2_19_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + -> Materialize Output: top_n.device -> Subquery Scan on top_n Output: top_n.device @@ -5821,12 +5830,13 @@ ORDER BY 1,2 -> Sort Output: device, (avg(temp)) Sort Key: (avg(temp)) DESC - -> Finalize HashAggregate + -> Finalize GroupAggregate Output: device, avg(temp) Group Key: device -> Custom Scan (AsyncAppend) Output: device, (PARTIAL avg(temp)) - -> Append + -> Merge Append + Sort Key: hyper1d_4.device -> Custom Scan (DataNodeScan) Output: hyper1d_4.device, (PARTIAL avg(hyper1d_4.temp)) Relations: Aggregate on (public.hyper1d) @@ -5845,7 +5855,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_2_19_chunk Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE 
_timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST -(61 rows) +(62 rows) ######### CTEs/Sub-queries @@ -5857,41 +5867,40 @@ WHERE h1.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' AND h2.time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) - Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Custom Scan (DataNodeScan) on public.hyper h1_1 - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: 
SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_2 - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper h1_3 - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with 
time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(32 rows) + Output: h1."time", h1.device, h1.temp + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append + -> Custom Scan (DataNodeScan) on public.hyper h1_1 + Output: h1_1."time", h1_1.device, h1_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_2 + Output: h1_2."time", h1_2.device, h1_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC 
NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper h1_3 + Output: h1_3."time", h1_3.device, h1_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: reference diff --git a/tsl/test/expected/dist_query-15.out b/tsl/test/expected/dist_query-15.out index 8dcb37d65..392d9e96f 100644 --- a/tsl/test/expected/dist_query-15.out +++ b/tsl/test/expected/dist_query-15.out @@ -194,35 +194,36 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: "time", avg(temp) Group Key: "time" - -> Custom Scan (AsyncAppend) + -> Sort Output: "time", (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper."time", (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: 
_dist_hyper_1_1_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Partial GroupAggregate - Output: hyper_2."time", PARTIAL avg(hyper_2.temp) - Group Key: hyper_2."time" - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.temp + Sort Key: "time" + -> Custom Scan (AsyncAppend) + Output: "time", (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper."time", (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST -(26 rows) + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time only (partial aggregation) @@ -234,33 +235,36 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), avg(temp) Group Key: (time_bucket('@ 2 days'::interval, 
"time")) - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")) + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, 
"time")), (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -272,8 +276,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -289,15 +293,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### Grouping on time and device (full aggregation) @@ -380,8 +382,8 @@ GROUP BY 1,2 HAVING device > 4 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: (time_bucket('@ 2 days'::interval, "time")), device, (avg(temp)) -> Append @@ -397,17 +399,13 @@ HAVING device > 4 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp) - Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device - -> Result - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - -> Custom Scan (DataNodeScan) on 
public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(25 rows) + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 +(21 rows) EXPLAIN (verbose, costs off) SELECT time_bucket('2 days', time) AS time, device, avg(temp) @@ -452,8 +450,8 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (AsyncAppend) Output: device, (avg(temp)) -> Append @@ -469,15 +467,13 @@ GROUP BY 1 
Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> GroupAggregate - Output: hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 +(21 rows) ######### No push down on some functions @@ -528,36 +524,40 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp), sum((hyper.temp * (((random() <= 
'1'::double precision))::integer)::double precision)) - Group Key: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, hyper.temp - -> Append - -> Result - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.device, hyper_3.temp - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" 
<= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(27 rows) + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Append + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp), sum((hyper.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device + -> Result + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device, hyper.temp + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.device, hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device, avg(hyper_1.temp), sum((hyper_1.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device + -> Result + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp + Data node: db_dist_query_2 + Chunks: 
_dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, avg(hyper_2.temp), sum((hyper_2.temp * (((random() <= '1'::double precision))::integer)::double precision)) + Group Key: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device + -> Result + Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST +(31 rows) ######### No push down on some functions @@ -572,29 +572,37 @@ HAVING avg(temp) * custom_sum(device) > 0.8 QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp) - Group Key: hyper."time", hyper.device - Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) - -> Custom Scan 
(AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp + Custom Scan (AsyncAppend) + Output: "time", device, (avg(temp)) + -> Append + -> GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp) + Group Key: hyper."time", hyper.device + Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1."time", hyper_1.device, avg(hyper_1.temp) + Group Key: hyper_1."time", hyper_1.device + Filter: ((avg(hyper_1.temp) * (custom_sum(hyper_1.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp + -> GroupAggregate + Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) + Group Key: hyper_2."time", 
hyper_2.device + Filter: ((avg(hyper_2.temp) * (custom_sum(hyper_2.device))::double precision) > '0.8'::double precision) + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(22 rows) +(30 rows) ######### No push down on some functions @@ -608,28 +616,34 @@ GROUP BY 1,2 QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - HashAggregate - Output: hyper."time", hyper.device, avg(hyper.temp), custom_sum(hyper.device) - Group Key: hyper."time", hyper.device - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp + Custom Scan (AsyncAppend) + Output: "time", device, (avg(temp)), (custom_sum(device)) + -> Append + -> GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp), custom_sum(hyper.device) + Group Key: hyper."time", hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= 
'2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1."time", hyper_1.device, avg(hyper_1.temp), custom_sum(hyper_1.device) + Group Key: hyper_1."time", hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp + -> GroupAggregate + Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp), custom_sum(hyper_2.device) + Group Key: hyper_2."time", hyper_2.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(21 rows) +(27 rows) ######### Constification and runtime push down of time-related functions @@ -639,8 +653,8 @@ GROUP BY 1,2 (1 row) - QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -656,18 +670,16 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -683,23 +695,21 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: 
hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) tsl_override_current_timestamptz ---------------------------------- (1 row) - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Custom Scan (AsyncAppend) Output: "time", device, (avg(temp)) -> Append @@ -715,15 +725,13 @@ GROUP BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE 
_timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(23 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(21 rows) ######### LIMIT push down cases @@ -1052,65 +1060,67 @@ ORDER BY 1,2 ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner 
Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")), hyper_1.device + -> Result + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") -> Custom Scan (DataNodeScan) on public.hyper hyper_1 Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Result + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") -> Custom Scan (DataNodeScan) on public.hyper hyper_2 Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Result + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") -> Custom Scan (DataNodeScan) on public.hyper hyper_3 Output: hyper_3."time", hyper_3.device, hyper_3.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= 
'2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, (avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= 
'2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(62 rows) ######### CTEs/Sub-queries @@ -1127,44 +1137,37 @@ ORDER BY 1,2 GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 
min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Result + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Result - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append -> Custom Scan (DataNodeScan) on public.hyper h1_1 Output: h1_1."time", h1_1.device, h1_1.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") -> Custom Scan (DataNodeScan) on public.hyper h1_2 Output: h1_2."time", h1_2.device, h1_2.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper 
WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") -> Custom Scan (DataNodeScan) on public.hyper h1_3 Output: h1_3."time", h1_3.device, h1_3.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Result - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(40 rows) +(33 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -1285,15 +1288,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND 
(("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### Grouping on time and device (full aggregation) @@ -1566,34 +1567,32 @@ GROUP BY 1,2 HAVING avg(temp) * custom_sum(device) > 0.8 ORDER BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ - Sort - Output: hyper."time", hyper.device, (avg(hyper.temp)) - Sort Key: hyper."time", hyper.device - -> HashAggregate - 
Output: hyper."time", hyper.device, avg(hyper.temp) - Group Key: hyper."time", hyper.device - Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(25 rows) + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + GroupAggregate + Output: hyper."time", hyper.device, avg(hyper.temp) + Group Key: hyper."time", hyper.device + Filter: ((avg(hyper.temp) * (custom_sum(hyper.device))::double precision) > '0.8'::double precision) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: hyper_1."time", hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_2 + Output: hyper_2."time", hyper_2.device, hyper_2.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) on public.hyper hyper_3 + Output: hyper_3."time", hyper_3.device, hyper_3.temp + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(23 rows) ######### No push down on some functions @@ -1657,15 +1656,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ @@ -1685,15 +1682,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) tsl_override_current_timestamptz 
---------------------------------- @@ -1718,15 +1713,13 @@ ORDER BY 1,2 Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST - -> GroupAggregate - Output: hyper_2."time", hyper_2.device, avg(hyper_2.temp) - Group Key: hyper_2."time", hyper_2.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT "time", device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 ORDER BY "time" ASC NULLS LAST, device ASC NULLS LAST +(22 rows) ######### LIMIT push down cases @@ -2063,65 +2056,67 @@ ORDER BY 1,2 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")), hyper_1.device + -> Result + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") -> Custom Scan (DataNodeScan) on public.hyper hyper_1 Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY 
public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Result + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") -> Custom Scan (DataNodeScan) on public.hyper hyper_2 Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Result + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") -> Custom Scan (DataNodeScan) on public.hyper hyper_3 Output: hyper_3."time", hyper_3.device, hyper_3.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, (avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: 
SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: 
SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(62 rows) ######### CTEs/Sub-queries @@ -2138,44 +2133,37 @@ ORDER BY 1,2 GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Result + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY 
public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Result - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append -> Custom Scan (DataNodeScan) on public.hyper h1_1 Output: h1_1."time", h1_1.device, h1_1.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") -> Custom Scan (DataNodeScan) on public.hyper h1_2 Output: h1_2."time", h1_2.device, h1_2.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") -> Custom Scan (DataNodeScan) on public.hyper h1_3 Output: h1_3."time", h1_3.device, h1_3.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND 
(("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Result - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(40 rows) +(33 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -2863,44 +2851,37 @@ ORDER BY 1,2 GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Result + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE 
_timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Result - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append -> Custom Scan (DataNodeScan) on public.hyper h1_1 Output: h1_1."time", h1_1.device, h1_1.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") -> Custom Scan (DataNodeScan) on public.hyper h1_2 Output: h1_2."time", h1_2.device, h1_2.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") -> Custom Scan (DataNodeScan) on public.hyper h1_3 Output: h1_3."time", h1_3.device, h1_3.temp Data node: db_dist_query_3 Chunks: 
_dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Result - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(40 rows) +(33 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -2925,33 +2906,40 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN 
+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: hyper."time", (avg(hyper.temp)) - -> GroupAggregate - Output: hyper."time", avg(hyper.temp) - Group Key: hyper."time" + Output: "time", (avg(temp)) + -> Finalize GroupAggregate + Output: "time", avg(temp) + Group Key: "time" -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.temp + Output: "time", (PARTIAL avg(temp)) -> Merge Append - Sort Key: hyper_1."time" - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.temp + Sort Key: hyper."time" + -> Partial GroupAggregate + Output: hyper."time", PARTIAL avg(hyper.temp) + Group Key: hyper."time" + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk 
+ Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST + -> Partial GroupAggregate + Output: hyper_1."time", PARTIAL avg(hyper_1.temp) + Group Key: hyper_1."time" + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY "time" ASC NULLS LAST -(24 rows) + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY "time" ASC NULLS LAST +(31 rows) ######### Grouping on time only (partial aggregation) @@ -2963,39 +2951,42 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit Output: (time_bucket('@ 2 days'::interval, hyper."time")), (avg(hyper.temp)) - -> GroupAggregate + -> Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, hyper."time")), avg(hyper.temp) Group Key: (time_bucket('@ 2 days'::interval, hyper."time")) - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.temp - -> Merge Append - Sort Key: (time_bucket('@ 2 days'::interval, hyper_1."time")) + -> Merge Append + Sort Key: (time_bucket('@ 2 days'::interval, hyper."time")) + -> Partial GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper."time")), PARTIAL avg(hyper.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper."time") + -> Result + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.temp + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) 
ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST + -> Partial GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), PARTIAL avg(hyper_1.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time") -> Result Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.temp -> Custom Scan (DataNodeScan) on public.hyper hyper_1 Output: hyper_1."time", hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST - -> Result - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.temp - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST - -> Result - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.temp - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST -(30 rows) + -> Custom Scan (DataNodeScan) + Output: 
(time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST +(33 rows) ######### Grouping on time and device (full aggregation) @@ -3134,38 +3125,38 @@ GROUP BY 1,2 HAVING device > 4 ORDER BY 1,2 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, (avg(hyper.temp)) - -> GroupAggregate - Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device - -> Custom Scan (AsyncAppend) - Output: (time_bucket('@ 2 days'::interval, 
hyper."time")), hyper.device, hyper.temp - -> Merge Append - Sort Key: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device - -> Result - Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1."time", hyper_1.device, hyper_1.temp - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: time_bucket('@ 2 days'::interval, hyper_2."time"), hyper_2.device, hyper_2.temp - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2."time", hyper_2.device, hyper_2.temp - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: time_bucket('@ 2 days'::interval, hyper_3."time"), hyper_3.device, hyper_3.temp - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3."time", hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER 
BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Merge Append + Sort Key: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper."time")), hyper.device, avg(hyper.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device + -> Result + Output: time_bucket('@ 2 days'::interval, hyper."time"), hyper.device, hyper.temp + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper."time", hyper.device, hyper.temp + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> GroupAggregate + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), hyper_1.device, avg(hyper_1.temp) + Group Key: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device + -> Result + Output: time_bucket('@ 2 days'::interval, hyper_1."time"), hyper_1.device, hyper_1.temp + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1."time", hyper_1.device, hyper_1.temp + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on 
(public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 ORDER BY public.time_bucket('2 days'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST (30 rows) EXPLAIN (verbose, costs off) @@ -3214,33 +3205,37 @@ WHERE time BETWEEN '2019-01-01' AND '2019-01-01 15:00' GROUP BY 1 ORDER BY 1 LIMIT 10 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: hyper.device, (avg(hyper.temp)) - -> GroupAggregate - Output: hyper.device, avg(hyper.temp) - Group Key: hyper.device - -> Custom Scan (AsyncAppend) - Output: hyper.device, hyper.temp - -> Merge Append - Sort Key: hyper_1.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_1 - Output: hyper_1.device, hyper_1.temp + Output: device, (avg(temp)) + -> Custom Scan (AsyncAppend) + Output: device, (avg(temp)) + -> Merge Append + Sort Key: hyper.device + -> GroupAggregate + Output: hyper.device, avg(hyper.temp) + Group Key: hyper.device + -> Custom Scan (DataNodeScan) on public.hyper + Output: hyper.device, hyper.temp Data node: db_dist_query_1 Chunks: 
_dist_hyper_1_1_chunk Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_2 - Output: hyper_2.device, hyper_2.temp + -> GroupAggregate + Output: hyper_1.device, avg(hyper_1.temp) + Group Key: hyper_1.device + -> Custom Scan (DataNodeScan) on public.hyper hyper_1 + Output: hyper_1.device, hyper_1.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) on public.hyper hyper_3 - Output: hyper_3.device, hyper_3.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(24 rows) + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(28 rows) ######### No push down on some functions @@ -3837,65 +3832,67 @@ ORDER BY 1,2 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- GroupAggregate Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, avg(hyper.temp) - Group Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Sort - Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp - Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join - Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp - Inner Unique: true - Hash Cond: (hyper.device = top_n.device) - -> Custom Scan (AsyncAppend) - Output: hyper."time", hyper.device, hyper.temp - -> Append + Group Key: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device + -> Nested Loop + Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp + Inner Unique: true + Join Filter: (hyper.device = top_n.device) + -> Custom Scan (AsyncAppend) + Output: hyper."time", hyper.device, hyper.temp + -> Merge Append + Sort Key: (time_bucket('@ 1 min'::interval, hyper_1."time")), hyper_1.device + -> Result + Output: hyper_1."time", hyper_1.device, hyper_1.temp, time_bucket('@ 1 min'::interval, hyper_1."time") -> Custom Scan (DataNodeScan) on public.hyper hyper_1 Output: hyper_1."time", hyper_1.device, hyper_1.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY 
public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Result + Output: hyper_2."time", hyper_2.device, hyper_2.temp, time_bucket('@ 1 min'::interval, hyper_2."time") -> Custom Scan (DataNodeScan) on public.hyper hyper_2 Output: hyper_2."time", hyper_2.device, hyper_2.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Result + Output: hyper_3."time", hyper_3.device, hyper_3.temp, time_bucket('@ 1 min'::interval, hyper_3."time") -> Custom Scan (DataNodeScan) on public.hyper hyper_3 Output: hyper_3."time", hyper_3.device, hyper_3.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + -> Materialize + Output: top_n.device + -> Subquery Scan on top_n Output: top_n.device - -> Subquery Scan on top_n - Output: top_n.device - -> Limit + -> Limit + Output: device, (avg(temp)) + -> Sort Output: device, (avg(temp)) - -> Sort + Sort Key: (avg(temp)) DESC + -> Custom Scan (AsyncAppend) Output: device, (avg(temp)) - Sort Key: (avg(temp)) DESC - -> Custom Scan (AsyncAppend) - Output: device, (avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper_4.device, (avg(hyper_4.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk - Remote SQL: 
SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> Custom Scan (DataNodeScan) - Output: hyper_5.device, (avg(hyper_5.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk - Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST - -> GroupAggregate - Output: hyper_6.device, avg(hyper_6.temp) - Group Key: hyper_6.device - -> Custom Scan (DataNodeScan) on public.hyper hyper_6 - Output: hyper_6.device, hyper_6.temp - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk - Remote SQL: SELECT device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY device ASC NULLS LAST -(60 rows) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper_4.device, (avg(hyper_4.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_5.device, (avg(hyper_5.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk + Remote SQL: 
SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST + -> Custom Scan (DataNodeScan) + Output: hyper_6.device, (avg(hyper_6.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk + Remote SQL: SELECT device, avg(temp) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST +(62 rows) ######### CTEs/Sub-queries @@ -3912,44 +3909,37 @@ ORDER BY 1,2 GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Result + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY 
public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Result - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append -> Custom Scan (DataNodeScan) on public.hyper h1_1 Output: h1_1."time", h1_1.device, h1_1.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") -> Custom Scan (DataNodeScan) on public.hyper h1_2 Output: h1_2."time", h1_2.device, h1_2.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") -> Custom Scan (DataNodeScan) on public.hyper h1_3 Output: h1_3."time", h1_3.device, h1_3.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND 
(("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Result - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(40 rows) +(33 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper @@ -3974,33 +3964,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: "time", avg(temp) Group Key: "time" - -> Custom Scan (AsyncAppend) + -> Sort Output: "time", (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper."time", (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: 
db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: "time" + -> Custom Scan (AsyncAppend) + Output: "time", (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper."time", (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk + Remote SQL: SELECT 
"time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1."time", (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2."time", (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT "time", _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time only (partial aggregation) @@ -4012,33 +4005,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN 
+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")) - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, 
hyper_2."time")), (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")) + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper."time")), (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_1."time")), (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with 
time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper_2."time")), (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -4242,33 +4238,36 @@ WHERE time >= '2019-01-01' GROUP BY 1 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: device, avg(temp) Group Key: device - -> Custom Scan (AsyncAppend) + -> Sort Output: device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: hyper.device, (PARTIAL avg(hyper.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_1 - Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 
3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_1.device, (PARTIAL avg(hyper_1.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_2 - Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 - -> Custom Scan (DataNodeScan) - Output: hyper_2.device, (PARTIAL avg(hyper_2.temp)) - Relations: Aggregate on (public.hyper) - Data node: db_dist_query_3 - Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk - Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 -(24 rows) + Sort Key: device + -> Custom Scan (AsyncAppend) + Output: device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: hyper.device, (PARTIAL avg(hyper.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_1 + Chunks: _dist_hyper_1_1_chunk, _dist_hyper_1_4_chunk, _dist_hyper_1_8_chunk, _dist_hyper_1_12_chunk, _dist_hyper_1_13_chunk, _dist_hyper_1_15_chunk, _dist_hyper_1_17_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_1.device, (PARTIAL avg(hyper_1.temp)) + Relations: Aggregate on 
(public.hyper) + Data node: db_dist_query_2 + Chunks: _dist_hyper_1_2_chunk, _dist_hyper_1_5_chunk, _dist_hyper_1_7_chunk, _dist_hyper_1_9_chunk, _dist_hyper_1_11_chunk, _dist_hyper_1_14_chunk, _dist_hyper_1_18_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4, 5, 6, 7]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 + -> Custom Scan (DataNodeScan) + Output: hyper_2.device, (PARTIAL avg(hyper_2.temp)) + Relations: Aggregate on (public.hyper) + Data node: db_dist_query_3 + Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk + Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 +(27 rows) ######### No push down on some functions @@ -4885,10 +4884,10 @@ ORDER BY 1,2 -> Sort Output: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device, hyper.temp Sort Key: (time_bucket('@ 1 min'::interval, hyper."time")), hyper.device - -> Hash Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, hyper."time"), hyper.device, hyper.temp Inner Unique: true - Hash Cond: (hyper.device = top_n.device) + Join Filter: (hyper.device = top_n.device) -> Custom Scan (AsyncAppend) Output: hyper."time", hyper.device, hyper.temp -> Append @@ -4907,7 +4906,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk, _dist_hyper_1_6_chunk, _dist_hyper_1_10_chunk, _dist_hyper_1_16_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1, 2, 3, 4]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST 
- -> Hash + -> Materialize Output: top_n.device -> Subquery Scan on top_n Output: top_n.device @@ -4958,44 +4957,37 @@ ORDER BY 1,2 GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Result + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Result - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append -> Custom Scan (DataNodeScan) on public.hyper h1_1 Output: h1_1."time", h1_1.device, h1_1.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 
00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") -> Custom Scan (DataNodeScan) on public.hyper h1_2 Output: h1_2."time", h1_2.device, h1_2.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") -> Custom Scan (DataNodeScan) on public.hyper h1_3 Output: h1_3."time", h1_3.device, h1_3.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Result - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" 
<= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(40 rows) +(33 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: hyper1d @@ -5130,33 +5122,36 @@ WHERE time >= '2019-01-01' GROUP BY 1,2 - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), device, avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, 
(PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")), device + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT 
public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -5168,33 +5163,36 @@ WHERE time >= '2019-01-01' GROUP BY 1,2 - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Finalize GroupAggregate Output: (date_trunc('month'::text, "time")), device, avg(temp) Group Key: (date_trunc('month'::text, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (date_trunc('month'::text, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d."time")), hyper1d.device, (PARTIAL 
avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (date_trunc('month'::text, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 -(24 rows) + Sort Key: (date_trunc('month'::text, "time")), device + -> Custom Scan (AsyncAppend) + Output: (date_trunc('month'::text, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM 
public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (date_trunc('month'::text, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT date_trunc('month'::text, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1, 2 +(27 rows) ######### Grouping on time and device (full aggregation) @@ -5207,33 +5205,36 @@ GROUP BY 1,2 HAVING device > 4 - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Finalize HashAggregate + QUERY PLAN 
+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Finalize GroupAggregate Output: (time_bucket('@ 2 days'::interval, "time")), device, avg(temp) Group Key: (time_bucket('@ 2 days'::interval, "time")), device - -> Custom Scan (AsyncAppend) + -> Sort Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) - -> Append - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_1 - Chunks: _dist_hyper_2_20_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_2 - Chunks: _dist_hyper_2_21_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 - -> Custom Scan (DataNodeScan) - Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) - Relations: Aggregate on (public.hyper1d) - Data node: db_dist_query_3 - Chunks: 
_dist_hyper_2_19_chunk - Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 -(24 rows) + Sort Key: (time_bucket('@ 2 days'::interval, "time")), device + -> Custom Scan (AsyncAppend) + Output: (time_bucket('@ 2 days'::interval, "time")), device, (PARTIAL avg(temp)) + -> Append + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d."time")), hyper1d.device, (PARTIAL avg(hyper1d.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_1 + Chunks: _dist_hyper_2_20_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_1."time")), hyper1d_1.device, (PARTIAL avg(hyper1d_1.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_2 + Chunks: _dist_hyper_2_21_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[8]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 + -> Custom Scan (DataNodeScan) + Output: (time_bucket('@ 2 days'::interval, hyper1d_2."time")), hyper1d_2.device, (PARTIAL avg(hyper1d_2.temp)) + Relations: Aggregate on (public.hyper1d) + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT public.time_bucket('@ 2 days'::interval, "time"), device, 
_timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND ((device > 4)) GROUP BY 1, 2 +(27 rows) EXPLAIN (verbose, costs off) SELECT time_bucket('2 days', time) AS time, device, avg(temp) @@ -5916,10 +5917,10 @@ ORDER BY 1,2 -> Sort Output: (time_bucket('@ 1 min'::interval, hyper1d."time")), hyper1d.device, hyper1d.temp Sort Key: (time_bucket('@ 1 min'::interval, hyper1d."time")), hyper1d.device - -> Hash Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, hyper1d."time"), hyper1d.device, hyper1d.temp Inner Unique: true - Hash Cond: (hyper1d.device = top_n.device) + Join Filter: (hyper1d.device = top_n.device) -> Custom Scan (AsyncAppend) Output: hyper1d."time", hyper1d.device, hyper1d.temp -> Append @@ -5938,7 +5939,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_2_19_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Hash + -> Materialize Output: top_n.device -> Subquery Scan on top_n Output: top_n.device @@ -5947,12 +5948,13 @@ ORDER BY 1,2 -> Sort Output: device, (avg(temp)) Sort Key: (avg(temp)) DESC - -> Finalize HashAggregate + -> Finalize GroupAggregate Output: device, avg(temp) Group Key: device -> Custom Scan (AsyncAppend) Output: device, (PARTIAL avg(temp)) - -> Append + -> Merge Append + Sort Key: hyper1d_4.device -> Custom Scan (DataNodeScan) Output: hyper1d_4.device, (PARTIAL avg(hyper1d_4.temp)) Relations: Aggregate on (public.hyper1d) @@ -5971,7 +5973,7 @@ ORDER BY 1,2 Data node: db_dist_query_3 Chunks: _dist_hyper_2_19_chunk Remote SQL: SELECT device, _timescaledb_internal.partialize_agg(avg(temp)) FROM public.hyper1d WHERE 
_timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) GROUP BY 1 ORDER BY device ASC NULLS LAST -(61 rows) +(62 rows) ######### CTEs/Sub-queries @@ -5988,44 +5990,37 @@ ORDER BY 1,2 GroupAggregate Output: (time_bucket('@ 1 min'::interval, h1."time")), h1.device, avg(h1.temp), max(h2.temp) Group Key: time_bucket('@ 1 min'::interval, h1."time"), h1.device - -> Merge Join + -> Nested Loop Output: time_bucket('@ 1 min'::interval, h1."time"), h1.device, h1.temp, h2.temp - Merge Cond: ((time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time"))) AND (h1.device = h2.device)) - -> Custom Scan (AsyncAppend) + Join Filter: ((h1.device = h2.device) AND (time_bucket('@ 1 min'::interval, h1."time") = (time_bucket('@ 1 min'::interval, h2."time")))) + -> Result + Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") + -> Custom Scan (DataNodeScan) on public.hyper1d h2 + Output: h2.temp, h2."time", h2.device + Data node: db_dist_query_3 + Chunks: _dist_hyper_2_19_chunk + Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE _timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST + -> Materialize Output: h1."time", h1.device, h1.temp - -> Merge Append - Sort Key: (time_bucket('@ 1 min'::interval, h1_1."time")), h1_1.device - -> Result - Output: h1_1."time", h1_1.device, h1_1.temp, time_bucket('@ 1 min'::interval, h1_1."time") + -> Custom Scan (AsyncAppend) + Output: h1."time", h1.device, h1.temp + -> Append -> Custom Scan (DataNodeScan) on public.hyper h1_1 Output: h1_1."time", h1_1.device, h1_1.temp Data node: db_dist_query_1 Chunks: _dist_hyper_1_1_chunk Remote SQL: SELECT "time", device, temp FROM 
public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_2."time", h1_2.device, h1_2.temp, time_bucket('@ 1 min'::interval, h1_2."time") -> Custom Scan (DataNodeScan) on public.hyper h1_2 Output: h1_2."time", h1_2.device, h1_2.temp Data node: db_dist_query_2 Chunks: _dist_hyper_1_2_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Result - Output: h1_3."time", h1_3.device, h1_3.temp, time_bucket('@ 1 min'::interval, h1_3."time") -> Custom Scan (DataNodeScan) on public.hyper h1_3 Output: h1_3."time", h1_3.device, h1_3.temp Data node: db_dist_query_3 Chunks: _dist_hyper_1_3_chunk Remote SQL: SELECT "time", device, temp FROM public.hyper WHERE _timescaledb_internal.chunks_in(public.hyper.*, ARRAY[1]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST - -> Materialize - Output: h2.temp, h2."time", h2.device, (time_bucket('@ 1 min'::interval, h2."time")) - -> Result - Output: h2.temp, h2."time", h2.device, time_bucket('@ 1 min'::interval, h2."time") - -> Custom Scan (DataNodeScan) on public.hyper1d h2 - Output: h2.temp, h2."time", h2.device - Data node: db_dist_query_3 - Chunks: _dist_hyper_2_19_chunk - Remote SQL: SELECT "time", device, temp FROM public.hyper1d WHERE 
_timescaledb_internal.chunks_in(public.hyper1d.*, ARRAY[5]) AND (("time" >= '2019-01-01 00:00:00-08'::timestamp with time zone)) AND (("time" <= '2019-01-01 15:00:00-08'::timestamp with time zone)) ORDER BY public.time_bucket('00:01:00'::interval, "time") ASC NULLS LAST, device ASC NULLS LAST -(40 rows) +(33 rows) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% RUNNING TESTS on table: reference diff --git a/tsl/test/sql/dist_param.sql b/tsl/test/sql/dist_param.sql index cad0db1e3..2fef38107 100644 --- a/tsl/test/sql/dist_param.sql +++ b/tsl/test/sql/dist_param.sql @@ -33,20 +33,29 @@ analyze metric_dist; select count(*) from show_chunks('metric_dist'); -- dictionary -create table metric_name(id int primary key, name text); +create table metric_name(id int, name text collate "C", + constraint metric_name_name unique (name), + constraint metric_name_id primary key (id)); insert into metric_name values (1, 'cpu1'), (3, 'cpu3'), (7, 'cpu7'); +insert into metric_name select x, 'other' || x + from generate_series(1000, 10000) x +; analyze metric_name; -- for predictable plans -set enable_hashjoin to off; -set enable_mergejoin to off; set enable_hashagg to off; +set enable_material to off; +set enable_mergejoin to off; +-- not present on PG 12 +\set ECHO errors +select 'set enable_memoize to off' from pg_settings where name = 'enable_memoize' \gexec +\set ECHO all -- Subquery + IN select id, max(value), count(*) from metric_dist where id in (select id from metric_name where name like 'cpu%') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by id order by id ; @@ -55,17 +64,41 @@ explain (costs off, verbose) select id, max(value), count(*) from metric_dist where id in (select id from metric_name where name like 'cpu%') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and 
'2022-03-03 02:02:02+03' group by id order by id ; +-- Check that the GUC to disable these plans works. Our cost model is very +-- heuristic and may be often wrong, so there needs to be a way to disable them. +set timescaledb.enable_parameterized_data_node_scan to false; + +select id, max(value), count(*) +from metric_dist +where id in (select id from metric_name where name like 'cpu%') + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' +group by id +order by id +; + +explain (costs off, verbose) +select id, max(value), count(*) +from metric_dist +where id in (select id from metric_name where name like 'cpu%') + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' +group by id +order by id +; + +reset timescaledb.enable_parameterized_data_node_scan; + + -- Shippable EC join select name, max(value), count(*) from metric_dist join metric_name using (id) where name like 'cpu%' - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; @@ -74,17 +107,37 @@ explain (costs off, verbose) select name, max(value), count(*) from metric_dist join metric_name using (id) where name like 'cpu%' - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; +-- Shipping still might make sense if the local table is outer. +explain (costs off, verbose) +select name, max(value), count(*) +from metric_dist right join metric_name using (id) +where name like 'cpu%' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' +group by name +order by name +; + +-- Shipping doesn't make sense if the distributed table is outer. 
+explain (costs off, verbose) +select name, max(value), count(*) +from metric_dist +left join (select * from metric_name where name like 'cpu%') t using (id) +where ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' +group by name +order by name +; -- Non-shippable EC join explain (costs off, verbose) select name, max(value), count(*) from metric_dist join metric_name on name = concat('cpu', metric_dist.id) -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +where metric_name.name like 'cpu%' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; @@ -97,7 +150,8 @@ order by name select name, max(value), count(*) from metric_dist join metric_name on texteq('cpu' || textin(int4out(metric_dist.id)), name) -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +where metric_name.name like 'cpu%' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; @@ -106,7 +160,8 @@ explain (costs off, verbose) select name, max(value), count(*) from metric_dist join metric_name on texteq('cpu' || textin(int4out(metric_dist.id)), name) -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +where metric_name.name like 'cpu%' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; @@ -117,7 +172,8 @@ explain (costs off, verbose) select name, max(value), count(*) from metric_dist join metric_name on texteq(concat('cpu', textin(int4out(metric_dist.id))), name) -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +where metric_name.name like 'cpu%' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; @@ -128,7 +184,7 @@ select distinct on (id) id, ts, value from metric_dist where id in (select id from metric_name where name like 'cpu%') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts 
between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' order by id, ts, value limit 1 ; @@ -138,7 +194,7 @@ select distinct on (id) id, ts, value from metric_dist where id in (select id from metric_name where name like 'cpu%') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' order by id, ts, value limit 1 ; @@ -149,7 +205,7 @@ select distinct on (name) name, ts, value from metric_dist join metric_name using (id) where name like 'cpu%' - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' order by name, ts, value limit 1 ; @@ -159,26 +215,21 @@ select distinct on (name) name, ts, value from metric_dist join metric_name using (id) where name like 'cpu%' - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' order by name, ts, value limit 1 ; --- If the local table is very big, the parameterized nested loop might download --- the entire dist table or even more than that (in case of not equi-join). +-- If there are a lot of rows chosen from the local table, the parameterized +-- nested loop might download the entire dist table or even more than that (in +-- case of not equi-join or duplicate join keys). -- Check that the parameterized plan is not chosen in this case. 
-create table metric_name_big as select * from metric_name; -insert into metric_name_big select x, 'other' || x - from generate_series(1000, 10000) x -; -analyze metric_name_big; - explain (costs off, verbose) select name, max(value), count(*) from metric_dist -join metric_name_big using (id) -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +join metric_name using (id) +where ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by name order by name ; @@ -192,7 +243,7 @@ explain (costs off, verbose) select id, max(value) from metric_dist where id = any((select array_agg(id) from metric_name where name like 'cpu%')::int[]) - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by id order by id ; @@ -206,7 +257,7 @@ analyze metric_location; select id, max(value) from metric_dist natural join metric_location natural join metric_name where name like 'cpu%' and texteq(location, 'Yerevan') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by id ; @@ -214,7 +265,7 @@ explain (costs off, verbose) select id, max(value) from metric_dist natural join metric_location natural join metric_name where name like 'cpu%' and texteq(location, 'Yerevan') - and ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' + and ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' group by id ; @@ -222,7 +273,7 @@ group by id -- stats. 
create table max_value_times as select distinct on (id) id, ts from metric_dist -where ts between '2022-02-02 02:02:02+03' and '2022-02-03 02:02:02+03' +where ts between '2022-02-02 02:02:02+03' and '2022-03-03 02:02:02+03' order by id, value desc ; analyze max_value_times; @@ -233,3 +284,14 @@ from metric_dist natural join max_value_times natural join metric_name where name like 'cpu%' order by 1 ; + +-- Two distributed hypertables, each joined to reference and all joined together. +-- The query finds the percentage of time points where one metric is higher than +-- another, and also tweaked not to use initplans. Requires hash join. +explain (analyze, verbose, costs off, timing off, summary off) +select count(*) filter (where m1.value > m2.value) / count(*) +from metric_dist m1 +join metric_dist m2 using (ts) +where m1.id in (select id from metric_name where name = 'cpu1') + and m2.id in (select id from metric_name where name = 'cpu3') +; diff --git a/tsl/test/sql/dist_query.sql.in b/tsl/test/sql/dist_query.sql.in index e37cbf9d6..1ea83b9a4 100644 --- a/tsl/test/sql/dist_query.sql.in +++ b/tsl/test/sql/dist_query.sql.in @@ -43,6 +43,11 @@ SET enable_partitionwise_aggregate = ON; -- Print queries to stdout and send to server \set OUTPUT_CMD '\\p \\g' +-- For predictable plans +set enable_hashjoin to off; +set enable_mergejoin to off; +set enable_hashagg to off; + --------------------------------------------------------------------- -- EXPLAINs without ordering ---------------------------------------------------------------------