Mirror of https://github.com/timescale/timescaledb.git, synced 2025-05-17 19:13:16 +08:00
Fix join on time column of compressed chunk
Do not allow paths that are parameterized on a compressed column to exist when creating paths for a compressed chunk.
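A minimal sketch of the affected query shape, adapted from the regression test added in this commit (the metrics and metrics_compressed tables are the ones provided by the shared test suite):

    -- Build a small plain table holding a few rows of the hypertable.
    CREATE TABLE compressed_join_temp AS
      SELECT * FROM metrics ORDER BY time DESC LIMIT 10;
    ANALYZE compressed_join_temp;

    -- Joining back on the time column of the compressed hypertable could
    -- previously pick a path parameterized on the compressed time column,
    -- which failed at execution; such paths are now skipped.
    SELECT *
    FROM compressed_join_temp t
    INNER JOIN metrics_compressed m
      ON t.time = m.time AND t.device_id = m.device_id
    LIMIT 1;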
This commit is contained in:
parent 940187936c
commit 1d4b9d6977
@@ -25,6 +25,7 @@ argument or resolve the type ambiguity by casting to the intended type.
 * #4685 Improve chunk exclusion for space dimensions
 * #4696 Report warning when enabling compression on hypertable
 * #4720 Fix chunk exclusion for prepared statements and dst changes
+* #4737 Fix join on time column of compressed chunk
 * #4738 Fix the assorted epoll_ctl() errors that could occur with COPY into a distributed hypertable
 * #4739 Fix continuous aggregate migrate check constraint
 * #4745 Fix FK constraint violation error while insert into hypertable which references partitioned table
@@ -33,6 +34,7 @@ argument or resolve the type ambiguity by casting to the intended type.
 
 **Thanks**
 * @boxhock and @cocowalla for reporting a segfault when JOINing hypertables
+* @choisnetm, @dustinsorensen, @jayadevanm and @joeyberkovitz for reporting a problem with JOINs on compressed hypertables
 * @maxtwardowski for reporting problems with chunk exclusion and space dimensions
 * @yuezhihan for reporting GROUP BY error when setting compress_segmentby with an enum column
 * @carobme for reporting constraint error during continuous aggregate migration
@@ -60,6 +60,16 @@ static void decompress_chunk_add_plannerinfo(PlannerInfo *root, CompressionInfo
 static SortInfo build_sortinfo(Chunk *chunk, RelOptInfo *chunk_rel, CompressionInfo *info,
 							   List *pathkeys);
 
+static bool
+is_compressed_column(CompressionInfo *info, AttrNumber attno)
+{
+	char *column_name = get_attname(info->compressed_rte->relid, attno, false);
+	FormData_hypertable_compression *column_info =
+		get_column_compressioninfo(info->hypertable_compression_info, column_name);
+
+	return column_info->algo_id != 0;
+}
+
 /*
  * Like ts_make_pathkey_from_sortop but passes down the compressed relid so that existing
  * equivalence members that are marked as children are properly checked.
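The helper classifies a column of the internal compressed chunk as compressed when its algorithm id is non-zero; segmentby columns are stored as plain values and keep an algo_id of 0, so parameterizing on them remains safe. An illustrative way to inspect this per column, assuming the catalog table behind the FormData_hypertable_compression struct above (table and column names are taken from that struct and may differ between versions):

    -- Illustration only: list which columns of a hypertable carry a
    -- compression algorithm (algo_id <> 0) versus segmentby columns (algo_id = 0).
    SELECT attname, algo_id
    FROM _timescaledb_catalog.hypertable_compression
    WHERE hypertable_id = (SELECT id FROM _timescaledb_catalog.hypertable
                           WHERE table_name = 'metrics');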
@@ -438,6 +448,46 @@ ts_decompress_chunk_generate_paths(PlannerInfo *root, RelOptInfo *chunk_rel, Hyp
 			 */
 			if (bms_overlap(parent_relids, child_path->param_info->ppi_req_outer))
 				continue;
+
+			ListCell *lc_ri;
+			bool references_compressed = false;
+			/*
+			 * Check if this path is parameterized on a compressed
+			 * column. Ideally those paths wouldn't be generated
+			 * in the first place but since we create compressed
+			 * EquivalenceMembers for all EquivalenceClasses these
+			 * Paths can happen and will fail at execution since
+			 * the left and right side of the expression are not
+			 * compatible. Therefore we skip any Path that is
+			 * parameterized on a compressed column here.
+			 */
+			foreach (lc_ri, child_path->param_info->ppi_clauses)
+			{
+				RestrictInfo *ri = lfirst_node(RestrictInfo, lc_ri);
+
+				if (ri->right_em && IsA(ri->right_em->em_expr, Var) &&
+					castNode(Var, ri->right_em->em_expr)->varno == info->compressed_rel->relid)
+				{
+					Var *var = castNode(Var, ri->right_em->em_expr);
+					if (is_compressed_column(info, var->varattno))
+					{
+						references_compressed = true;
+						break;
+					}
+				}
+				if (ri->left_em && IsA(ri->left_em->em_expr, Var) &&
+					castNode(Var, ri->left_em->em_expr)->varno == info->compressed_rel->relid)
+				{
+					Var *var = castNode(Var, ri->left_em->em_expr);
+					if (is_compressed_column(info, var->varattno))
+					{
+						references_compressed = true;
+						break;
+					}
+				}
+			}
+			if (references_compressed)
+				continue;
 		}
 
 		path = decompress_chunk_path_create(root, info, 0, child_path);
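The effect of this check shows up in the expected-output changes below: clauses comparing a compressed (non-segmentby) column such as v0 or time can no longer be pushed into a parameterized DecompressChunk scan and are instead evaluated as join filters above it. A hypothetical query shape that previously produced such a parameterized path (assuming, as in the tests, that device_id is the segmentby column and v0 a compressed value column of the metrics hypertable):

    -- Parameterization on device_id (segmentby, stored plain) is still allowed;
    -- parameterization on the compressed column v0 is now skipped, so the
    -- v0 clause surfaces as a join filter above the DecompressChunk node.
    EXPLAIN (costs off)
    SELECT *
    FROM (VALUES (1, 1), (2, 2)) q(node, v0)
    JOIN metrics met
      ON met.device_id = q.node AND met.v0 = q.v0;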
@@ -2439,14 +2439,34 @@ FROM :TEST_TABLE m1
      m1.device_id
 LIMIT 10;
                                                                  QUERY PLAN
---------------------------------------------------------------------------------------------------------------------------------------------
+-------------------------------------------------------------------------------------------------------------------------------------------------
 Limit (actual rows=10 loops=1)
   ->  Incremental Sort (actual rows=10 loops=1)
         Sort Key: m1."time", m1.device_id
         Presorted Key: m1."time"
         Full-sort Groups: 1 Sort Method: quicksort
         ->  Merge Join (actual rows=11 loops=1)
-              Merge Cond: (m1."time" = m3_1."time")
+              Merge Cond: (m3_1."time" = m1."time")
+              ->  Merge Append (actual rows=3 loops=1)
+                    Sort Key: m3_1."time"
+                    ->  Sort (actual rows=3 loops=1)
+                          Sort Key: m3_1."time"
+                          Sort Method: quicksort
+                          ->  Custom Scan (DecompressChunk) on _hyper_1_1_chunk m3_1 (actual rows=360 loops=1)
+                                ->  Seq Scan on compress_hyper_5_15_chunk compress_hyper_5_15_chunk_2 (actual rows=1 loops=1)
+                                      Filter: (device_id = 3)
+                                      Rows Removed by Filter: 4
+                    ->  Index Scan Backward using _hyper_1_2_chunk_metrics_time_idx on _hyper_1_2_chunk m3_2 (actual rows=1 loops=1)
+                          Filter: (device_id = 3)
+                          Rows Removed by Filter: 2
+                    ->  Sort (actual rows=1 loops=1)
+                          Sort Key: m3_3."time"
+                          Sort Method: quicksort
+                          ->  Custom Scan (DecompressChunk) on _hyper_1_3_chunk m3_3 (actual rows=504 loops=1)
+                                ->  Seq Scan on compress_hyper_5_16_chunk compress_hyper_5_16_chunk_2 (actual rows=1 loops=1)
+                                      Filter: (device_id = 3)
+                                      Rows Removed by Filter: 4
+              ->  Materialize (actual rows=11 loops=1)
               ->  Merge Join (actual rows=11 loops=1)
                     Merge Cond: (m1."time" = m2."time")
                     Join Filter: (m1.device_id = m2.device_id)
@@ -2476,26 +2496,6 @@ FROM :TEST_TABLE m1
                           Sort Key: m2_3."time"
                           ->  Custom Scan (DecompressChunk) on _hyper_1_3_chunk m2_3 (never executed)
                                 ->  Seq Scan on compress_hyper_5_16_chunk compress_hyper_5_16_chunk_1 (never executed)
-              ->  Materialize (actual rows=11 loops=1)
-                    ->  Merge Append (actual rows=3 loops=1)
-                          Sort Key: m3_1."time"
-                          ->  Sort (actual rows=3 loops=1)
-                                Sort Key: m3_1."time"
-                                Sort Method: quicksort
-                                ->  Custom Scan (DecompressChunk) on _hyper_1_1_chunk m3_1 (actual rows=360 loops=1)
-                                      ->  Seq Scan on compress_hyper_5_15_chunk compress_hyper_5_15_chunk_2 (actual rows=1 loops=1)
-                                            Filter: (device_id = 3)
-                                            Rows Removed by Filter: 4
-                          ->  Index Scan Backward using _hyper_1_2_chunk_metrics_time_idx on _hyper_1_2_chunk m3_2 (actual rows=1 loops=1)
-                                Filter: (device_id = 3)
-                                Rows Removed by Filter: 2
-                          ->  Sort (actual rows=1 loops=1)
-                                Sort Key: m3_3."time"
-                                Sort Method: quicksort
-                                ->  Custom Scan (DecompressChunk) on _hyper_1_3_chunk m3_3 (actual rows=504 loops=1)
-                                      ->  Seq Scan on compress_hyper_5_16_chunk compress_hyper_5_16_chunk_2 (actual rows=1 loops=1)
-                                            Filter: (device_id = 3)
-                                            Rows Removed by Filter: 4
 (56 rows)
 
 :PREFIX
@@ -589,16 +589,17 @@ ON met.device_id = lookup.did and met.v0 = lookup.version
 WHERE met.time > '2000-01-19 19:00:00-05'
 and met.time < '2000-01-20 20:00:00-05';
                                                                                             QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
 Nested Loop (actual rows=2 loops=1)
+   Join Filter: ((met.device_id = "*VALUES*".column1) AND (met.v0 = "*VALUES*".column2))
+   Rows Removed by Join Filter: 92
   ->  Values Scan on "*VALUES*" (actual rows=2 loops=1)
-  ->  Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=1 loops=2)
-        Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND ("*VALUES*".column1 = device_id) AND ("*VALUES*".column2 = v0))
-        Rows Removed by Filter: 47
+  ->  Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=47 loops=2)
+        Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone))
+        Rows Removed by Filter: 1
         ->  Index Scan using compress_hyper_2_8_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_8_chunk (actual rows=1 loops=2)
-              Index Cond: (device_id = "*VALUES*".column1)
               Filter: ((_ts_meta_max_1 > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone))
-(8 rows)
+(9 rows)
 
 --add filter to segment by (device_id) and compressed attr column (v0)
 :PREFIX
@@ -655,19 +656,19 @@ JOIN LATERAL
 ON met.device_id = q.node and met.device_id_peer = q.device_id_peer
 and met.v0 = q.v0 and met.v0 > 2 and time = '2018-01-19 20:00:00-05';
                                                                                  QUERY PLAN
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
 Nested Loop (actual rows=1 loops=1)
-   Join Filter: (nodetime.node = met.device_id)
+   Join Filter: (("*VALUES*".column2 = met.device_id_peer) AND ("*VALUES*".column3 = met.v0))
   ->  Nested Loop (actual rows=1 loops=1)
         Join Filter: (nodetime.node = "*VALUES*".column1)
         Rows Removed by Join Filter: 1
        ->  Seq Scan on nodetime (actual rows=1 loops=1)
        ->  Values Scan on "*VALUES*" (actual rows=2 loops=1)
   ->  Custom Scan (DecompressChunk) on _hyper_1_4_chunk met (actual rows=1 loops=1)
-        Filter: ((v0 > 2) AND ("time" = 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone) AND ("*VALUES*".column1 = device_id) AND ("*VALUES*".column2 = device_id_peer) AND ("*VALUES*".column3 = v0))
+        Filter: ((v0 > 2) AND ("time" = 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone) AND (nodetime.node = device_id))
         Rows Removed by Filter: 47
         ->  Index Scan using compress_hyper_2_9_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_9_chunk (actual rows=1 loops=1)
-              Index Cond: ((device_id = "*VALUES*".column1) AND (device_id_peer = "*VALUES*".column2))
+              Index Cond: (device_id = nodetime.node)
              Filter: ((_ts_meta_min_1 <= 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone) AND (_ts_meta_max_1 >= 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone))
 (13 rows)
 
tsl/test/shared/expected/decompress_join-12.out (new file, 35 lines)
@@ -0,0 +1,35 @@
+-- This file and its contents are licensed under the Timescale License.
+-- Please see the included NOTICE for copyright information and
+-- LICENSE-TIMESCALE for a copy of the license.
+-- disable memoize on PG14+
+SELECT CASE WHEN current_setting('server_version_num')::int/10000 >= 14 THEN set_config('enable_memoize','off',false) ELSE 'off' END AS enable_memoize;
+ enable_memoize
+ off
+(1 row)
+
+SET enable_indexscan TO false;
+-- test join on compressed time column
+-- #3079, #4465
+CREATE TABLE compressed_join_temp AS SELECT * FROM metrics ORDER BY time DESC LIMIT 10;
+ANALYZE compressed_join_temp;
+EXPLAIN (analyze,costs off,timing off,summary off) SELECT *
+FROM compressed_join_temp t
+INNER JOIN metrics_compressed m ON t.time = m.time AND t.device_id = m.device_id
+LIMIT 1;
+QUERY PLAN
+ Limit (actual rows=1 loops=1)
+   ->  Hash Join (actual rows=1 loops=1)
+         Hash Cond: ((m."time" = t."time") AND (m.device_id = t.device_id))
+         ->  Append (actual rows=43181 loops=1)
+               ->  Custom Scan (DecompressChunk) on _hyper_X_X_chunk m (actual rows=17990 loops=1)
+                     ->  Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1)
+               ->  Custom Scan (DecompressChunk) on _hyper_X_X_chunk m_1 (actual rows=25190 loops=1)
+                     ->  Seq Scan on compress_hyper_X_X_chunk (actual rows=30 loops=1)
+               ->  Custom Scan (DecompressChunk) on _hyper_X_X_chunk m_2 (actual rows=1 loops=1)
+                     ->  Seq Scan on compress_hyper_X_X_chunk (actual rows=1 loops=1)
+         ->  Hash (actual rows=10 loops=1)
+               Buckets: 1024 Batches: 1
+               ->  Seq Scan on compressed_join_temp t (actual rows=10 loops=1)
+(13 rows)
+
+DROP TABLE compressed_join_temp;
tsl/test/shared/expected/decompress_join-13.out (new file, 35 lines)
@@ -0,0 +1,35 @@
+-- This file and its contents are licensed under the Timescale License.
+-- Please see the included NOTICE for copyright information and
+-- LICENSE-TIMESCALE for a copy of the license.
+-- disable memoize on PG14+
+SELECT CASE WHEN current_setting('server_version_num')::int/10000 >= 14 THEN set_config('enable_memoize','off',false) ELSE 'off' END AS enable_memoize;
+ enable_memoize
+ off
+(1 row)
+
+SET enable_indexscan TO false;
+-- test join on compressed time column
+-- #3079, #4465
+CREATE TABLE compressed_join_temp AS SELECT * FROM metrics ORDER BY time DESC LIMIT 10;
+ANALYZE compressed_join_temp;
+EXPLAIN (analyze,costs off,timing off,summary off) SELECT *
+FROM compressed_join_temp t
+INNER JOIN metrics_compressed m ON t.time = m.time AND t.device_id = m.device_id
+LIMIT 1;
+QUERY PLAN
+ Limit (actual rows=1 loops=1)
+   ->  Hash Join (actual rows=1 loops=1)
+         Hash Cond: ((m_1."time" = t."time") AND (m_1.device_id = t.device_id))
+         ->  Append (actual rows=43181 loops=1)
+               ->  Custom Scan (DecompressChunk) on _hyper_X_X_chunk m_1 (actual rows=17990 loops=1)
+                     ->  Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1)
+               ->  Custom Scan (DecompressChunk) on _hyper_X_X_chunk m_2 (actual rows=25190 loops=1)
+                     ->  Seq Scan on compress_hyper_X_X_chunk (actual rows=30 loops=1)
+               ->  Custom Scan (DecompressChunk) on _hyper_X_X_chunk m_3 (actual rows=1 loops=1)
+                     ->  Seq Scan on compress_hyper_X_X_chunk (actual rows=1 loops=1)
+         ->  Hash (actual rows=10 loops=1)
+               Buckets: 1024 Batches: 1
+               ->  Seq Scan on compressed_join_temp t (actual rows=10 loops=1)
+(13 rows)
+
+DROP TABLE compressed_join_temp;
tsl/test/shared/expected/decompress_join-14.out (new file, 35 lines)
@@ -0,0 +1,35 @@
+-- This file and its contents are licensed under the Timescale License.
+-- Please see the included NOTICE for copyright information and
+-- LICENSE-TIMESCALE for a copy of the license.
+-- disable memoize on PG14+
+SELECT CASE WHEN current_setting('server_version_num')::int/10000 >= 14 THEN set_config('enable_memoize','off',false) ELSE 'off' END AS enable_memoize;
+ enable_memoize
+ off
+(1 row)
+
+SET enable_indexscan TO false;
+-- test join on compressed time column
+-- #3079, #4465
+CREATE TABLE compressed_join_temp AS SELECT * FROM metrics ORDER BY time DESC LIMIT 10;
+ANALYZE compressed_join_temp;
+EXPLAIN (analyze,costs off,timing off,summary off) SELECT *
+FROM compressed_join_temp t
+INNER JOIN metrics_compressed m ON t.time = m.time AND t.device_id = m.device_id
+LIMIT 1;
+QUERY PLAN
+ Limit (actual rows=1 loops=1)
+   ->  Hash Join (actual rows=1 loops=1)
+         Hash Cond: ((m_1."time" = t."time") AND (m_1.device_id = t.device_id))
+         ->  Append (actual rows=43181 loops=1)
+               ->  Custom Scan (DecompressChunk) on _hyper_X_X_chunk m_1 (actual rows=17990 loops=1)
+                     ->  Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1)
+               ->  Custom Scan (DecompressChunk) on _hyper_X_X_chunk m_2 (actual rows=25190 loops=1)
+                     ->  Seq Scan on compress_hyper_X_X_chunk (actual rows=30 loops=1)
+               ->  Custom Scan (DecompressChunk) on _hyper_X_X_chunk m_3 (actual rows=1 loops=1)
+                     ->  Seq Scan on compress_hyper_X_X_chunk (actual rows=1 loops=1)
+         ->  Hash (actual rows=10 loops=1)
+               Buckets: 1024 Batches: 1
+               ->  Seq Scan on compressed_join_temp t (actual rows=10 loops=1)
+(13 rows)
+
+DROP TABLE compressed_join_temp;
tsl/test/shared/sql/.gitignore (vendored, 1 addition)
@@ -1,5 +1,6 @@
 /continuous_aggs_compression-*.sql
 /constify_now-*.sql
+/decompress_join-*.sql
 /dist_remote_error-*.sql
 /dist_remote_error.text
 /gapfill-*.sql
@@ -14,8 +14,9 @@ set(TEST_FILES_SHARED
     subtract_integer_from_now.sql)
 
 set(TEST_TEMPLATES_SHARED
-    gapfill.sql.in generated_columns.sql.in ordered_append.sql.in
-    ordered_append_join.sql.in transparent_decompress_chunk.sql.in)
+    decompress_join.sql.in gapfill.sql.in generated_columns.sql.in
+    ordered_append.sql.in ordered_append_join.sql.in
+    transparent_decompress_chunk.sql.in)
 
 if((${PG_VERSION_MAJOR} GREATER_EQUAL "14"))
   list(APPEND TEST_FILES_SHARED compression_dml.sql memoize.sql)
tsl/test/shared/sql/decompress_join.sql.in (new file, 20 lines)
@@ -0,0 +1,20 @@
+-- This file and its contents are licensed under the Timescale License.
+-- Please see the included NOTICE for copyright information and
+-- LICENSE-TIMESCALE for a copy of the license.
+
+-- disable memoize on PG14+
+SELECT CASE WHEN current_setting('server_version_num')::int/10000 >= 14 THEN set_config('enable_memoize','off',false) ELSE 'off' END AS enable_memoize;
+SET enable_indexscan TO false;
+
+-- test join on compressed time column
+-- #3079, #4465
+CREATE TABLE compressed_join_temp AS SELECT * FROM metrics ORDER BY time DESC LIMIT 10;
+ANALYZE compressed_join_temp;
+
+EXPLAIN (analyze,costs off,timing off,summary off) SELECT *
+FROM compressed_join_temp t
+INNER JOIN metrics_compressed m ON t.time = m.time AND t.device_id = m.device_id
+LIMIT 1;
+
+DROP TABLE compressed_join_temp;