diff --git a/CHANGELOG.md b/CHANGELOG.md index 675939a5b..d4536559d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,11 +23,13 @@ that a backfill can trigger. By default, all invalidations are processed. **Bugfixes** * #1591 Fix locf treat_null_as_missing option * #1594 Fix error in compression constraint check +* #1606 Fix constify params during runtime exclusion * #1607 Delete compression policy when drop hypertable * #1608 Add jobs to timescaledb_information.policy_stats **Thanks** * @optijon for reporting an issue with locf treat_null_as_missing option +* @acarrera42 for reporting an issue with constify params during runtime exclusion * @ChristopherZellermann for reporting an issue with the compression constraint check ## 1.5.1 (2019-11-12) diff --git a/src/chunk_append/exec.c b/src/chunk_append/exec.c index 045c17b52..4c4750b80 100644 --- a/src/chunk_append/exec.c +++ b/src/chunk_append/exec.c @@ -723,6 +723,11 @@ constify_param_mutator(Node *node, void *context) if (node == NULL) return NULL; + /* Don't descend into subplans to constify their parameters, because they may not be valid yet + */ + if (IsA(node, SubPlan)) + return node; + if (IsA(node, Param)) { Param *param = castNode(Param, node); diff --git a/test/expected/append-10.out b/test/expected/append-10.out index fe60f45b7..9acbc3d2c 100644 --- a/test/expected/append-10.out +++ b/test/expected/append-10.out @@ -106,14 +106,14 @@ INSERT INTO metrics_timestamptz SELECT generate_series('2000-01-01'::date, '2000 INSERT INTO metrics_timestamptz SELECT generate_series('2000-01-01'::date, '2000-02-01'::date, '5m'::interval), 3; ANALYZE metrics_timestamptz; -- create space partitioned hypertable -CREATE TABLE metrics_space(time timestamptz NOT NULL, device_id int NOT NULL, v1 float, v2 float); +CREATE TABLE metrics_space(time timestamptz NOT NULL, device_id int NOT NULL, v1 float, v2 float, v3 text); SELECT create_hypertable('metrics_space','time','device_id',3); create_hypertable ---------------------------- (6,public,metrics_space,t) (1 row) -INSERT INTO metrics_space SELECT time, device_id, device_id + 0.25, device_id + 0.75 FROM generate_series('2000-01-01'::date, '2000-01-14'::date, '5m'::interval) g1(time), generate_series(1,10,1) g2(device_id); +INSERT INTO metrics_space SELECT time, device_id, device_id + 0.25, device_id + 0.75, device_id FROM generate_series('2000-01-01'::date, '2000-01-14'::date, '5m'::interval) g1(time), generate_series(1,10,1) g2(device_id); ANALYZE metrics_space; \ir :TEST_QUERY_NAME -- This file and its contents are licensed under the Apache License 2.0. 
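Note on the src/chunk_append/exec.c hunk above: during runtime chunk exclusion, constify_param_mutator walks the chunk restriction clauses and substitutes executor parameter values that are already known, so the clauses can be evaluated before the chunks are scanned. When a clause contains a SubPlan (for example the hashed subplan produced by `v3 NOT IN (VALUES ('1'))`), that SubPlan's output parameters are only assigned once the subplan itself runs, so descending into it at exclusion time touches parameters that are not valid yet; the patch therefore returns SubPlan nodes unmodified. The sketch below illustrates the resulting mutator shape using the standard PostgreSQL expression_tree_mutator walker; only the SubPlan guard is taken from the quoted hunk, the rest is an assumed outline that elides the actual Param-to-Const conversion done in exec.c.

```c
#include "postgres.h"
#include "nodes/nodeFuncs.h"	/* expression_tree_mutator, IsA, node types */

/* Sketch only: the real context struct and Param handling live in exec.c. */
static Node *
constify_param_mutator(Node *node, void *context)
{
	if (node == NULL)
		return NULL;

	/*
	 * Guard added by this patch: a SubPlan's output parameters are not set
	 * until the subplan has been executed, so return the node unchanged
	 * instead of recursing into it and constifying parameters that may not
	 * be valid yet.
	 */
	if (IsA(node, SubPlan))
		return node;

	if (IsA(node, Param))
	{
		/*
		 * Elided: the real function replaces Params whose values are
		 * already available in the executor with Const nodes so the
		 * restriction clause can be evaluated for chunk exclusion.
		 */
		return node;
	}

	return expression_tree_mutator(node, constify_param_mutator, context);
}
```

The new append_query.sql cases further down exercise exactly this shape: `device_id IN (VALUES(1))` and `v3 IN (VALUES('1'))` plan as joins against the VALUES scan, while `time = (VALUES (...)) AND v3 NOT IN (VALUES ('1'))` produces an InitPlan plus a hashed SubPlan under ChunkAppend, which is the case the guard protects.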
@@ -1278,6 +1278,104 @@ reset enable_material; Rows Removed by Filter: 1155 (22 rows) +:PREFIX SELECT time FROM metrics_space WHERE time > '2000-01-10'::timestamptz AND device_id IN (VALUES(1)) ORDER BY time; + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------- + Sort (actual rows=1152 loops=1) + Sort Key: _hyper_6_25_chunk."time" + Sort Method: quicksort + -> Nested Loop (actual rows=1152 loops=1) + -> HashAggregate (actual rows=1 loops=1) + Group Key: 1 + -> Result (actual rows=1 loops=1) + -> Append (actual rows=1152 loops=1) + -> Index Only Scan using _hyper_6_25_chunk_metrics_space_device_id_time_idx on _hyper_6_25_chunk (actual rows=767 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 767 + -> Index Only Scan using _hyper_6_26_chunk_metrics_space_device_id_time_idx on _hyper_6_26_chunk (actual rows=0 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 0 + -> Index Only Scan using _hyper_6_27_chunk_metrics_space_device_id_time_idx on _hyper_6_27_chunk (actual rows=0 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 0 + -> Index Only Scan using _hyper_6_28_chunk_metrics_space_device_id_time_idx on _hyper_6_28_chunk (actual rows=385 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 385 + -> Index Only Scan using _hyper_6_29_chunk_metrics_space_device_id_time_idx on _hyper_6_29_chunk (actual rows=0 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 0 + -> Index Only Scan using _hyper_6_30_chunk_metrics_space_device_id_time_idx on _hyper_6_30_chunk (actual rows=0 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 0 +(26 rows) + +:PREFIX SELECT time FROM metrics_space WHERE time > '2000-01-10'::timestamptz AND v3 IN (VALUES('1')) ORDER BY time; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------- + Sort (actual rows=1152 loops=1) + Sort Key: _hyper_6_25_chunk."time" + Sort Method: quicksort + -> Hash Semi Join (actual rows=1152 loops=1) + Hash Cond: (_hyper_6_25_chunk.v3 = ('1'::text)) + -> Append (actual rows=11520 loops=1) + -> Index Scan Backward using _hyper_6_25_chunk_metrics_space_time_idx on _hyper_6_25_chunk (actual rows=3068 loops=1) + Index Cond: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_6_26_chunk_metrics_space_time_idx on _hyper_6_26_chunk (actual rows=3068 loops=1) + Index Cond: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_6_27_chunk_metrics_space_time_idx on _hyper_6_27_chunk (actual rows=1534 loops=1) + Index Cond: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Seq Scan on _hyper_6_28_chunk (actual rows=1540 loops=1) + Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Seq Scan on _hyper_6_29_chunk (actual rows=1540 loops=1) + Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Seq Scan on 
_hyper_6_30_chunk (actual rows=770 loops=1) + Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Hash (actual rows=1 loops=1) + Buckets: 1024 Batches: 1 + -> Result (actual rows=1 loops=1) +(21 rows) + +:PREFIX SELECT * FROM metrics_space +WHERE time = (VALUES ('2019-12-24' at time zone 'UTC')) + AND v3 NOT IN (VALUES ('1')); + QUERY PLAN +------------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on metrics_space (actual rows=0 loops=1) + Chunks excluded during startup: 0 + Chunks excluded during runtime: 9 + InitPlan 1 (returns $0) + -> Result (actual rows=1 loops=1) + -> Index Scan using _hyper_6_22_chunk_metrics_space_time_idx on _hyper_6_22_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + SubPlan 2 + -> Result (never executed) + -> Index Scan using _hyper_6_23_chunk_metrics_space_time_idx on _hyper_6_23_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_24_chunk_metrics_space_time_idx on _hyper_6_24_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_25_chunk_metrics_space_time_idx on _hyper_6_25_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_26_chunk_metrics_space_time_idx on _hyper_6_26_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_27_chunk_metrics_space_time_idx on _hyper_6_27_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_28_chunk_metrics_space_time_idx on _hyper_6_28_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_29_chunk_metrics_space_time_idx on _hyper_6_29_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_30_chunk_metrics_space_time_idx on _hyper_6_30_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) +(34 rows) + -- test CURRENT_DATE -- should be 0 chunks :PREFIX SELECT time FROM metrics_date WHERE time > CURRENT_DATE ORDER BY time; @@ -1530,31 +1628,31 @@ WHERE time > '2000-01-10'::timestamptz ORDER BY time DESC, device_id; QUERY PLAN ------------------------------------------------------------------------------------------- - Sort (actual rows=557 loops=1) + Sort (actual rows=522 loops=1) Sort Key: _hyper_6_30_chunk."time" DESC, _hyper_6_30_chunk.device_id Sort Method: quicksort - -> Append (actual rows=557 loops=1) - -> Sample Scan on _hyper_6_30_chunk (actual rows=40 loops=1) + -> Append (actual rows=522 loops=1) + -> Sample Scan on _hyper_6_30_chunk (actual rows=35 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) - -> Sample Scan on _hyper_6_29_chunk (actual rows=68 loops=1) + -> Sample Scan on _hyper_6_29_chunk (actual rows=61 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) - -> Sample Scan on _hyper_6_28_chunk (actual rows=68 loops=1) + -> Sample Scan on _hyper_6_28_chunk (actual rows=61 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp 
with time zone) -> Sample Scan on _hyper_6_27_chunk (actual rows=65 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 113 - -> Sample Scan on _hyper_6_26_chunk (actual rows=158 loops=1) + -> Sample Scan on _hyper_6_26_chunk (actual rows=150 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) - Rows Removed by Filter: 220 - -> Sample Scan on _hyper_6_25_chunk (actual rows=158 loops=1) + Rows Removed by Filter: 218 + -> Sample Scan on _hyper_6_25_chunk (actual rows=150 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) - Rows Removed by Filter: 220 + Rows Removed by Filter: 218 (25 rows) -- test runtime exclusion @@ -2029,7 +2127,7 @@ RESET enable_hashagg; -- to trigger this we need a Sort node that is below ChunkAppend CREATE TABLE join_limit (time timestamptz, device_id int); SELECT table_name FROM create_hypertable('join_limit','time',create_default_indexes:=false); -psql:include/append_query.sql:309: NOTICE: adding not-null constraint to column "time" +psql:include/append_query.sql:315: NOTICE: adding not-null constraint to column "time" table_name ------------ join_limit diff --git a/test/expected/append-11.out b/test/expected/append-11.out index 1faf96282..9dd893637 100644 --- a/test/expected/append-11.out +++ b/test/expected/append-11.out @@ -106,14 +106,14 @@ INSERT INTO metrics_timestamptz SELECT generate_series('2000-01-01'::date, '2000 INSERT INTO metrics_timestamptz SELECT generate_series('2000-01-01'::date, '2000-02-01'::date, '5m'::interval), 3; ANALYZE metrics_timestamptz; -- create space partitioned hypertable -CREATE TABLE metrics_space(time timestamptz NOT NULL, device_id int NOT NULL, v1 float, v2 float); +CREATE TABLE metrics_space(time timestamptz NOT NULL, device_id int NOT NULL, v1 float, v2 float, v3 text); SELECT create_hypertable('metrics_space','time','device_id',3); create_hypertable ---------------------------- (6,public,metrics_space,t) (1 row) -INSERT INTO metrics_space SELECT time, device_id, device_id + 0.25, device_id + 0.75 FROM generate_series('2000-01-01'::date, '2000-01-14'::date, '5m'::interval) g1(time), generate_series(1,10,1) g2(device_id); +INSERT INTO metrics_space SELECT time, device_id, device_id + 0.25, device_id + 0.75, device_id FROM generate_series('2000-01-01'::date, '2000-01-14'::date, '5m'::interval) g1(time), generate_series(1,10,1) g2(device_id); ANALYZE metrics_space; \ir :TEST_QUERY_NAME -- This file and its contents are licensed under the Apache License 2.0. 
@@ -1278,6 +1278,104 @@ reset enable_material; Rows Removed by Filter: 1155 (22 rows) +:PREFIX SELECT time FROM metrics_space WHERE time > '2000-01-10'::timestamptz AND device_id IN (VALUES(1)) ORDER BY time; + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------- + Sort (actual rows=1152 loops=1) + Sort Key: _hyper_6_25_chunk."time" + Sort Method: quicksort + -> Nested Loop (actual rows=1152 loops=1) + -> HashAggregate (actual rows=1 loops=1) + Group Key: 1 + -> Result (actual rows=1 loops=1) + -> Append (actual rows=1152 loops=1) + -> Index Only Scan using _hyper_6_25_chunk_metrics_space_device_id_time_idx on _hyper_6_25_chunk (actual rows=767 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 767 + -> Index Only Scan using _hyper_6_26_chunk_metrics_space_device_id_time_idx on _hyper_6_26_chunk (actual rows=0 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 0 + -> Index Only Scan using _hyper_6_27_chunk_metrics_space_device_id_time_idx on _hyper_6_27_chunk (actual rows=0 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 0 + -> Index Only Scan using _hyper_6_28_chunk_metrics_space_device_id_time_idx on _hyper_6_28_chunk (actual rows=385 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 385 + -> Index Only Scan using _hyper_6_29_chunk_metrics_space_device_id_time_idx on _hyper_6_29_chunk (actual rows=0 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 0 + -> Index Only Scan using _hyper_6_30_chunk_metrics_space_device_id_time_idx on _hyper_6_30_chunk (actual rows=0 loops=1) + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + Heap Fetches: 0 +(26 rows) + +:PREFIX SELECT time FROM metrics_space WHERE time > '2000-01-10'::timestamptz AND v3 IN (VALUES('1')) ORDER BY time; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------- + Sort (actual rows=1152 loops=1) + Sort Key: _hyper_6_25_chunk."time" + Sort Method: quicksort + -> Hash Semi Join (actual rows=1152 loops=1) + Hash Cond: (_hyper_6_25_chunk.v3 = ('1'::text)) + -> Append (actual rows=11520 loops=1) + -> Index Scan Backward using _hyper_6_25_chunk_metrics_space_time_idx on _hyper_6_25_chunk (actual rows=3068 loops=1) + Index Cond: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_6_26_chunk_metrics_space_time_idx on _hyper_6_26_chunk (actual rows=3068 loops=1) + Index Cond: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_6_27_chunk_metrics_space_time_idx on _hyper_6_27_chunk (actual rows=1534 loops=1) + Index Cond: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Seq Scan on _hyper_6_28_chunk (actual rows=1540 loops=1) + Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Seq Scan on _hyper_6_29_chunk (actual rows=1540 loops=1) + Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Seq Scan on 
_hyper_6_30_chunk (actual rows=770 loops=1) + Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Hash (actual rows=1 loops=1) + Buckets: 1024 Batches: 1 + -> Result (actual rows=1 loops=1) +(21 rows) + +:PREFIX SELECT * FROM metrics_space +WHERE time = (VALUES ('2019-12-24' at time zone 'UTC')) + AND v3 NOT IN (VALUES ('1')); + QUERY PLAN +------------------------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on metrics_space (actual rows=0 loops=1) + Chunks excluded during startup: 0 + Chunks excluded during runtime: 9 + InitPlan 1 (returns $0) + -> Result (actual rows=1 loops=1) + -> Index Scan using _hyper_6_22_chunk_metrics_space_time_idx on _hyper_6_22_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + SubPlan 2 + -> Result (never executed) + -> Index Scan using _hyper_6_23_chunk_metrics_space_time_idx on _hyper_6_23_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_24_chunk_metrics_space_time_idx on _hyper_6_24_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_25_chunk_metrics_space_time_idx on _hyper_6_25_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_26_chunk_metrics_space_time_idx on _hyper_6_26_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_27_chunk_metrics_space_time_idx on _hyper_6_27_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_28_chunk_metrics_space_time_idx on _hyper_6_28_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_29_chunk_metrics_space_time_idx on _hyper_6_29_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_30_chunk_metrics_space_time_idx on _hyper_6_30_chunk (never executed) + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) +(34 rows) + -- test CURRENT_DATE -- should be 0 chunks :PREFIX SELECT time FROM metrics_date WHERE time > CURRENT_DATE ORDER BY time; @@ -1530,31 +1628,31 @@ WHERE time > '2000-01-10'::timestamptz ORDER BY time DESC, device_id; QUERY PLAN ------------------------------------------------------------------------------------------- - Sort (actual rows=557 loops=1) + Sort (actual rows=522 loops=1) Sort Key: _hyper_6_30_chunk."time" DESC, _hyper_6_30_chunk.device_id Sort Method: quicksort - -> Append (actual rows=557 loops=1) - -> Sample Scan on _hyper_6_30_chunk (actual rows=40 loops=1) + -> Append (actual rows=522 loops=1) + -> Sample Scan on _hyper_6_30_chunk (actual rows=35 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) - -> Sample Scan on _hyper_6_29_chunk (actual rows=68 loops=1) + -> Sample Scan on _hyper_6_29_chunk (actual rows=61 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) - -> Sample Scan on _hyper_6_28_chunk (actual rows=68 loops=1) + -> Sample Scan on _hyper_6_28_chunk (actual rows=61 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp 
with time zone) -> Sample Scan on _hyper_6_27_chunk (actual rows=65 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 113 - -> Sample Scan on _hyper_6_26_chunk (actual rows=158 loops=1) + -> Sample Scan on _hyper_6_26_chunk (actual rows=150 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) - Rows Removed by Filter: 220 - -> Sample Scan on _hyper_6_25_chunk (actual rows=158 loops=1) + Rows Removed by Filter: 218 + -> Sample Scan on _hyper_6_25_chunk (actual rows=150 loops=1) Sampling: bernoulli ('5'::real) REPEATABLE ('0'::double precision) Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) - Rows Removed by Filter: 220 + Rows Removed by Filter: 218 (25 rows) -- test runtime exclusion @@ -2029,7 +2127,7 @@ RESET enable_hashagg; -- to trigger this we need a Sort node that is below ChunkAppend CREATE TABLE join_limit (time timestamptz, device_id int); SELECT table_name FROM create_hypertable('join_limit','time',create_default_indexes:=false); -psql:include/append_query.sql:309: NOTICE: adding not-null constraint to column "time" +psql:include/append_query.sql:315: NOTICE: adding not-null constraint to column "time" table_name ------------ join_limit diff --git a/test/expected/append-9.6.out b/test/expected/append-9.6.out index 1d9acb76c..ee71a4b91 100644 --- a/test/expected/append-9.6.out +++ b/test/expected/append-9.6.out @@ -106,14 +106,14 @@ INSERT INTO metrics_timestamptz SELECT generate_series('2000-01-01'::date, '2000 INSERT INTO metrics_timestamptz SELECT generate_series('2000-01-01'::date, '2000-02-01'::date, '5m'::interval), 3; ANALYZE metrics_timestamptz; -- create space partitioned hypertable -CREATE TABLE metrics_space(time timestamptz NOT NULL, device_id int NOT NULL, v1 float, v2 float); +CREATE TABLE metrics_space(time timestamptz NOT NULL, device_id int NOT NULL, v1 float, v2 float, v3 text); SELECT create_hypertable('metrics_space','time','device_id',3); create_hypertable ---------------------------- (6,public,metrics_space,t) (1 row) -INSERT INTO metrics_space SELECT time, device_id, device_id + 0.25, device_id + 0.75 FROM generate_series('2000-01-01'::date, '2000-01-14'::date, '5m'::interval) g1(time), generate_series(1,10,1) g2(device_id); +INSERT INTO metrics_space SELECT time, device_id, device_id + 0.25, device_id + 0.75, device_id FROM generate_series('2000-01-01'::date, '2000-01-14'::date, '5m'::interval) g1(time), generate_series(1,10,1) g2(device_id); ANALYZE metrics_space; \ir :TEST_QUERY_NAME -- This file and its contents are licensed under the Apache License 2.0. 
@@ -1101,6 +1101,94 @@ reset enable_material; Index Cond: ((device_id = ANY ('{1,2}'::integer[])) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) (11 rows) +:PREFIX SELECT time FROM metrics_space WHERE time > '2000-01-10'::timestamptz AND device_id IN (VALUES(1)) ORDER BY time; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Sort + Sort Key: _hyper_6_25_chunk."time" + -> Nested Loop + -> HashAggregate + Group Key: 1 + -> Result + -> Append + -> Index Only Scan using _hyper_6_25_chunk_metrics_space_device_id_time_idx on _hyper_6_25_chunk + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + -> Index Only Scan using _hyper_6_26_chunk_metrics_space_device_id_time_idx on _hyper_6_26_chunk + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + -> Index Only Scan using _hyper_6_27_chunk_metrics_space_device_id_time_idx on _hyper_6_27_chunk + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + -> Index Only Scan using _hyper_6_28_chunk_metrics_space_device_id_time_idx on _hyper_6_28_chunk + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + -> Index Only Scan using _hyper_6_29_chunk_metrics_space_device_id_time_idx on _hyper_6_29_chunk + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) + -> Index Only Scan using _hyper_6_30_chunk_metrics_space_device_id_time_idx on _hyper_6_30_chunk + Index Cond: ((device_id = (1)) AND ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone)) +(19 rows) + +:PREFIX SELECT time FROM metrics_space WHERE time > '2000-01-10'::timestamptz AND v3 IN (VALUES('1')) ORDER BY time; + QUERY PLAN +----------------------------------------------------------------------------------------------------------- + Sort + Sort Key: _hyper_6_25_chunk."time" + -> Hash Semi Join + Hash Cond: (_hyper_6_25_chunk.v3 = ('1'::text)) + -> Append + -> Index Scan Backward using _hyper_6_25_chunk_metrics_space_time_idx on _hyper_6_25_chunk + Index Cond: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_6_26_chunk_metrics_space_time_idx on _hyper_6_26_chunk + Index Cond: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_6_27_chunk_metrics_space_time_idx on _hyper_6_27_chunk + Index Cond: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Seq Scan on _hyper_6_28_chunk + Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Seq Scan on _hyper_6_29_chunk + Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Seq Scan on _hyper_6_30_chunk + Filter: ("time" > 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) + -> Hash + -> Result +(19 rows) + +:PREFIX SELECT * FROM metrics_space +WHERE time = (VALUES ('2019-12-24' at time zone 'UTC')) + AND v3 NOT IN (VALUES ('1')); + QUERY PLAN +-------------------------------------------------------------------------------------- + Custom Scan (ChunkAppend) on metrics_space + Chunks excluded during startup: 0 + InitPlan 1 (returns $0) + -> Result + -> Index Scan using _hyper_6_22_chunk_metrics_space_time_idx on _hyper_6_22_chunk + Index Cond: ("time" = $0) + Filter: (NOT (hashed 
SubPlan 2)) + SubPlan 2 + -> Result + -> Index Scan using _hyper_6_23_chunk_metrics_space_time_idx on _hyper_6_23_chunk + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_24_chunk_metrics_space_time_idx on _hyper_6_24_chunk + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_25_chunk_metrics_space_time_idx on _hyper_6_25_chunk + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_26_chunk_metrics_space_time_idx on _hyper_6_26_chunk + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_27_chunk_metrics_space_time_idx on _hyper_6_27_chunk + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_28_chunk_metrics_space_time_idx on _hyper_6_28_chunk + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_29_chunk_metrics_space_time_idx on _hyper_6_29_chunk + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) + -> Index Scan using _hyper_6_30_chunk_metrics_space_time_idx on _hyper_6_30_chunk + Index Cond: ("time" = $0) + Filter: (NOT (hashed SubPlan 2)) +(33 rows) + -- test CURRENT_DATE -- should be 0 chunks :PREFIX SELECT time FROM metrics_date WHERE time > CURRENT_DATE ORDER BY time; @@ -1725,7 +1813,7 @@ RESET enable_hashagg; -- to trigger this we need a Sort node that is below ChunkAppend CREATE TABLE join_limit (time timestamptz, device_id int); SELECT table_name FROM create_hypertable('join_limit','time',create_default_indexes:=false); -psql:include/append_query.sql:309: NOTICE: adding not-null constraint to column "time" +psql:include/append_query.sql:315: NOTICE: adding not-null constraint to column "time" table_name ------------ join_limit diff --git a/test/sql/include/append_load.sql b/test/sql/include/append_load.sql index d893d9916..124ca034b 100644 --- a/test/sql/include/append_load.sql +++ b/test/sql/include/append_load.sql @@ -71,8 +71,8 @@ INSERT INTO metrics_timestamptz SELECT generate_series('2000-01-01'::date, '2000 ANALYZE metrics_timestamptz; -- create space partitioned hypertable -CREATE TABLE metrics_space(time timestamptz NOT NULL, device_id int NOT NULL, v1 float, v2 float); +CREATE TABLE metrics_space(time timestamptz NOT NULL, device_id int NOT NULL, v1 float, v2 float, v3 text); SELECT create_hypertable('metrics_space','time','device_id',3); -INSERT INTO metrics_space SELECT time, device_id, device_id + 0.25, device_id + 0.75 FROM generate_series('2000-01-01'::date, '2000-01-14'::date, '5m'::interval) g1(time), generate_series(1,10,1) g2(device_id); +INSERT INTO metrics_space SELECT time, device_id, device_id + 0.25, device_id + 0.75, device_id FROM generate_series('2000-01-01'::date, '2000-01-14'::date, '5m'::interval) g1(time), generate_series(1,10,1) g2(device_id); ANALYZE metrics_space; diff --git a/test/sql/include/append_query.sql b/test/sql/include/append_query.sql index 7e872d144..59c642feb 100644 --- a/test/sql/include/append_query.sql +++ b/test/sql/include/append_query.sql @@ -208,6 +208,12 @@ reset enable_material; -- test filtering on space partition :PREFIX SELECT time FROM metrics_space WHERE time > '2000-01-10'::timestamptz AND device_id = 1 ORDER BY time; :PREFIX SELECT time FROM metrics_space WHERE time > '2000-01-10'::timestamptz AND device_id IN (1,2) ORDER BY time; +:PREFIX SELECT time FROM metrics_space WHERE time > '2000-01-10'::timestamptz AND device_id IN (VALUES(1)) ORDER BY time; +:PREFIX SELECT 
time FROM metrics_space WHERE time > '2000-01-10'::timestamptz AND v3 IN (VALUES('1')) ORDER BY time; + +:PREFIX SELECT * FROM metrics_space +WHERE time = (VALUES ('2019-12-24' at time zone 'UTC')) + AND v3 NOT IN (VALUES ('1')); -- test CURRENT_DATE -- should be 0 chunks