mirror of
https://github.com/timescale/timescaledb.git
synced 2025-05-15 10:11:29 +08:00
Add GUC option to enable or disable segmentwise recompression. If disabled, then a full recompression is done instead when recompression is attempted through `compress_chunk`. If `recompress_chunk_segmentwise` is used when GUC is disabled, then an error is thrown. Closes #7381.
629 lines
38 KiB
Plaintext
-- This file and its contents are licensed under the Timescale License.
|
|
-- Please see the included NOTICE for copyright information and
|
|
-- LICENSE-TIMESCALE for a copy of the license.
|
|
\c :TEST_DBNAME :ROLE_SUPERUSER
|
|
-- Maps each chunk of every hypertable to its compressed counterpart (if any),
-- exposing schema/name/status/id for both sides. LEFT JOIN keeps uncompressed
-- chunks visible with NULL compressed_chunk_* columns.
CREATE OR REPLACE VIEW compressed_chunk_info_view AS
SELECT
    h.schema_name AS hypertable_schema,
    h.table_name AS hypertable_name,
    c.schema_name AS chunk_schema,
    c.table_name AS chunk_name,
    c.status AS chunk_status,
    comp.schema_name AS compressed_chunk_schema,
    comp.table_name AS compressed_chunk_name,
    c.id AS chunk_id
FROM _timescaledb_catalog.hypertable AS h
INNER JOIN _timescaledb_catalog.chunk AS c
    ON c.hypertable_id = h.id
LEFT JOIN _timescaledb_catalog.chunk AS comp
    ON comp.id = c.compressed_chunk_id;
|
|
-- Per-chunk compression row-count stats (rows before/after compression)
-- joined to the chunk's qualified name and id via compressed_chunk_info_view.
-- Rewritten for consistency with the sibling view above: uppercase keywords,
-- explicit INNER JOIN, AS aliases, one select-list item per line.
CREATE OR REPLACE VIEW compression_rowcnt_view AS
SELECT
    ccs.numrows_pre_compression,
    ccs.numrows_post_compression,
    (v.chunk_schema || '.' || v.chunk_name) AS chunk_name,
    v.chunk_id AS chunk_id
FROM _timescaledb_catalog.compression_chunk_size AS ccs
INNER JOIN compressed_chunk_info_view AS v
    ON ccs.chunk_id = v.chunk_id;
|
|
------------- only one segment exists and only one segment affected ---------
|
|
create table mytab_oneseg (time timestamptz not null, a int, b int, c int);
|
|
SELECT create_hypertable('mytab_oneseg', 'time', chunk_time_interval => interval '1 day');
|
|
create_hypertable
|
|
---------------------------
|
|
(1,public,mytab_oneseg,t)
|
|
(1 row)
|
|
|
|
insert into mytab_oneseg values
|
|
('2023-01-01 21:56:20.048355+02'::timestamptz, 2, NULL, 2),
|
|
('2023-01-01 21:56:10.048355+02'::timestamptz, 2, NULL, 2); --same chunk same segment
|
|
alter table mytab_oneseg set (timescaledb.compress, timescaledb.compress_segmentby = 'a, c');
|
|
NOTICE: default order by for hypertable "mytab_oneseg" is set to ""time" DESC"
|
|
select show_chunks as chunk_to_compress_1 from show_chunks('mytab_oneseg') limit 1 \gset
|
|
select compress_chunk(:'chunk_to_compress_1');
|
|
compress_chunk
|
|
----------------------------------------
|
|
_timescaledb_internal._hyper_1_1_chunk
|
|
(1 row)
|
|
|
|
SELECT compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chunk_name_1
|
|
from compressed_chunk_info_view where hypertable_name = 'mytab_oneseg' \gset
|
|
SELECT ctid, * FROM :compressed_chunk_name_1;
|
|
ctid | _ts_meta_count | a | c | _ts_meta_min_1 | _ts_meta_max_1 | time | b
|
|
-------+----------------+---+---+-------------------------------------+-------------------------------------+----------------------------------------------------------------------+---
|
|
(0,1) | 2 | 2 | 2 | Sun Jan 01 11:56:10.048355 2023 PST | Sun Jan 01 11:56:20.048355 2023 PST | BAAAApQ3/qlnY///////Z2mAAAAAAgAAAAIAAAAAAAAA7gAFKG/+g/vGAAUob/+1KMU= |
|
|
(1 row)
|
|
|
|
-- after compressing the chunk
|
|
select numrows_pre_compression, numrows_post_compression from _timescaledb_catalog.compression_chunk_size;
|
|
numrows_pre_compression | numrows_post_compression
|
|
-------------------------+--------------------------
|
|
2 | 1
|
|
(1 row)
|
|
|
|
insert into mytab_oneseg values ('2023-01-01 19:56:20.048355+02'::timestamptz, 2, NULL, 2);
|
|
-- after inserting new row in compressed chunk
|
|
select numrows_pre_compression, numrows_post_compression from _timescaledb_catalog.compression_chunk_size;
|
|
numrows_pre_compression | numrows_post_compression
|
|
-------------------------+--------------------------
|
|
2 | 1
|
|
(1 row)
|
|
|
|
select _timescaledb_functions.recompress_chunk_segmentwise(:'chunk_to_compress_1');
|
|
recompress_chunk_segmentwise
|
|
----------------------------------------
|
|
_timescaledb_internal._hyper_1_1_chunk
|
|
(1 row)
|
|
|
|
-- check the ctid of the rows in the recompressed chunk to verify that we've written new data
|
|
SELECT ctid, * FROM :compressed_chunk_name_1;
|
|
ctid | _ts_meta_count | a | c | _ts_meta_min_1 | _ts_meta_max_1 | time | b
|
|
-------+----------------+---+---+-------------------------------------+-------------------------------------+----------------------------------------------------------------------------------+---
|
|
(0,2) | 3 | 2 | 2 | Sun Jan 01 09:56:20.048355 2023 PST | Sun Jan 01 11:56:20.048355 2023 PST | BAAAApQ2Uhq14/////5TcU6AAAAAAwAAAAMAAAAAAAAO7gAFKG/+g/vGAAUob/+1KMUAAAADV+w1/w== |
|
|
(1 row)
|
|
|
|
-- after recompressing chunk
|
|
select numrows_pre_compression, numrows_post_compression from _timescaledb_catalog.compression_chunk_size;
|
|
numrows_pre_compression | numrows_post_compression
|
|
-------------------------+--------------------------
|
|
2 | 1
|
|
(1 row)
|
|
|
|
insert into mytab_oneseg values ('2023-01-01 19:56:20.048355+02'::timestamptz, 2, NULL, 2);
|
|
select chunk_id
|
|
from compressed_chunk_info_view where hypertable_name = 'mytab_oneseg' \gset
|
|
-- check we are handling unexpected chunk status (partially compressed but not compressed)
|
|
update _timescaledb_catalog.chunk set status = 8 where id = :chunk_id;
|
|
\set ON_ERROR_STOP 0
|
|
select _timescaledb_functions.recompress_chunk_segmentwise(:'chunk_to_compress_1');
|
|
ERROR: unexpected chunk status 8 in chunk _timescaledb_internal._hyper_1_1_chunk
|
|
\set ON_ERROR_STOP 1
|
|
---------------- test1: one affected segment, one unaffected --------------
|
|
-- the unaffected segment will still be recompressed; in a future PR we want to avoid doing this
|
|
create table mytab_twoseg (time timestamptz not null, a int, b int, c int);
|
|
SELECT create_hypertable('mytab_twoseg', 'time', chunk_time_interval => interval '1 day');
|
|
create_hypertable
|
|
---------------------------
|
|
(3,public,mytab_twoseg,t)
|
|
(1 row)
|
|
|
|
insert into mytab_twoseg values
|
|
('2023-01-01 21:56:20.048355+02'::timestamptz, 2, NULL, 2),
|
|
('2023-01-01 21:56:20.048355+02'::timestamptz, 3, NULL, 3), --same chunk diff segment
|
|
('2023-01-01 21:57:20.048355+02'::timestamptz, 3, NULL, 3);
|
|
alter table mytab_twoseg set (timescaledb.compress, timescaledb.compress_segmentby = 'a, c');
|
|
NOTICE: default order by for hypertable "mytab_twoseg" is set to ""time" DESC"
|
|
select show_chunks as chunk_to_compress_2 from show_chunks('mytab_twoseg') limit 1 \gset
|
|
select compress_chunk(:'chunk_to_compress_2');
|
|
compress_chunk
|
|
----------------------------------------
|
|
_timescaledb_internal._hyper_3_3_chunk
|
|
(1 row)
|
|
|
|
-- stats are no longer updated during segmentwise recompression
|
|
select * from compression_rowcnt_view where chunk_name = :'chunk_to_compress_2';
|
|
numrows_pre_compression | numrows_post_compression | chunk_name | chunk_id
|
|
-------------------------+--------------------------+----------------------------------------+----------
|
|
3 | 2 | _timescaledb_internal._hyper_3_3_chunk | 3
|
|
(1 row)
|
|
|
|
insert into mytab_twoseg values ('2023-01-01 19:56:20.048355+02'::timestamptz, 2, NULL, 2);
|
|
select * from :chunk_to_compress_2 ORDER BY a, c, time DESC;
|
|
time | a | b | c
|
|
-------------------------------------+---+---+---
|
|
Sun Jan 01 11:56:20.048355 2023 PST | 2 | | 2
|
|
Sun Jan 01 11:57:20.048355 2023 PST | 3 | | 3
|
|
Sun Jan 01 11:56:20.048355 2023 PST | 3 | | 3
|
|
Sun Jan 01 09:56:20.048355 2023 PST | 2 | | 2
|
|
(4 rows)
|
|
|
|
SELECT compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chunk_name_2
|
|
from compressed_chunk_info_view where hypertable_name = 'mytab_twoseg' \gset
|
|
select ctid, * from :compressed_chunk_name_2;
|
|
ctid | _ts_meta_count | a | c | _ts_meta_min_1 | _ts_meta_max_1 | time | b
|
|
-------+----------------+---+---+-------------------------------------+-------------------------------------+----------------------------------------------------------------------+---
|
|
(0,1) | 1 | 2 | 2 | Sun Jan 01 11:56:20.048355 2023 PST | Sun Jan 01 11:56:20.048355 2023 PST | BAAAApQ3/0H94wAClDf/Qf3jAAAAAQAAAAEAAAAAAAAADgAFKG/+g/vG |
|
|
(0,2) | 2 | 3 | 3 | Sun Jan 01 11:56:20.048355 2023 PST | Sun Jan 01 11:57:20.048355 2023 PST | BAAAApQ3/0H94//////8bHkAAAAAAgAAAAIAAAAAAAAA7gAFKHAFqwnGAAUocAzSF8U= |
|
|
(2 rows)
|
|
|
|
select _timescaledb_functions.recompress_chunk_segmentwise(:'chunk_to_compress_2');
|
|
recompress_chunk_segmentwise
|
|
----------------------------------------
|
|
_timescaledb_internal._hyper_3_3_chunk
|
|
(1 row)
|
|
|
|
-- verify that metadata count looks good
|
|
select ctid, * from :compressed_chunk_name_2;
|
|
ctid | _ts_meta_count | a | c | _ts_meta_min_1 | _ts_meta_max_1 | time | b
|
|
-------+----------------+---+---+-------------------------------------+-------------------------------------+----------------------------------------------------------------------+---
|
|
(0,2) | 2 | 3 | 3 | Sun Jan 01 11:56:20.048355 2023 PST | Sun Jan 01 11:57:20.048355 2023 PST | BAAAApQ3/0H94//////8bHkAAAAAAgAAAAIAAAAAAAAA7gAFKHAFqwnGAAUocAzSF8U= |
|
|
(0,3) | 2 | 2 | 2 | Sun Jan 01 09:56:20.048355 2023 PST | Sun Jan 01 11:56:20.048355 2023 PST | BAAAApQ2Uhq14/////5S2LgAAAAAAgAAAAIAAAAAAAAA7gAFKG/+g/vGAAUoc1jSi8U= |
|
|
(2 rows)
|
|
|
|
-- verify that initial data is returned as expected
|
|
select * from :chunk_to_compress_2 ORDER BY a, c, time DESC;
|
|
time | a | b | c
|
|
-------------------------------------+---+---+---
|
|
Sun Jan 01 11:56:20.048355 2023 PST | 2 | | 2
|
|
Sun Jan 01 09:56:20.048355 2023 PST | 2 | | 2
|
|
Sun Jan 01 11:57:20.048355 2023 PST | 3 | | 3
|
|
Sun Jan 01 11:56:20.048355 2023 PST | 3 | | 3
|
|
(4 rows)
|
|
|
|
-- stats are no longer updated during segmentwise recompression
|
|
select * from compression_rowcnt_view where chunk_name = :'chunk_to_compress_2';
|
|
numrows_pre_compression | numrows_post_compression | chunk_name | chunk_id
|
|
-------------------------+--------------------------+----------------------------------------+----------
|
|
3 | 2 | _timescaledb_internal._hyper_3_3_chunk | 3
|
|
(1 row)
|
|
|
|
----------------- more than one batch per segment ----------------------
|
|
-- test that metadata sequence number is correct
|
|
create table mytab2(time timestamptz not null, a int, b int, c int);
|
|
select create_hypertable('mytab2', 'time', chunk_time_interval => interval '1 week');
|
|
create_hypertable
|
|
---------------------
|
|
(5,public,mytab2,t)
|
|
(1 row)
|
|
|
|
insert into mytab2 (time, a, c) select t,s,s from
|
|
generate_series('2023-01-01 00:00:00+00'::timestamptz, '2023-01-01 00:00:00+00'::timestamptz + interval '1 day', interval '30 sec') t cross join generate_series(0,2, 1) s;
|
|
alter table mytab2 set (timescaledb.compress, timescaledb.compress_segmentby = 'a, c');
|
|
NOTICE: default order by for hypertable "mytab2" is set to ""time" DESC"
|
|
select compress_chunk(c) from show_chunks('mytab2') c;
|
|
compress_chunk
|
|
----------------------------------------
|
|
_timescaledb_internal._hyper_5_5_chunk
|
|
(1 row)
|
|
|
|
SELECT compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chunk_name_2
|
|
from compressed_chunk_info_view where hypertable_name = 'mytab2'
|
|
and compressed_chunk_name is not null limit 1 \gset
|
|
insert into mytab2 values ('2023-01-01 00:00:02+00'::timestamptz, 0, NULL, 0); -- goes into the uncompressed chunk
|
|
select show_chunks('mytab2') as chunk_to_compress_2 \gset
|
|
select ctid, * from :compressed_chunk_name_2;
|
|
ctid | _ts_meta_count | a | c | _ts_meta_min_1 | _ts_meta_max_1 | time | b
|
|
-------+----------------+---+---+------------------------------+------------------------------+----------------------------------------------------------------------------------+---
|
|
(0,1) | 1000 | 0 | 0 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA== |
|
|
(0,2) | 1000 | 0 | 0 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA== |
|
|
(0,3) | 881 | 0 | 0 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST | BAAAApQnSNVgAP/////+NjyAAAADcQAAAAMAAAAAAAAP7gAFKFrcytAAAAUoWuBeVv8AADbwAAAAAA== |
|
|
(0,4) | 1000 | 1 | 1 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA== |
|
|
(0,5) | 1000 | 1 | 1 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA== |
|
|
(0,6) | 881 | 1 | 1 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST | BAAAApQnSNVgAP/////+NjyAAAADcQAAAAMAAAAAAAAP7gAFKFrcytAAAAUoWuBeVv8AADbwAAAAAA== |
|
|
(0,7) | 1000 | 2 | 2 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA== |
|
|
(0,8) | 1000 | 2 | 2 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA== |
|
|
(0,9) | 881 | 2 | 2 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST | BAAAApQnSNVgAP/////+NjyAAAADcQAAAAMAAAAAAAAP7gAFKFrcytAAAAUoWuBeVv8AADbwAAAAAA== |
|
|
(9 rows)
|
|
|
|
-- after compression
|
|
-- stats are no longer updated during segmentwise recompression
|
|
select * from compression_rowcnt_view where chunk_name = :'chunk_to_compress_2';
|
|
numrows_pre_compression | numrows_post_compression | chunk_name | chunk_id
|
|
-------------------------+--------------------------+----------------------------------------+----------
|
|
8643 | 9 | _timescaledb_internal._hyper_5_5_chunk | 5
|
|
(1 row)
|
|
|
|
select _timescaledb_functions.recompress_chunk_segmentwise(:'chunk_to_compress_2');
|
|
recompress_chunk_segmentwise
|
|
----------------------------------------
|
|
_timescaledb_internal._hyper_5_5_chunk
|
|
(1 row)
|
|
|
|
select ctid, * from :compressed_chunk_name_2;
|
|
ctid | _ts_meta_count | a | c | _ts_meta_min_1 | _ts_meta_max_1 | time | b
|
|
--------+----------------+---+---+------------------------------+------------------------------+------------------------------------------------------------------------------------------+---
|
|
(0,4) | 1000 | 1 | 1 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA== |
|
|
(0,5) | 1000 | 1 | 1 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA== |
|
|
(0,6) | 881 | 1 | 1 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST | BAAAApQnSNVgAP/////+NjyAAAADcQAAAAMAAAAAAAAP7gAFKFrcytAAAAUoWuBeVv8AADbwAAAAAA== |
|
|
(0,7) | 1000 | 2 | 2 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA== |
|
|
(0,8) | 1000 | 2 | 2 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA== |
|
|
(0,9) | 881 | 2 | 2 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST | BAAAApQnSNVgAP/////+NjyAAAADcQAAAAMAAAAAAAAP7gAFKFrcytAAAAUoWuBeVv8AADbwAAAAAA== |
|
|
(0,10) | 1000 | 0 | 0 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA== |
|
|
(0,11) | 1000 | 0 | 0 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA== |
|
|
(0,12) | 882 | 0 | 0 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST | BAAAApQnSNVgAP//////4XuAAAADcgAAAAQAAAAAAADf7gAFKFrcytAAAAUoWuBeVv8AADbgAAAAAAMZdQAAPQkA |
|
|
(9 rows)
|
|
|
|
-- stats are no longer updated during segmentwise recompression
|
|
select * from compression_rowcnt_view where chunk_name = :'chunk_to_compress_2';
|
|
numrows_pre_compression | numrows_post_compression | chunk_name | chunk_id
|
|
-------------------------+--------------------------+----------------------------------------+----------
|
|
8643 | 9 | _timescaledb_internal._hyper_5_5_chunk | 5
|
|
(1 row)
|
|
|
|
-- failing test from compression_ddl
|
|
CREATE TABLE test_defaults(time timestamptz NOT NULL, device_id int);
|
|
SELECT create_hypertable('test_defaults','time');
|
|
create_hypertable
|
|
----------------------------
|
|
(7,public,test_defaults,t)
|
|
(1 row)
|
|
|
|
ALTER TABLE test_defaults SET (timescaledb.compress,timescaledb.compress_segmentby='device_id');
|
|
NOTICE: default order by for hypertable "test_defaults" is set to ""time" DESC"
|
|
-- create 2 chunks
|
|
INSERT INTO test_defaults SELECT '2000-01-01', 1;
|
|
INSERT INTO test_defaults SELECT '2001-01-01', 1;
|
|
SELECT compress_chunk(show_chunks) AS "compressed_chunk" FROM show_chunks('test_defaults') ORDER BY show_chunks::text LIMIT 1 \gset
|
|
-- stats are no longer updated during segmentwise recompression
|
|
select * from compression_rowcnt_view where chunk_name = :'compressed_chunk';
|
|
numrows_pre_compression | numrows_post_compression | chunk_name | chunk_id
|
|
-------------------------+--------------------------+----------------------------------------+----------
|
|
1 | 1 | _timescaledb_internal._hyper_7_7_chunk | 7
|
|
(1 row)
|
|
|
|
SELECT * FROM test_defaults ORDER BY 1;
|
|
time | device_id
|
|
------------------------------+-----------
|
|
Sat Jan 01 00:00:00 2000 PST | 1
|
|
Mon Jan 01 00:00:00 2001 PST | 1
|
|
(2 rows)
|
|
|
|
ALTER TABLE test_defaults ADD COLUMN c1 int;
|
|
ALTER TABLE test_defaults ADD COLUMN c2 int NOT NULL DEFAULT 42;
|
|
SELECT * FROM test_defaults ORDER BY 1,2;
|
|
time | device_id | c1 | c2
|
|
------------------------------+-----------+----+----
|
|
Sat Jan 01 00:00:00 2000 PST | 1 | | 42
|
|
Mon Jan 01 00:00:00 2001 PST | 1 | | 42
|
|
(2 rows)
|
|
|
|
INSERT INTO test_defaults SELECT '2000-01-01', 2;
|
|
SELECT * FROM test_defaults ORDER BY 1,2;
|
|
time | device_id | c1 | c2
|
|
------------------------------+-----------+----+----
|
|
Sat Jan 01 00:00:00 2000 PST | 1 | | 42
|
|
Sat Jan 01 00:00:00 2000 PST | 2 | | 42
|
|
Mon Jan 01 00:00:00 2001 PST | 1 | | 42
|
|
(3 rows)
|
|
|
|
SELECT compress_chunk(:'compressed_chunk');
|
|
compress_chunk
|
|
----------------------------------------
|
|
_timescaledb_internal._hyper_7_7_chunk
|
|
(1 row)
|
|
|
|
SELECT * FROM test_defaults ORDER BY 1,2;
|
|
time | device_id | c1 | c2
|
|
------------------------------+-----------+----+----
|
|
Sat Jan 01 00:00:00 2000 PST | 1 | | 42
|
|
Sat Jan 01 00:00:00 2000 PST | 2 | | 42
|
|
Mon Jan 01 00:00:00 2001 PST | 1 | | 42
|
|
(3 rows)
|
|
|
|
-- stats are no longer updated during segmentwise recompression
|
|
select * from compression_rowcnt_view where chunk_name = :'compressed_chunk';
|
|
numrows_pre_compression | numrows_post_compression | chunk_name | chunk_id
|
|
-------------------------+--------------------------+----------------------------------------+----------
|
|
1 | 1 | _timescaledb_internal._hyper_7_7_chunk | 7
|
|
(1 row)
|
|
|
|
-- test prepared statements
|
|
-- PREPARE a SELECT before recompress and perform it after recompress
|
|
CREATE TABLE mytab_prep (time timestamptz, a int, b int, c int);
|
|
SELECT create_hypertable('mytab_prep', 'time');
|
|
NOTICE: adding not-null constraint to column "time"
|
|
create_hypertable
|
|
-------------------------
|
|
(9,public,mytab_prep,t)
|
|
(1 row)
|
|
|
|
INSERT INTO mytab_prep VALUES ('2023-01-01'::timestamptz, 2, NULL, 2),
|
|
('2023-01-01'::timestamptz, 2, NULL, 2);
|
|
alter table mytab_prep set (timescaledb.compress, timescaledb.compress_segmentby = 'a, c');
|
|
NOTICE: default order by for hypertable "mytab_prep" is set to ""time" DESC"
|
|
PREPARE p1 AS
|
|
SELECT * FROM mytab_prep ORDER BY a, c, time DESC;
|
|
select show_chunks as chunk_to_compress_prep from show_chunks('mytab_prep') limit 1 \gset
|
|
SELECT compress_chunk(:'chunk_to_compress_prep'); -- the output of the prepared plan would change before and after compress
|
|
compress_chunk
|
|
-----------------------------------------
|
|
_timescaledb_internal._hyper_9_10_chunk
|
|
(1 row)
|
|
|
|
INSERT INTO mytab_prep VALUES ('2023-01-01'::timestamptz, 2, 3, 2);
|
|
-- plan should be invalidated to return results from the uncompressed chunk also
|
|
EXPLAIN (COSTS OFF) EXECUTE p1;
|
|
QUERY PLAN
|
|
----------------------------------------------------------------------------------------------------------------------------
|
|
Merge Append
|
|
Sort Key: _hyper_9_10_chunk.a, _hyper_9_10_chunk.c, _hyper_9_10_chunk."time" DESC
|
|
-> Custom Scan (DecompressChunk) on _hyper_9_10_chunk
|
|
-> Index Scan using compress_hyper_10_11_chunk_a_c__ts_meta_min_1__ts_meta_max__idx on compress_hyper_10_11_chunk
|
|
-> Sort
|
|
Sort Key: _hyper_9_10_chunk.a, _hyper_9_10_chunk.c, _hyper_9_10_chunk."time" DESC
|
|
-> Seq Scan on _hyper_9_10_chunk
|
|
(7 rows)
|
|
|
|
EXECUTE p1;
|
|
time | a | b | c
|
|
------------------------------+---+---+---
|
|
Sun Jan 01 00:00:00 2023 PST | 2 | | 2
|
|
Sun Jan 01 00:00:00 2023 PST | 2 | | 2
|
|
Sun Jan 01 00:00:00 2023 PST | 2 | 3 | 2
|
|
(3 rows)
|
|
|
|
-- check plan again after recompression
|
|
SELECT compress_chunk(:'chunk_to_compress_prep');
|
|
compress_chunk
|
|
-----------------------------------------
|
|
_timescaledb_internal._hyper_9_10_chunk
|
|
(1 row)
|
|
|
|
EXPLAIN (COSTS OFF) EXECUTE p1;
|
|
QUERY PLAN
|
|
----------------------------------------------------------------------------------------------------------------------
|
|
Custom Scan (DecompressChunk) on _hyper_9_10_chunk
|
|
-> Index Scan using compress_hyper_10_11_chunk_a_c__ts_meta_min_1__ts_meta_max__idx on compress_hyper_10_11_chunk
|
|
(2 rows)
|
|
|
|
EXECUTE p1;
|
|
time | a | b | c
|
|
------------------------------+---+---+---
|
|
Sun Jan 01 00:00:00 2023 PST | 2 | 3 | 2
|
|
Sun Jan 01 00:00:00 2023 PST | 2 | | 2
|
|
Sun Jan 01 00:00:00 2023 PST | 2 | | 2
|
|
(3 rows)
|
|
|
|
-- verify segmentwise recompression when index exists, decompress + compress otherwise
|
|
-- we verify by checking the compressed chunk after recompression in both cases.
|
|
-- in the first case, it is the same before and after
|
|
-- in the second case, a new compressed chunk is created
|
|
CREATE TABLE mytab (time timestamptz, a int, b int, c int);
|
|
SELECT create_hypertable('mytab', 'time');
|
|
NOTICE: adding not-null constraint to column "time"
|
|
create_hypertable
|
|
---------------------
|
|
(11,public,mytab,t)
|
|
(1 row)
|
|
|
|
INSERT INTO mytab VALUES ('2023-01-01'::timestamptz, 2, NULL, 2),
|
|
('2023-01-01'::timestamptz, 2, NULL, 2);
|
|
select show_chunks as chunk_to_compress_mytab from show_chunks('mytab') limit 1 \gset
|
|
-- index exists, recompression should happen segment by segment so expect a debug message
|
|
alter table mytab set (timescaledb.compress, timescaledb.compress_segmentby = 'a, c');
|
|
NOTICE: default order by for hypertable "mytab" is set to ""time" DESC"
|
|
select compress_chunk(show_chunks('mytab'));
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_11_12_chunk
|
|
(1 row)
|
|
|
|
select compressed_chunk_name as compressed_chunk_name_before_recompression from compressed_chunk_info_view where hypertable_name = 'mytab' \gset
|
|
INSERT INTO mytab VALUES ('2023-01-01'::timestamptz, 2, 3, 2);
|
|
-- segmentwise recompression should not create a new compressed chunk, so verify compressed chunk is the same after recompression
|
|
SELECT compress_chunk(:'chunk_to_compress_mytab');
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_11_12_chunk
|
|
(1 row)
|
|
|
|
select compressed_chunk_name as compressed_chunk_name_after_recompression from compressed_chunk_info_view where hypertable_name = 'mytab' \gset
|
|
select :'compressed_chunk_name_before_recompression' as before_segmentwise_recompression, :'compressed_chunk_name_after_recompression' as after_segmentwise_recompression;
|
|
before_segmentwise_recompression | after_segmentwise_recompression
|
|
----------------------------------+---------------------------------
|
|
compress_hyper_12_13_chunk | compress_hyper_12_13_chunk
|
|
(1 row)
|
|
|
|
INSERT INTO mytab
|
|
SELECT t, a, 3, 2
|
|
FROM generate_series('2023-01-01'::timestamptz, '2023-01-02'::timestamptz, '1 hour'::interval) t
|
|
CROSS JOIN generate_series(1, 10, 1) a;
|
|
-- recompress will insert newly inserted tuples into compressed chunk along with inserting into the compressed chunk index
|
|
SELECT compress_chunk(:'chunk_to_compress_mytab');
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_11_12_chunk
|
|
(1 row)
|
|
|
|
-- make sure we are hitting the index and that the index contains the tuples
|
|
SET enable_seqscan TO off;
|
|
EXPLAIN (COSTS OFF) SELECT count(*) FROM mytab where a = 2;
|
|
QUERY PLAN
|
|
----------------------------------------------------------------------------------------------------------------------------
|
|
Aggregate
|
|
-> Custom Scan (DecompressChunk) on _hyper_11_12_chunk
|
|
-> Index Scan using compress_hyper_12_13_chunk_a_c__ts_meta_min_1__ts_meta_max__idx on compress_hyper_12_13_chunk
|
|
Index Cond: (a = 2)
|
|
(4 rows)
|
|
|
|
SELECT count(*) FROM mytab where a = 2;
|
|
count
|
|
-------
|
|
28
|
|
(1 row)
|
|
|
|
RESET enable_seqscan;
|
|
SELECT decompress_chunk(show_chunks('mytab'));
|
|
decompress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_11_12_chunk
|
|
(1 row)
|
|
|
|
alter table mytab set (timescaledb.compress = false);
|
|
alter table mytab set (timescaledb.compress);
|
|
WARNING: there was some uncertainty picking the default segment by for the hypertable: You do not have any indexes on columns that can be used for segment_by and thus we are not using segment_by for compression. Please make sure you are not missing any indexes
|
|
NOTICE: default segment by for hypertable "mytab" is set to ""
|
|
NOTICE: default order by for hypertable "mytab" is set to ""time" DESC"
|
|
select compress_chunk(show_chunks('mytab'));
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_11_12_chunk
|
|
(1 row)
|
|
|
|
select compressed_chunk_name as compressed_chunk_name_before_recompression from compressed_chunk_info_view where hypertable_name = 'mytab' \gset
|
|
INSERT INTO mytab VALUES ('2023-01-01'::timestamptz, 2, 3, 2);
|
|
-- expect to see a different compressed chunk after recompressing now as the operation is decompress + compress
|
|
SELECT compress_chunk(:'chunk_to_compress_mytab');
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_11_12_chunk
|
|
(1 row)
|
|
|
|
select compressed_chunk_name as compressed_chunk_name_after_recompression from compressed_chunk_info_view where hypertable_name = 'mytab' \gset
|
|
select :'compressed_chunk_name_before_recompression' as before_recompression, :'compressed_chunk_name_after_recompression' as after_recompression;
|
|
before_recompression | after_recompression
|
|
----------------------------+----------------------------
|
|
compress_hyper_13_14_chunk | compress_hyper_13_15_chunk
|
|
(1 row)
|
|
|
|
-- check behavior with NULL values in segmentby columns
|
|
select '2022-01-01 09:00:00+00' as start_time \gset
|
|
create table nullseg_one (time timestamptz, a int, b int);
|
|
select create_hypertable('nullseg_one', 'time');
|
|
NOTICE: adding not-null constraint to column "time"
|
|
create_hypertable
|
|
---------------------------
|
|
(14,public,nullseg_one,t)
|
|
(1 row)
|
|
|
|
insert into nullseg_one values (:'start_time', 1, 1), (:'start_time', 1, 2), (:'start_time', 2,2), (:'start_time', 2,3);
|
|
alter table nullseg_one set (timescaledb.compress, timescaledb.compress_segmentby= 'a');
|
|
NOTICE: default order by for hypertable "nullseg_one" is set to ""time" DESC"
|
|
select compress_chunk(show_chunks('nullseg_one'));
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_14_16_chunk
|
|
(1 row)
|
|
|
|
insert into nullseg_one values (:'start_time', NULL, 4);
|
|
select show_chunks as chunk_to_compress from show_chunks('nullseg_one') limit 1 \gset
|
|
select compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chunk_name from compressed_chunk_info_view where hypertable_name = 'nullseg_one' \gset
|
|
SELECT compress_chunk(:'chunk_to_compress');
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_14_16_chunk
|
|
(1 row)
|
|
|
|
select * from :compressed_chunk_name;
|
|
_ts_meta_count | a | _ts_meta_min_1 | _ts_meta_max_1 | time | b
|
|
----------------+---+------------------------------+------------------------------+----------------------------------------------------------------------+----------------------------------------------------------
|
|
2 | 1 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | BAAAAAAAAAAAAgAAAAAAAAABAAAAAgAAAAEAAAAAAAAAAgAAAAAAAAAC
|
|
2 | 2 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | BAAAAAAAAAAAAwAAAAAAAAABAAAAAgAAAAEAAAAAAAAAAwAAAAAAAAAM
|
|
1 | | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA | BAAAAAAAAAAABAAAAAAAAAAEAAAAAQAAAAEAAAAAAAAABAAAAAAAAAAI
|
|
(3 rows)
|
|
|
|
-- insert again, check both index insertion works and NULL values properly handled
|
|
insert into nullseg_one values (:'start_time', NULL, 4);
|
|
SELECT compress_chunk(:'chunk_to_compress');
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_14_16_chunk
|
|
(1 row)
|
|
|
|
select * from :compressed_chunk_name;
|
|
_ts_meta_count | a | _ts_meta_min_1 | _ts_meta_max_1 | time | b
|
|
----------------+---+------------------------------+------------------------------+----------------------------------------------------------------------+----------------------------------------------------------
|
|
2 | 1 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | BAAAAAAAAAAAAgAAAAAAAAABAAAAAgAAAAEAAAAAAAAAAgAAAAAAAAAC
|
|
2 | 2 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | BAAAAAAAAAAAAwAAAAAAAAABAAAAAgAAAAEAAAAAAAAAAwAAAAAAAAAM
|
|
2 | | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | BAAAAAAAAAAABAAAAAAAAAAAAAAAAgAAAAEAAAAAAAAABAAAAAAAAAB4
|
|
(3 rows)
|
|
|
|
-- test multiple NULL segmentby columns
|
|
create table nullseg_many (time timestamptz, a int, b int, c int);
|
|
select create_hypertable('nullseg_many', 'time');
|
|
NOTICE: adding not-null constraint to column "time"
|
|
create_hypertable
|
|
----------------------------
|
|
(16,public,nullseg_many,t)
|
|
(1 row)
|
|
|
|
insert into nullseg_many values (:'start_time', 1, 1, 1), (:'start_time', 1, 2, 2), (:'start_time', 2,2, 2), (:'start_time', 2,3, 3), (:'start_time', 2, NULL, 3);
|
|
alter table nullseg_many set (timescaledb.compress, timescaledb.compress_segmentby= 'a, c');
|
|
NOTICE: default order by for hypertable "nullseg_many" is set to ""time" DESC"
|
|
select compress_chunk(show_chunks('nullseg_many'));
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_16_18_chunk
|
|
(1 row)
|
|
|
|
-- new segment (1, NULL)
|
|
insert into nullseg_many values (:'start_time', 1, 4, NULL);
|
|
select show_chunks as chunk_to_compress from show_chunks('nullseg_many') limit 1 \gset
|
|
select compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chunk_name from compressed_chunk_info_view where hypertable_name = 'nullseg_many' \gset
|
|
SELECT compress_chunk(:'chunk_to_compress');
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_16_18_chunk
|
|
(1 row)
|
|
|
|
select * from :compressed_chunk_name;
|
|
_ts_meta_count | a | c | _ts_meta_min_1 | _ts_meta_max_1 | time | b
|
|
----------------+---+---+------------------------------+------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------------------------------
|
|
1 | 1 | 1 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA | BAAAAAAAAAAAAQAAAAAAAAABAAAAAQAAAAEAAAAAAAAAAgAAAAAAAAAC
|
|
1 | 1 | 2 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA | BAAAAAAAAAAAAgAAAAAAAAACAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAE
|
|
1 | 2 | 2 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA | BAAAAAAAAAAAAgAAAAAAAAACAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAE
|
|
2 | 2 | 3 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | BAEAAAAAAAAAAwAAAAAAAAADAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAGAAAAAgAAAAEAAAAAAAAAAQAAAAAAAAAC
|
|
1 | 1 | | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA | BAAAAAAAAAAABAAAAAAAAAAEAAAAAQAAAAEAAAAAAAAABAAAAAAAAAAI
|
|
(5 rows)
|
|
|
|
-- insert again, check both index insertion works and NULL values properly handled
|
|
-- should match existing segment (1, NULL)
|
|
insert into nullseg_many values (:'start_time', 1, NULL, NULL);
|
|
SELECT compress_chunk(:'chunk_to_compress');
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_16_18_chunk
|
|
(1 row)
|
|
|
|
select * from :compressed_chunk_name;
|
|
_ts_meta_count | a | c | _ts_meta_min_1 | _ts_meta_max_1 | time | b
|
|
----------------+---+---+------------------------------+------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------------------------------
|
|
1 | 1 | 1 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA | BAAAAAAAAAAAAQAAAAAAAAABAAAAAQAAAAEAAAAAAAAAAgAAAAAAAAAC
|
|
1 | 1 | 2 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA | BAAAAAAAAAAAAgAAAAAAAAACAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAE
|
|
1 | 2 | 2 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA | BAAAAAAAAAAAAgAAAAAAAAACAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAE
|
|
2 | 2 | 3 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | BAEAAAAAAAAAAwAAAAAAAAADAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAGAAAAAgAAAAEAAAAAAAAAAQAAAAAAAAAC
|
|
2 | 1 | | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST | BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | BAEAAAAAAAAABAAAAAAAAAAEAAAAAQAAAAEAAAAAAAAABAAAAAAAAAAIAAAAAgAAAAEAAAAAAAAAAQAAAAAAAAAB
|
|
(5 rows)
|
|
|
|
--- Test behaviour when enable_segmentwise_recompression GUC is OFF
|
|
CREATE TABLE guc_test(time timestamptz not null, a int, b int, c int);
|
|
SELECT create_hypertable('guc_test', by_range('time', INTERVAL '1 day'));
|
|
create_hypertable
|
|
-------------------
|
|
(18,t)
|
|
(1 row)
|
|
|
|
ALTER TABLE guc_test set (timescaledb.compress, timescaledb.compress_segmentby = 'a, b');
|
|
NOTICE: default order by for hypertable "guc_test" is set to ""time" DESC"
|
|
INSERT INTO guc_test VALUES ('2024-10-30 14:04:00.501519-06'::timestamptz, 1, 1, 1);
|
|
SELECT show_chunks as chunk_to_compress FROM show_chunks('guc_test') LIMIT 1 \gset
|
|
SELECT compress_chunk(:'chunk_to_compress');
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_18_20_chunk
|
|
(1 row)
|
|
|
|
INSERT INTO guc_test VALUES ('2024-10-30 14:14:00.501519-06'::timestamptz, 1, 1, 2);
|
|
-- When GUC is OFF, recompress function should throw an error
|
|
SET timescaledb.enable_segmentwise_recompression TO OFF;
|
|
\set ON_ERROR_STOP 0
|
|
SELECT _timescaledb_functions.recompress_chunk_segmentwise(:'chunk_to_compress');
|
|
ERROR: segmentwise recompression functionality disabled, enable it by first setting timescaledb.enable_segmentwise_recompression to on
|
|
\set ON_ERROR_STOP 1
|
|
-- When GUC is OFF, entire chunk should be fully uncompressed and compressed instead
|
|
SELECT compress_chunk(:'chunk_to_compress');
|
|
NOTICE: segmentwise recompression is disabled, performing full recompression on chunk "_timescaledb_internal._hyper_18_20_chunk"
|
|
compress_chunk
|
|
------------------------------------------
|
|
_timescaledb_internal._hyper_18_20_chunk
|
|
(1 row)
|
|
|