-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
\c :TEST_DBNAME :ROLE_SUPERUSER
CREATE OR REPLACE VIEW compressed_chunk_info_view AS
SELECT
   h.schema_name AS hypertable_schema,
   h.table_name AS hypertable_name,
   c.schema_name AS chunk_schema,
   c.table_name AS chunk_name,
   c.status AS chunk_status,
   comp.schema_name AS compressed_chunk_schema,
   comp.table_name AS compressed_chunk_name,
   c.id AS chunk_id
FROM _timescaledb_catalog.hypertable h
JOIN _timescaledb_catalog.chunk c ON h.id = c.hypertable_id
LEFT JOIN _timescaledb_catalog.chunk comp ON comp.id = c.compressed_chunk_id;
CREATE OR REPLACE VIEW compression_rowcnt_view AS
SELECT ccs.numrows_pre_compression, ccs.numrows_post_compression,
   (v.chunk_schema || '.' || v.chunk_name) AS chunk_name,
   v.chunk_id AS chunk_id
FROM _timescaledb_catalog.compression_chunk_size ccs
JOIN compressed_chunk_info_view v ON ccs.chunk_id = v.chunk_id;
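-- These two helper views join the compression catalog tables so that each
-- test below can look up, per chunk, how many uncompressed rows went in
-- (numrows_pre_compression) and how many compressed batches came out
-- (numrows_post_compression). Typical usage, shown here commented out so the
-- expected output of this test stays unchanged ('<chunk>' is a placeholder):
-- select * from compression_rowcnt_view where chunk_name = '<chunk>';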
------------- only one segment exists and only one segment is affected ---------
create table mytab_oneseg (time timestamptz not null, a int, b int, c int);
SELECT create_hypertable('mytab_oneseg', 'time', chunk_time_interval => interval '1 day');
     create_hypertable     
---------------------------
 (1,public,mytab_oneseg,t)
(1 row)

insert into mytab_oneseg values
('2023-01-01 21:56:20.048355+02'::timestamptz, 2, NULL, 2),
('2023-01-01 21:56:10.048355+02'::timestamptz, 2, NULL, 2); -- same chunk, same segment
alter table mytab_oneseg set (timescaledb.compress, timescaledb.compress_segmentby = 'a, c');
select show_chunks as chunk_to_compress_1 from show_chunks('mytab_oneseg') limit 1 \gset
select compress_chunk(:'chunk_to_compress_1');
             compress_chunk             
----------------------------------------
 _timescaledb_internal._hyper_1_1_chunk
(1 row)

SELECT compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chunk_name_1
from compressed_chunk_info_view where hypertable_name = 'mytab_oneseg' \gset
SELECT ctid, * FROM :compressed_chunk_name_1;
 ctid  |                                 time                                 | a | b | c | _ts_meta_count | _ts_meta_sequence_num |           _ts_meta_min_1            |           _ts_meta_max_1            
-------+----------------------------------------------------------------------+---+---+---+----------------+-----------------------+-------------------------------------+-------------------------------------
 (0,1) | BAAAApQ3/qlnY///////Z2mAAAAAAgAAAAIAAAAAAAAA7gAFKG/+g/vGAAUob/+1KMU= | 2 |   | 2 |              2 |                    10 | Sun Jan 01 11:56:10.048355 2023 PST | Sun Jan 01 11:56:20.048355 2023 PST
(1 row)

-- after compressing the chunk
select numrows_pre_compression, numrows_post_compression from _timescaledb_catalog.compression_chunk_size;
 numrows_pre_compression | numrows_post_compression 
-------------------------+--------------------------
                       2 |                        1
(1 row)

insert into mytab_oneseg values ('2023-01-01 19:56:20.048355+02'::timestamptz, 2, NULL, 2);
-- after inserting new row in compressed chunk
select numrows_pre_compression, numrows_post_compression from _timescaledb_catalog.compression_chunk_size;
 numrows_pre_compression | numrows_post_compression 
-------------------------+--------------------------
                       2 |                        1
(1 row)
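
-- the new row lands in the uncompressed part of the chunk, so the catalog
-- counts above are unchanged; the chunk is merely flagged as partially
-- compressed. A commented-out sketch of how to inspect that flag via the
-- helper view (the status bits are internal and may change):
-- select chunk_name, chunk_status from compressed_chunk_info_view
--  where hypertable_name = 'mytab_oneseg';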

select _timescaledb_internal.recompress_chunk_segmentwise(:'chunk_to_compress_1');
      recompress_chunk_segmentwise      
----------------------------------------
 _timescaledb_internal._hyper_1_1_chunk
(1 row)

-- check the ctid of the rows in the recompressed chunk to verify that we've written new data
SELECT ctid, * FROM :compressed_chunk_name_1;
 ctid  |                                       time                                       | a | b | c | _ts_meta_count | _ts_meta_sequence_num |           _ts_meta_min_1            |           _ts_meta_max_1            
-------+----------------------------------------------------------------------------------+---+---+---+----------------+-----------------------+-------------------------------------+-------------------------------------
 (0,2) | BAAAApQ2Uhq14/////5TcU6AAAAAAwAAAAMAAAAAAAAO7gAFKG/+g/vGAAUob/+1KMUAAAADV+w1/w== | 2 |   | 2 |              3 |                    10 | Sun Jan 01 09:56:20.048355 2023 PST | Sun Jan 01 11:56:20.048355 2023 PST
(1 row)

-- after recompressing chunk
select numrows_pre_compression, numrows_post_compression from _timescaledb_catalog.compression_chunk_size;
 numrows_pre_compression | numrows_post_compression 
-------------------------+--------------------------
                       3 |                        1
(1 row)

---------------- test1: one affected segment, one unaffected --------------
-- the unaffected segment is currently recompressed as well; a future PR
-- should avoid rewriting it
create table mytab_twoseg (time timestamptz not null, a int, b int, c int);
SELECT create_hypertable('mytab_twoseg', 'time', chunk_time_interval => interval '1 day');
     create_hypertable     
---------------------------
 (3,public,mytab_twoseg,t)
(1 row)

insert into mytab_twoseg values
('2023-01-01 21:56:20.048355+02'::timestamptz, 2, NULL, 2),
('2023-01-01 21:56:20.048355+02'::timestamptz, 3, NULL, 3), -- same chunk, different segment
('2023-01-01 21:57:20.048355+02'::timestamptz, 3, NULL, 3);
alter table mytab_twoseg set (timescaledb.compress, timescaledb.compress_segmentby = 'a, c');
select show_chunks as chunk_to_compress_2 from show_chunks('mytab_twoseg') limit 1 \gset
select compress_chunk(:'chunk_to_compress_2');
             compress_chunk             
----------------------------------------
 _timescaledb_internal._hyper_3_3_chunk
(1 row)

-- should have 2 compressed rows
-- select numrows_pre_compression, numrows_post_compression from _timescaledb_catalog.compression_chunk_size ccs
-- join compressed_chunk_info_view v on ccs.chunk_id = v.chunk_id where v.compressed_chunk_schema || '.' || v.compressed_chunk_name
--  = :'chunk_to_compress_2';
select * from compression_rowcnt_view where chunk_name = :'chunk_to_compress_2';
 numrows_pre_compression | numrows_post_compression |               chunk_name               | chunk_id 
-------------------------+--------------------------+----------------------------------------+----------
                       3 |                        2 | _timescaledb_internal._hyper_3_3_chunk |        3
(1 row)

insert into mytab_twoseg values ('2023-01-01 19:56:20.048355+02'::timestamptz, 2, NULL, 2);
select * from :chunk_to_compress_2;
                time                 | a | b | c 
-------------------------------------+---+---+---
 Sun Jan 01 11:56:20.048355 2023 PST | 2 |   | 2
 Sun Jan 01 11:57:20.048355 2023 PST | 3 |   | 3
 Sun Jan 01 11:56:20.048355 2023 PST | 3 |   | 3
 Sun Jan 01 09:56:20.048355 2023 PST | 2 |   | 2
(4 rows)

SELECT compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chunk_name_2
from compressed_chunk_info_view where hypertable_name = 'mytab_twoseg' \gset
select ctid, * from :compressed_chunk_name_2;
 ctid  |                                 time                                 | a | b | c | _ts_meta_count | _ts_meta_sequence_num |           _ts_meta_min_1            |           _ts_meta_max_1            
-------+----------------------------------------------------------------------+---+---+---+----------------+-----------------------+-------------------------------------+-------------------------------------
 (0,1) | BAAAApQ3/0H94wAClDf/Qf3jAAAAAQAAAAEAAAAAAAAADgAFKG/+g/vG             | 2 |   | 2 |              1 |                    10 | Sun Jan 01 11:56:20.048355 2023 PST | Sun Jan 01 11:56:20.048355 2023 PST
 (0,2) | BAAAApQ3/0H94//////8bHkAAAAAAgAAAAIAAAAAAAAA7gAFKHAFqwnGAAUocAzSF8U= | 3 |   | 3 |              2 |                    10 | Sun Jan 01 11:56:20.048355 2023 PST | Sun Jan 01 11:57:20.048355 2023 PST
(2 rows)

select _timescaledb_internal.recompress_chunk_segmentwise(:'chunk_to_compress_2');
      recompress_chunk_segmentwise      
----------------------------------------
 _timescaledb_internal._hyper_3_3_chunk
(1 row)

-- verify that metadata count looks good
select ctid, * from :compressed_chunk_name_2;
 ctid  |                                 time                                 | a | b | c | _ts_meta_count | _ts_meta_sequence_num |           _ts_meta_min_1            |           _ts_meta_max_1            
-------+----------------------------------------------------------------------+---+---+---+----------------+-----------------------+-------------------------------------+-------------------------------------
 (0,3) | BAAAApQ2Uhq14/////5S2LgAAAAAAgAAAAIAAAAAAAAA7gAFKG/+g/vGAAUoc1jSi8U= | 2 |   | 2 |              2 |                    10 | Sun Jan 01 09:56:20.048355 2023 PST | Sun Jan 01 11:56:20.048355 2023 PST
 (0,4) | BAAAApQ3/0H94//////8bHkAAAAAAgAAAAIAAAAAAAAA7gAFKHAFqwnGAAUocAzSF8U= | 3 |   | 3 |              2 |                    10 | Sun Jan 01 11:56:20.048355 2023 PST | Sun Jan 01 11:57:20.048355 2023 PST
(2 rows)
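
-- note that both compressed rows got fresh ctids ((0,3) and (0,4)): the
-- unaffected segment (3, 3) was rewritten too, as noted at the top of this test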

-- verify that initial data is returned as expected
select * from :chunk_to_compress_2;
                time                 | a | b | c 
-------------------------------------+---+---+---
 Sun Jan 01 11:56:20.048355 2023 PST | 2 |   | 2
 Sun Jan 01 09:56:20.048355 2023 PST | 2 |   | 2
 Sun Jan 01 11:57:20.048355 2023 PST | 3 |   | 3
 Sun Jan 01 11:56:20.048355 2023 PST | 3 |   | 3
(4 rows)

-- should still have 2 compressed rows
select * from compression_rowcnt_view where chunk_name = :'chunk_to_compress_2';
 numrows_pre_compression | numrows_post_compression |               chunk_name               | chunk_id 
-------------------------+--------------------------+----------------------------------------+----------
                       4 |                        2 | _timescaledb_internal._hyper_3_3_chunk |        3
(1 row)

----------------- more than one batch per segment ----------------------
-- test that metadata sequence number is correct
create table mytab2(time timestamptz not null, a int, b int, c int);
select create_hypertable('mytab2', 'time', chunk_time_interval => interval '1 week');
  create_hypertable  
---------------------
 (5,public,mytab2,t)
(1 row)

insert into mytab2 (time, a, c)
select t, s, s
from generate_series('2023-01-01 00:00:00+00'::timestamptz,
                     '2023-01-01 00:00:00+00'::timestamptz + interval '1 day',
                     interval '30 sec') t
cross join generate_series(0, 2, 1) s;
alter table mytab2 set (timescaledb.compress, timescaledb.compress_segmentby = 'a, c');
select compress_chunk(c) from show_chunks('mytab2') c;
             compress_chunk             
----------------------------------------
 _timescaledb_internal._hyper_5_5_chunk
(1 row)

SELECT compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chunk_name_2
from compressed_chunk_info_view where hypertable_name = 'mytab2'
and compressed_chunk_name is not null limit 1 \gset
insert into mytab2 values ('2023-01-01 00:00:02+00'::timestamptz, 0, NULL, 0); -- goes into the uncompressed chunk
select show_chunks('mytab2') as chunk_to_compress_2 \gset
select ctid, * from :compressed_chunk_name_2;
 ctid  |                                       time                                       | a | b | c | _ts_meta_count | _ts_meta_sequence_num |        _ts_meta_min_1        |        _ts_meta_max_1        
-------+----------------------------------------------------------------------------------+---+---+---+----------------+-----------------------+------------------------------+------------------------------
 (0,1) | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA== | 0 |   | 0 |           1000 |                    10 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST
 (0,2) | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA== | 0 |   | 0 |           1000 |                    20 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST
 (0,3) | BAAAApQnSNVgAP/////+NjyAAAADcQAAAAMAAAAAAAAP7gAFKFrcytAAAAUoWuBeVv8AADbwAAAAAA== | 0 |   | 0 |            881 |                    30 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST
 (0,4) | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA== | 1 |   | 1 |           1000 |                    10 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST
 (0,5) | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA== | 1 |   | 1 |           1000 |                    20 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST
 (0,6) | BAAAApQnSNVgAP/////+NjyAAAADcQAAAAMAAAAAAAAP7gAFKFrcytAAAAUoWuBeVv8AADbwAAAAAA== | 1 |   | 1 |            881 |                    30 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST
 (0,7) | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA== | 2 |   | 2 |           1000 |                    10 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST
 (0,8) | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA== | 2 |   | 2 |           1000 |                    20 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST
 (0,9) | BAAAApQnSNVgAP/////+NjyAAAADcQAAAAMAAAAAAAAP7gAFKFrcytAAAAUoWuBeVv8AADbwAAAAAA== | 2 |   | 2 |            881 |                    30 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST
(9 rows)
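
-- Within each segment, rows are packed into batches of at most 1000
-- (_ts_meta_count), numbered 10, 20, 30, ... in _ts_meta_sequence_num; the
-- gap of 10 leaves room to splice new batches between existing ones during
-- segmentwise recompression without renumbering. A commented-out sketch for
-- eyeballing the numbering per segment:
-- select a, c, _ts_meta_sequence_num, _ts_meta_count
--   from :compressed_chunk_name_2 order by a, c, _ts_meta_sequence_num;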

-- after compression
select * from compression_rowcnt_view where chunk_name = :'chunk_to_compress_2';
 numrows_pre_compression | numrows_post_compression |               chunk_name               | chunk_id 
-------------------------+--------------------------+----------------------------------------+----------
                    8643 |                        9 | _timescaledb_internal._hyper_5_5_chunk |        5
(1 row)

select _timescaledb_internal.recompress_chunk_segmentwise(:'chunk_to_compress_2');
      recompress_chunk_segmentwise      
----------------------------------------
 _timescaledb_internal._hyper_5_5_chunk
(1 row)

select ctid, * from :compressed_chunk_name_2;
  ctid  |                                           time                                           | a | b | c | _ts_meta_count | _ts_meta_sequence_num |        _ts_meta_min_1        |        _ts_meta_max_1        
--------+------------------------------------------------------------------------------------------+---+---+---+----------------+-----------------------+------------------------------+------------------------------
 (0,10) | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA==         | 0 |   | 0 |           1000 |                    10 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST
 (0,11) | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA==         | 0 |   | 0 |           1000 |                    20 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST
 (0,12) | BAAAApQnSNVgAP//////4XuAAAADcgAAAAQAAAAAAADf7gAFKFrcytAAAAUoWuBeVv8AADbgAAAAAAMZdQAAPQkA | 0 |   | 0 |            882 |                    30 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST
 (0,13) | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA==         | 1 |   | 1 |           1000 |                    10 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST
 (0,14) | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA==         | 1 |   | 1 |           1000 |                    20 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST
 (0,15) | BAAAApQnSNVgAP/////+NjyAAAADcQAAAAMAAAAAAAAP7gAFKFrcytAAAAUoWuBeVv8AADbwAAAAAA==         | 1 |   | 1 |            881 |                    30 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST
 (0,16) | BAAAApQ0bFLXgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKHbNWYAAAAUodtDtBv8AAD5gAAAAAA==         | 2 |   | 2 |           1000 |                    10 | Sun Jan 01 07:40:30 2023 PST | Sun Jan 01 16:00:00 2023 PST
 (0,17) | BAAAApQtcC8rgP/////+NjyAAAAD6AAAAAMAAAAAAAAP7gAFKGjVEigAAAUoaNilrv8AAD5gAAAAAA==         | 2 |   | 2 |           1000 |                    20 | Sat Dec 31 23:20:30 2022 PST | Sun Jan 01 07:40:00 2023 PST
 (0,18) | BAAAApQnSNVgAP/////+NjyAAAADcQAAAAMAAAAAAAAP7gAFKFrcytAAAAUoWuBeVv8AADbwAAAAAA==         | 2 |   | 2 |            881 |                    30 | Sat Dec 31 16:00:00 2022 PST | Sat Dec 31 23:20:00 2022 PST
(9 rows)

-- after recompression
select * from compression_rowcnt_view where chunk_name = :'chunk_to_compress_2';
 numrows_pre_compression | numrows_post_compression |               chunk_name               | chunk_id 
-------------------------+--------------------------+----------------------------------------+----------
                    8644 |                        9 | _timescaledb_internal._hyper_5_5_chunk |        5
(1 row)
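
-- numrows_pre_compression grew by exactly the one inserted row (8643 -> 8644)
-- while the batch count stayed at 9: the affected batch of segment (0, 0)
-- simply grew from 881 to 882 rows (ctid (0,12) above)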

-- failing test from compression_ddl
CREATE TABLE test_defaults(time timestamptz NOT NULL, device_id int);
SELECT create_hypertable('test_defaults','time');
     create_hypertable      
----------------------------
 (7,public,test_defaults,t)
(1 row)

ALTER TABLE test_defaults SET (timescaledb.compress,timescaledb.compress_segmentby='device_id');
-- create 2 chunks
INSERT INTO test_defaults SELECT '2000-01-01', 1;
INSERT INTO test_defaults SELECT '2001-01-01', 1;
SELECT compress_chunk(show_chunks) AS "compressed_chunk" FROM show_chunks('test_defaults') ORDER BY show_chunks::text LIMIT 1 \gset
select * from compression_rowcnt_view where chunk_name = :'compressed_chunk';
 numrows_pre_compression | numrows_post_compression |               chunk_name               | chunk_id 
-------------------------+--------------------------+----------------------------------------+----------
                       1 |                        1 | _timescaledb_internal._hyper_7_7_chunk |        7
(1 row)

SELECT * FROM test_defaults ORDER BY 1;
             time             | device_id 
------------------------------+-----------
 Sat Jan 01 00:00:00 2000 PST |         1
 Mon Jan 01 00:00:00 2001 PST |         1
(2 rows)

ALTER TABLE test_defaults ADD COLUMN c1 int;
ALTER TABLE test_defaults ADD COLUMN c2 int NOT NULL DEFAULT 42;
SELECT * FROM test_defaults ORDER BY 1,2;
             time             | device_id | c1 | c2 
------------------------------+-----------+----+----
 Sat Jan 01 00:00:00 2000 PST |         1 |    | 42
 Mon Jan 01 00:00:00 2001 PST |         1 |    | 42
(2 rows)

INSERT INTO test_defaults SELECT '2000-01-01', 2;
SELECT * FROM test_defaults ORDER BY 1,2;
             time             | device_id | c1 | c2 
------------------------------+-----------+----+----
 Sat Jan 01 00:00:00 2000 PST |         1 |    | 42
 Sat Jan 01 00:00:00 2000 PST |         2 |    | 42
 Mon Jan 01 00:00:00 2001 PST |         1 |    | 42
(3 rows)

call recompress_chunk(:'compressed_chunk');
SELECT * FROM test_defaults ORDER BY 1,2;
             time             | device_id | c1 | c2 
------------------------------+-----------+----+----
 Sat Jan 01 00:00:00 2000 PST |         1 |    | 42
 Sat Jan 01 00:00:00 2000 PST |         2 |    | 42
 Mon Jan 01 00:00:00 2001 PST |         1 |    | 42
(3 rows)

-- here we will have an additional compressed row after recompression because the new
-- data corresponds to a new segment
select * from compression_rowcnt_view where chunk_name = :'compressed_chunk';
 numrows_pre_compression | numrows_post_compression |               chunk_name               | chunk_id 
-------------------------+--------------------------+----------------------------------------+----------
                       2 |                        2 | _timescaledb_internal._hyper_7_7_chunk |        7
(1 row)

-- test prepared statements
-- PREPARE a SELECT before recompression and execute it after recompression
CREATE TABLE mytab_prep (time timestamptz, a int, b int, c int);
SELECT create_hypertable('mytab_prep', 'time');
NOTICE:  adding not-null constraint to column "time"
    create_hypertable    
-------------------------
 (9,public,mytab_prep,t)
(1 row)

INSERT INTO mytab_prep VALUES ('2023-01-01'::timestamptz, 2, NULL, 2),
('2023-01-01'::timestamptz, 2, NULL, 2);
alter table mytab_prep set (timescaledb.compress, timescaledb.compress_segmentby = 'a, c');
PREPARE p1 AS
SELECT * FROM mytab_prep;
select show_chunks as chunk_to_compress_prep from show_chunks('mytab_prep') limit 1 \gset
SELECT compress_chunk(:'chunk_to_compress_prep'); -- the prepared plan's output would change before vs. after compression
             compress_chunk              
-----------------------------------------
 _timescaledb_internal._hyper_9_10_chunk
(1 row)

INSERT INTO mytab_prep VALUES ('2023-01-01'::timestamptz, 2, 3, 2);
-- the plan should be invalidated so that it also returns results from the uncompressed chunk
EXPLAIN (COSTS OFF) EXECUTE p1;
                        QUERY PLAN                        
----------------------------------------------------------
 Append
   ->  Custom Scan (DecompressChunk) on _hyper_9_10_chunk
         ->  Seq Scan on compress_hyper_10_11_chunk
   ->  Seq Scan on _hyper_9_10_chunk
(4 rows)
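
-- the Append node shows the invalidated plan now reads both the compressed
-- data (via DecompressChunk) and the freshly inserted uncompressed rows
-- (via the plain Seq Scan on the chunk itself)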

EXECUTE p1;
             time             | a | b | c 
------------------------------+---+---+---
 Sun Jan 01 00:00:00 2023 PST | 2 |   | 2
 Sun Jan 01 00:00:00 2023 PST | 2 |   | 2
 Sun Jan 01 00:00:00 2023 PST | 2 | 3 | 2
(3 rows)

-- check plan again after recompression
CALL recompress_chunk(:'chunk_to_compress_prep');
EXPLAIN (COSTS OFF) EXECUTE p1;
                     QUERY PLAN                     
----------------------------------------------------
 Custom Scan (DecompressChunk) on _hyper_9_10_chunk
   ->  Seq Scan on compress_hyper_10_11_chunk
(2 rows)

EXECUTE p1;
             time             | a | b | c 
------------------------------+---+---+---
 Sun Jan 01 00:00:00 2023 PST | 2 |   | 2
 Sun Jan 01 00:00:00 2023 PST | 2 |   | 2
 Sun Jan 01 00:00:00 2023 PST | 2 | 3 | 2
(3 rows)

-- verify that recompression is segmentwise when an index exists, and falls
-- back to decompress + compress otherwise.
-- we verify this by comparing the compressed chunk before and after recompression:
-- in the first case it is the same chunk; in the second, a new compressed chunk is created
CREATE TABLE mytab (time timestamptz, a int, b int, c int);
SELECT create_hypertable('mytab', 'time');
NOTICE:  adding not-null constraint to column "time"
  create_hypertable  
---------------------
 (11,public,mytab,t)
(1 row)

INSERT INTO mytab VALUES ('2023-01-01'::timestamptz, 2, NULL, 2),
('2023-01-01'::timestamptz, 2, NULL, 2);
select show_chunks as chunk_to_compress_mytab from show_chunks('mytab') limit 1 \gset
-- an index exists, so recompression should happen segment by segment; expect a debug message
alter table mytab set (timescaledb.compress, timescaledb.compress_segmentby = 'a, c');
select compress_chunk(show_chunks('mytab'));
              compress_chunk              
------------------------------------------
 _timescaledb_internal._hyper_11_12_chunk
(1 row)

select compressed_chunk_name as compressed_chunk_name_before_recompression from compressed_chunk_info_view where hypertable_name = 'mytab' \gset
INSERT INTO mytab VALUES ('2023-01-01'::timestamptz, 2, 3, 2);
-- segmentwise recompression should not create a new compressed chunk, so verify compressed chunk is the same after recompression
call recompress_chunk(:'chunk_to_compress_mytab');
select compressed_chunk_name as compressed_chunk_name_after_recompression from compressed_chunk_info_view where hypertable_name = 'mytab' \gset
select :'compressed_chunk_name_before_recompression' as before_segmentwise_recompression, :'compressed_chunk_name_after_recompression' as after_segmentwise_recompression;
 before_segmentwise_recompression | after_segmentwise_recompression 
----------------------------------+---------------------------------
 compress_hyper_12_13_chunk       | compress_hyper_12_13_chunk
(1 row)

SELECT decompress_chunk(show_chunks('mytab'));
             decompress_chunk             
------------------------------------------
 _timescaledb_internal._hyper_11_12_chunk
(1 row)

alter table mytab set (timescaledb.compress = false);
alter table mytab set (timescaledb.compress);
select compress_chunk(show_chunks('mytab'));
              compress_chunk              
------------------------------------------
 _timescaledb_internal._hyper_11_12_chunk
(1 row)

select compressed_chunk_name as compressed_chunk_name_before_recompression from compressed_chunk_info_view where hypertable_name = 'mytab' \gset
INSERT INTO mytab VALUES ('2023-01-01'::timestamptz, 2, 3, 2);
-- expect to see a different compressed chunk after recompressing now, as the operation is decompress + compress
call recompress_chunk(:'chunk_to_compress_mytab');
select compressed_chunk_name as compressed_chunk_name_after_recompression from compressed_chunk_info_view where hypertable_name = 'mytab' \gset
select :'compressed_chunk_name_before_recompression' as before_recompression, :'compressed_chunk_name_after_recompression' as after_recompression;
    before_recompression    |    after_recompression     
----------------------------+----------------------------
 compress_hyper_13_14_chunk | compress_hyper_13_15_chunk
(1 row)
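
-- the compressed chunk went from compress_hyper_13_14_chunk to
-- compress_hyper_13_15_chunk: the decompress + compress fallback drops the
-- old compressed chunk and creates a new one, unlike the segmentwise path
-- above, which rewrote compress_hyper_12_13_chunk in place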

-- check behavior with NULL values in segmentby columns
select '2022-01-01 09:00:00+00' as start_time \gset
create table nullseg_one (time timestamptz, a int, b int);
select create_hypertable('nullseg_one', 'time');
NOTICE:  adding not-null constraint to column "time"
     create_hypertable     
---------------------------
 (14,public,nullseg_one,t)
(1 row)

insert into nullseg_one values (:'start_time', 1, 1), (:'start_time', 1, 2), (:'start_time', 2, 2), (:'start_time', 2, 3);
alter table nullseg_one set (timescaledb.compress, timescaledb.compress_segmentby= 'a');
select compress_chunk(show_chunks('nullseg_one'));
              compress_chunk              
------------------------------------------
 _timescaledb_internal._hyper_14_16_chunk
(1 row)

insert into nullseg_one values (:'start_time', NULL, 4);
select show_chunks as chunk_to_compress from show_chunks('nullseg_one') limit 1 \gset
select compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chunk_name from compressed_chunk_info_view where hypertable_name = 'nullseg_one' \gset
call recompress_chunk(:'chunk_to_compress');
select * from :compressed_chunk_name;
                                 time                                 | a |                            b                             | _ts_meta_count | _ts_meta_sequence_num |        _ts_meta_min_1        |        _ts_meta_max_1        
----------------------------------------------------------------------+---+----------------------------------------------------------+----------------+-----------------------+------------------------------+------------------------------
 BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | 1 | BAAAAAAAAAAAAgAAAAAAAAABAAAAAgAAAAEAAAAAAAAAAgAAAAAAAAAC |              2 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | 2 | BAAAAAAAAAAAAwAAAAAAAAABAAAAAgAAAAEAAAAAAAAAAwAAAAAAAAAM |              2 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA             |   | BAAAAAAAAAAABAAAAAAAAAAEAAAAAQAAAAEAAAAAAAAABAAAAAAAAAAI |              1 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
(3 rows)
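
-- NULL in a segmentby column forms a segment of its own: the third compressed
-- row above has a = NULL and holds the newly inserted row. A commented-out
-- check of that segment (IS NULL is required, since a = NULL never matches):
-- select a, _ts_meta_count from :compressed_chunk_name where a is null;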

-- insert again, check both that reindexing works and that NULL values are handled properly
insert into nullseg_one values (:'start_time', NULL, 4);
call recompress_chunk(:'chunk_to_compress');
select * from :compressed_chunk_name;
                                 time                                 | a |                            b                             | _ts_meta_count | _ts_meta_sequence_num |        _ts_meta_min_1        |        _ts_meta_max_1        
----------------------------------------------------------------------+---+----------------------------------------------------------+----------------+-----------------------+------------------------------+------------------------------
 BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | 1 | BAAAAAAAAAAAAgAAAAAAAAABAAAAAgAAAAEAAAAAAAAAAgAAAAAAAAAC |              2 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | 2 | BAAAAAAAAAAAAwAAAAAAAAABAAAAAgAAAAEAAAAAAAAAAwAAAAAAAAAM |              2 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= |   | BAAAAAAAAAAABAAAAAAAAAAAAAAAAgAAAAEAAAAAAAAABAAAAAAAAAB4 |              2 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
(3 rows)

-- test multiple NULL segmentby columns
create table nullseg_many (time timestamptz, a int, b int, c int);
select create_hypertable('nullseg_many', 'time');
NOTICE:  adding not-null constraint to column "time"
     create_hypertable      
----------------------------
 (16,public,nullseg_many,t)
(1 row)

insert into nullseg_many values (:'start_time', 1, 1, 1), (:'start_time', 1, 2, 2), (:'start_time', 2, 2, 2), (:'start_time', 2, 3, 3), (:'start_time', 2, NULL, 3);
alter table nullseg_many set (timescaledb.compress, timescaledb.compress_segmentby= 'a, c');
select compress_chunk(show_chunks('nullseg_many'));
              compress_chunk              
------------------------------------------
 _timescaledb_internal._hyper_16_18_chunk
(1 row)

-- new segment (1, NULL)
insert into nullseg_many values (:'start_time', 1, 4, NULL);
select show_chunks as chunk_to_compress from show_chunks('nullseg_many') limit 1 \gset
select compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chunk_name from compressed_chunk_info_view where hypertable_name = 'nullseg_many' \gset
call recompress_chunk(:'chunk_to_compress');
select * from :compressed_chunk_name;
                                 time                                 | a |                                            b                                             | c | _ts_meta_count | _ts_meta_sequence_num |        _ts_meta_min_1        |        _ts_meta_max_1        
----------------------------------------------------------------------+---+------------------------------------------------------------------------------------------+---+----------------+-----------------------+------------------------------+------------------------------
 BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA             | 1 | BAAAAAAAAAAAAQAAAAAAAAABAAAAAQAAAAEAAAAAAAAAAgAAAAAAAAAC                                 | 1 |              1 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA             | 1 | BAAAAAAAAAAAAgAAAAAAAAACAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAE                                 | 2 |              1 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA             | 2 | BAAAAAAAAAAAAgAAAAAAAAACAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAE                                 | 2 |              1 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | 2 | BAEAAAAAAAAAAwAAAAAAAAADAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAGAAAAAgAAAAEAAAAAAAAAAQAAAAAAAAAC | 3 |              2 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA             | 1 | BAAAAAAAAAAABAAAAAAAAAAEAAAAAQAAAAEAAAAAAAAABAAAAAAAAAAI                                 |   |              1 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
(5 rows)

-- insert again, check both that reindexing works and that NULL values are handled properly
-- should match the existing segment (1, NULL)
insert into nullseg_many values (:'start_time', 1, NULL, NULL);
call recompress_chunk(:'chunk_to_compress');
select * from :compressed_chunk_name;
                                 time                                 | a |                                            b                                             | c | _ts_meta_count | _ts_meta_sequence_num |        _ts_meta_min_1        |        _ts_meta_max_1        
----------------------------------------------------------------------+---+------------------------------------------------------------------------------------------+---+----------------+-----------------------+------------------------------+------------------------------
 BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA             | 1 | BAAAAAAAAAAAAQAAAAAAAAABAAAAAQAAAAEAAAAAAAAAAgAAAAAAAAAC                                 | 1 |              1 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA             | 1 | BAAAAAAAAAAAAgAAAAAAAAACAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAE                                 | 2 |              1 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | 1 | BAEAAAAAAAAABAAAAAAAAAAEAAAAAQAAAAEAAAAAAAAABAAAAAAAAAAIAAAAAgAAAAEAAAAAAAAAAQAAAAAAAAAC |   |              2 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA             | 2 | BAAAAAAAAAAAAgAAAAAAAAACAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAE                                 | 2 |              1 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
 BAAAAneAR/JEAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAA7gAE7wCP5IgAAATvAI/kh/8= | 2 | BAEAAAAAAAAAAwAAAAAAAAADAAAAAQAAAAEAAAAAAAAAAwAAAAAAAAAGAAAAAgAAAAEAAAAAAAAAAQAAAAAAAAAC | 3 |              2 |                    10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
(5 rows)