diff --git a/src/guc.c b/src/guc.c
index 12347e30b..caa397024 100644
--- a/src/guc.c
+++ b/src/guc.c
@@ -71,7 +71,7 @@ TSDLLEXPORT bool ts_guc_enable_decompression_sorted_merge = true;
 bool ts_guc_enable_async_append = true;
 bool ts_guc_enable_chunkwise_aggregation = true;
 bool ts_guc_enable_vectorized_aggregation = true;
-TSDLLEXPORT bool ts_guc_enable_compression_indexscan = true;
+TSDLLEXPORT bool ts_guc_enable_compression_indexscan = false;
 TSDLLEXPORT bool ts_guc_enable_bulk_decompression = true;
 TSDLLEXPORT int ts_guc_bgw_log_level = WARNING;
 TSDLLEXPORT bool ts_guc_enable_skip_scan = true;
@@ -506,7 +506,7 @@ _guc_init(void)
 "Enable compression to take indexscan path",
 "Enable indexscan during compression, if matching index is found",
 &ts_guc_enable_compression_indexscan,
-true,
+false,
 PGC_USERSET,
 0,
 NULL,
diff --git a/tsl/test/expected/compression_bgw-13.out b/tsl/test/expected/compression_bgw-13.out
index 574c2a1d1..8f2321979 100644
--- a/tsl/test/expected/compression_bgw-13.out
+++ b/tsl/test/expected/compression_bgw-13.out
@@ -347,7 +347,7 @@ LOG: statement: CALL run_job(1004);
 LOG: acquiring locks for compressing "_timescaledb_internal._hyper_11_40_chunk"
 LOG: locks acquired for compressing "_timescaledb_internal._hyper_11_40_chunk"
 LOG: new compressed chunk "_timescaledb_internal.compress_hyper_13_61_chunk" created
-LOG: using index "_hyper_11_40_chunk_conditions_time_idx" to scan rows for compression
+LOG: using tuplesort to scan rows from "_hyper_11_40_chunk" for compression
 LOG: finished compressing 144 rows from "_hyper_11_40_chunk"
 LOG: job 1004 completed processing chunk _timescaledb_internal._hyper_11_40_chunk
 set client_min_messages TO NOTICE;
diff --git a/tsl/test/expected/compression_bgw-14.out b/tsl/test/expected/compression_bgw-14.out
index b4435ebec..6c2c4a286 100644
--- a/tsl/test/expected/compression_bgw-14.out
+++ b/tsl/test/expected/compression_bgw-14.out
@@ -347,7 +347,7 @@ LOG: statement: CALL run_job(1004);
 LOG: acquiring locks for compressing "_timescaledb_internal._hyper_11_40_chunk"
 LOG: locks acquired for compressing "_timescaledb_internal._hyper_11_40_chunk"
 LOG: new compressed chunk "_timescaledb_internal.compress_hyper_13_61_chunk" created
-LOG: using index "_hyper_11_40_chunk_conditions_time_idx" to scan rows for compression
+LOG: using tuplesort to scan rows from "_hyper_11_40_chunk" for compression
 LOG: finished compressing 144 rows from "_hyper_11_40_chunk"
 LOG: job 1004 completed processing chunk _timescaledb_internal._hyper_11_40_chunk
 set client_min_messages TO NOTICE;
diff --git a/tsl/test/expected/compression_bgw-15.out b/tsl/test/expected/compression_bgw-15.out
index b4435ebec..6c2c4a286 100644
--- a/tsl/test/expected/compression_bgw-15.out
+++ b/tsl/test/expected/compression_bgw-15.out
@@ -347,7 +347,7 @@ LOG: statement: CALL run_job(1004);
 LOG: acquiring locks for compressing "_timescaledb_internal._hyper_11_40_chunk"
 LOG: locks acquired for compressing "_timescaledb_internal._hyper_11_40_chunk"
 LOG: new compressed chunk "_timescaledb_internal.compress_hyper_13_61_chunk" created
-LOG: using index "_hyper_11_40_chunk_conditions_time_idx" to scan rows for compression
+LOG: using tuplesort to scan rows from "_hyper_11_40_chunk" for compression
 LOG: finished compressing 144 rows from "_hyper_11_40_chunk"
 LOG: job 1004 completed processing chunk _timescaledb_internal._hyper_11_40_chunk
 set client_min_messages TO NOTICE;
diff --git a/tsl/test/expected/compression_bgw-16.out b/tsl/test/expected/compression_bgw-16.out
index b4435ebec..6c2c4a286 100644
--- a/tsl/test/expected/compression_bgw-16.out
+++ b/tsl/test/expected/compression_bgw-16.out
@@ -347,7 +347,7 @@ LOG: statement: CALL run_job(1004);
 LOG: acquiring locks for compressing "_timescaledb_internal._hyper_11_40_chunk"
 LOG: locks acquired for compressing "_timescaledb_internal._hyper_11_40_chunk"
 LOG: new compressed chunk "_timescaledb_internal.compress_hyper_13_61_chunk" created
-LOG: using index "_hyper_11_40_chunk_conditions_time_idx" to scan rows for compression
+LOG: using tuplesort to scan rows from "_hyper_11_40_chunk" for compression
 LOG: finished compressing 144 rows from "_hyper_11_40_chunk"
 LOG: job 1004 completed processing chunk _timescaledb_internal._hyper_11_40_chunk
 set client_min_messages TO NOTICE;
diff --git a/tsl/test/expected/compression_indexscan.out b/tsl/test/expected/compression_indexscan.out
index 51fedf8fc..87d287cd7 100644
--- a/tsl/test/expected/compression_indexscan.out
+++ b/tsl/test/expected/compression_indexscan.out
@@ -37,6 +37,8 @@ generate_series('2018-03-02 1:00'::TIMESTAMPTZ, '2018-03-28 1:00', '1 hour') AS
 generate_series(1, 100, 1 ) AS g2(id)
 ORDER BY time;
+--Test with indexscan enabled
+SET timescaledb.enable_compression_indexscan = 'ON';
 --Test Set 1.1 [ Index(ASC, Null_First), Compression(ASC, Null_First) ]
 CREATE INDEX idx_asc_null_first ON tab1(id, time ASC NULLS FIRST);
 ALTER TABLE tab1 SET(timescaledb.compress, timescaledb.compress_segmentby = 'id', timescaledb.compress_orderby = 'time NULLS FIRST');
@@ -584,8 +586,14 @@ SELECT decompress_chunk(show_chunks('tab1'));
 DROP INDEX idx_desc_null_last;
 --Test Set 5 GUC SET timescaledb.enable_compression_indexscan
--- Default this flag will be true.
-SET timescaledb.enable_compression_indexscan = 'OFF';
+-- Default this flag will be false.
+RESET timescaledb.enable_compression_indexscan;
+SHOW timescaledb.enable_compression_indexscan;
+ timescaledb.enable_compression_indexscan
+------------------------------------------
+ off
+(1 row)
+
 SELECT compress_chunk(show_chunks('tab1'));
 INFO: compress_chunk_tuplesort_start
 INFO: compress_chunk_tuplesort_start
@@ -608,6 +616,7 @@ SELECT decompress_chunk(show_chunks('tab1'));
 _timescaledb_internal._hyper_1_4_chunk
 (4 rows)
+--Test with this guc enabled
 SET timescaledb.enable_compression_indexscan = 'ON';
 SELECT compress_chunk(show_chunks('tab1'));
 INFO: compress_chunk_indexscan_start matched index "_hyper_1_1_chunk_tab1_time_idx"
@@ -637,7 +646,7 @@ CREATE INDEX idx_asc_null_first ON tab1(id, time ASC NULLS FIRST);
 CREATE INDEX idx2_asc_null_first ON tab2(id, time ASC NULLS FIRST);
 ALTER TABLE tab1 SET(timescaledb.compress, timescaledb.compress_segmentby = 'id', timescaledb.compress_orderby = 'time NULLS FIRST');
 ALTER TABLE tab2 SET(timescaledb.compress, timescaledb.compress_segmentby = 'id', timescaledb.compress_orderby = 'time NULLS FIRST');
-SET timescaledb.enable_compression_indexscan = 'OFF';
+RESET timescaledb.enable_compression_indexscan;
 SELECT compress_chunk(show_chunks('tab1'));
 INFO: compress_chunk_tuplesort_start
 INFO: compress_chunk_tuplesort_start
diff --git a/tsl/test/sql/compression_indexscan.sql b/tsl/test/sql/compression_indexscan.sql
index 89b284902..f9bc1638e 100644
--- a/tsl/test/sql/compression_indexscan.sql
+++ b/tsl/test/sql/compression_indexscan.sql
@@ -35,6 +35,8 @@ generate_series(1, 100, 1 ) AS g2(id)
 ORDER BY time;
+--Test with indexscan enabled
+SET timescaledb.enable_compression_indexscan = 'ON';
 --Test Set 1.1 [ Index(ASC, Null_First), Compression(ASC, Null_First) ]
 CREATE INDEX idx_asc_null_first ON tab1(id, time ASC NULLS FIRST);
 ALTER TABLE tab1 SET(timescaledb.compress, timescaledb.compress_segmentby = 'id', timescaledb.compress_orderby = 'time NULLS FIRST');
@@ -163,10 +165,12 @@ SELECT decompress_chunk(show_chunks('tab1'));
 DROP INDEX idx_desc_null_last;
 --Test Set 5 GUC SET timescaledb.enable_compression_indexscan
--- Default this flag will be true.
-SET timescaledb.enable_compression_indexscan = 'OFF';
+-- Default this flag will be false.
+RESET timescaledb.enable_compression_indexscan;
+SHOW timescaledb.enable_compression_indexscan;
 SELECT compress_chunk(show_chunks('tab1'));
 SELECT decompress_chunk(show_chunks('tab1'));
+--Test with this guc enabled
 SET timescaledb.enable_compression_indexscan = 'ON';
 SELECT compress_chunk(show_chunks('tab1'));
 SELECT decompress_chunk(show_chunks('tab1'));
@@ -177,7 +181,7 @@ CREATE INDEX idx_asc_null_first ON tab1(id, time ASC NULLS FIRST);
 CREATE INDEX idx2_asc_null_first ON tab2(id, time ASC NULLS FIRST);
 ALTER TABLE tab1 SET(timescaledb.compress, timescaledb.compress_segmentby = 'id', timescaledb.compress_orderby = 'time NULLS FIRST');
 ALTER TABLE tab2 SET(timescaledb.compress, timescaledb.compress_segmentby = 'id', timescaledb.compress_orderby = 'time NULLS FIRST');
-SET timescaledb.enable_compression_indexscan = 'OFF';
+RESET timescaledb.enable_compression_indexscan;
 SELECT compress_chunk(show_chunks('tab1'));
 SET timescaledb.enable_compression_indexscan = 'ON';
 SELECT compress_chunk(show_chunks('tab2'));
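Note: with this patch the timescaledb.enable_compression_indexscan GUC defaults to off, so a deployment that still wants index-based row scanning during compression must opt in explicitly. Below is a minimal sketch of that opt-in, assuming a compression-enabled hypertable named "metrics" (the table name is illustrative, not part of this patch):

-- Opt back in for the current session; the shipped default is now off.
SET timescaledb.enable_compression_indexscan = 'ON';

-- Compress all chunks of the (hypothetical) hypertable; with the GUC on,
-- a matching index is used to scan rows when one exists, otherwise
-- compression falls back to tuplesort.
SELECT compress_chunk(show_chunks('metrics'));

-- Return to the new default (off).
RESET timescaledb.enable_compression_indexscan;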