Fix filtering heap scan keys based on index used
When doing an index scan, we should filter out of the key columns any columns that are already used as index scan keys, since the key columns are later used to generate heap scan keys.
parent 4bb97f1875
commit e339f1b7ce
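The core of the change can be illustrated with a small sketch. This is not the TimescaleDB implementation: the real code tracks columns in PostgreSQL Bitmapsets (key_columns, index_columns) and builds ScanKeyData arrays, while the standalone program below only models the set arithmetic with a plain bitmask.

/*
 * Minimal standalone sketch of the idea above, NOT the TimescaleDB code:
 * columns already covered by index scan keys are dropped from the key-column
 * set before heap scan keys are generated, so the heap filter does not
 * re-check what the index scan already guarantees.  The real implementation
 * stores columns in a PostgreSQL Bitmapset; a plain uint64 bitmask is used
 * here only to keep the example self-contained.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint64_t colset; /* one bit per column number, illustrative only */

static colset
filter_heap_key_columns(colset key_columns, colset index_columns)
{
	/* Keep only the key columns that the index scan keys do not cover. */
	return key_columns & ~index_columns;
}

int
main(void)
{
	colset key_columns = (1u << 1) | (1u << 2) | (1u << 3); /* e.g. time, device, label */
	colset index_columns = (1u << 2) | (1u << 3);           /* covered by the chosen index */

	colset heap_columns = filter_heap_key_columns(key_columns, index_columns);

	/* Only column 1 remains to be checked via heap scan keys. */
	printf("heap key columns bitmask: 0x%llx\n", (unsigned long long) heap_columns);
	return 0;
}

The effect is visible in the updated test output further below: once an index scan is chosen, the heap scan key count drops by the number of index scan keys (for example "index 2, heap 2" instead of "heap 4").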
.unreleased/pr_7286 (new file, 1 addition)
@@ -0,0 +1 @@
+Fixes: #7286: Fix index column check while searching for index
@@ -157,6 +157,16 @@ decompress_batches_for_insert(const ChunkInsertState *cis, TupleTableSlot *slot)
 	if (index_rel)
 		null_columns = NULL;
 
+	if (ts_guc_debug_compression_path_info)
+	{
+		elog(INFO,
+			 "Using %s scan with scan keys: index %d, heap %d, memory %d. ",
+			 index_rel ? "index" : "table",
+			 num_index_scankeys,
+			 num_heap_scankeys,
+			 num_mem_scankeys);
+	}
+
 	/*
 	 * Using latest snapshot to scan the heap since we are doing this to build
 	 * the index on the uncompressed chunks in order to do speculative insertion
@@ -332,20 +342,12 @@ decompress_batch_beginscan(Relation in_rel, Relation index_rel, Snapshot snapsho
 
 	if (index_rel)
 	{
-		if (ts_guc_debug_compression_path_info)
-		{
-			elog(INFO, "Using index scan for DML decompression");
-		}
 		scan->index_scan = index_beginscan(in_rel, index_rel, snapshot, num_scankeys, 0);
 		index_rescan(scan->index_scan, scankeys, num_scankeys, NULL, 0);
 		scan->scan = NULL;
 	}
 	else
 	{
-		if (ts_guc_debug_compression_path_info)
-		{
-			elog(INFO, "Using table scan for DML decompression");
-		}
 		scan->scan = table_beginscan(in_rel, snapshot, num_scankeys, scankeys);
 		scan->index_scan = NULL;
 	}
@@ -322,11 +322,11 @@ build_index_scankeys_using_slot(Oid hypertable_relid, Relation in_rel, Relation
 		AttrNumber idx_attnum = AttrOffsetGetAttrNumber(i);
 		AttrNumber in_attnum = index_rel->rd_index->indkey.values[i];
 		const NameData *attname = attnumAttName(in_rel, in_attnum);
+		AttrNumber column_attno =
+			get_attnum(out_rel->rd_id, NameStr(*attname)) - FirstLowInvalidHeapAttributeNumber;
 
 		/* Make sure we find columns in key columns in order to select the right index */
-		if (!bms_is_member((get_attnum(out_rel->rd_id, NameStr(*attname)) -
-								FirstLowInvalidHeapAttributeNumber),
-						   key_columns))
+		if (!bms_is_member(column_attno, key_columns))
 		{
 			break;
 		}
@@ -337,6 +337,7 @@ build_index_scankeys_using_slot(Oid hypertable_relid, Relation in_rel, Relation
 
 		if (isnull)
 		{
+			*index_columns = bms_add_member(*index_columns, column_attno);
 			ScanKeyEntryInitialize(&scankeys[(*num_scan_keys)++],
 								   SK_ISNULL | SK_SEARCHNULL,
 								   idx_attnum,
@@ -377,6 +378,7 @@ build_index_scankeys_using_slot(Oid hypertable_relid, Relation in_rel, Relation
 		Ensure(OidIsValid(opcode),
 			   "no opcode found for column operator of a hypertable column");
 
+		*index_columns = bms_add_member(*index_columns, column_attno);
 		ScanKeyEntryInitialize(&scankeys[(*num_scan_keys)++],
 							   0, /* flags */
 							   idx_attnum,
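One detail worth noting in the hunks above is the attribute-number arithmetic: column_attno is computed as get_attnum(...) - FirstLowInvalidHeapAttributeNumber before it is stored in the key_columns and *index_columns sets. PostgreSQL attribute numbers can be zero or negative (system columns), while Bitmapset members must be non-negative, so attnos are shifted by that constant first. Below is a tiny standalone sketch of the convention, not the TimescaleDB code; the constant value is a placeholder, since the real one lives in PostgreSQL's access/sysattr.h.

/*
 * Sketch of the attribute-number offsetting used when keeping column numbers
 * in a bitmap set.  Placeholder constant; see PostgreSQL's access/sysattr.h
 * for the real FirstLowInvalidHeapAttributeNumber.
 */
#include <stdio.h>

#define FIRST_LOW_INVALID_HEAP_ATTRIBUTE_NUMBER (-8) /* placeholder value */

static int
attno_to_bitmap_member(int attno)
{
	/* Shift so that even system-column attnos map to non-negative members. */
	return attno - FIRST_LOW_INVALID_HEAP_ATTRIBUTE_NUMBER;
}

int
main(void)
{
	/* A user column (attno 3) and a system column (attno -1). */
	printf("attno  3 -> member %d\n", attno_to_bitmap_member(3));
	printf("attno -1 -> member %d\n", attno_to_bitmap_member(-1));
	return 0;
}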
@@ -49,13 +49,13 @@ SELECT count(*) FROM ONLY :CHUNK;
 -- should fail due to multiple entries with same time value
 \set ON_ERROR_STOP 0
 INSERT INTO comp_conflicts_1 VALUES ('2020-01-01','d1',0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 2, memory 1.
 ERROR: duplicate key value violates unique constraint "1_1_comp_conflicts_1_pkey"
 INSERT INTO comp_conflicts_1 VALUES
 ('2020-01-01','d1',0.1),
 ('2020-01-01','d2',0.2),
 ('2020-01-01','d3',0.3);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 2, memory 1.
 ERROR: duplicate key value violates unique constraint "1_1_comp_conflicts_1_pkey"
 \set ON_ERROR_STOP 1
 -- no data should be in uncompressed chunk since the inserts failed and their transaction rolled back
@@ -71,11 +71,11 @@ BEGIN;
 ('2020-01-01 0:00:01','d1',0.1),
 ('2020-01-01 0:00:02','d2',0.2),
 ('2020-01-01 0:00:03','d3',0.3);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 2, memory 1.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 2, memory 1.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 2, memory 1.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
 -- no data should have moved into uncompressed chunk for conflict check
 -- since we used metadata optimization to guarantee uniqueness
@@ -96,11 +96,11 @@ SELECT count(*) FROM ONLY :CHUNK;
 -- should fail since it conflicts with existing row
 \set ON_ERROR_STOP 0
 INSERT INTO comp_conflicts_1 VALUES ('2020-01-01','d1',0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 2, memory 1.
 ERROR: duplicate key value violates unique constraint "1_1_comp_conflicts_1_pkey"
 \set ON_ERROR_STOP 1
 INSERT INTO comp_conflicts_1 VALUES ('2020-01-01','d1',0.1) ON CONFLICT DO NOTHING;
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 2, memory 1.
 -- data should have move into uncompressed chunk for conflict check
 SELECT count(*) FROM ONLY :CHUNK;
  count
@@ -134,16 +134,16 @@ SELECT count(*) FROM ONLY :CHUNK;
 -- should fail due to multiple entries with same time, device value
 \set ON_ERROR_STOP 0
 INSERT INTO comp_conflicts_2 VALUES ('2020-01-01','d1',0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 ERROR: duplicate key value violates unique constraint "3_2_comp_conflicts_2_time_device_key"
 INSERT INTO comp_conflicts_2 VALUES ('2020-01-01','d2',0.2);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 ERROR: duplicate key value violates unique constraint "3_2_comp_conflicts_2_time_device_key"
 INSERT INTO comp_conflicts_2 VALUES
 ('2020-01-01','d1',0.1),
 ('2020-01-01','d2',0.2),
 ('2020-01-01','d3',0.3);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 ERROR: duplicate key value violates unique constraint "3_2_comp_conflicts_2_time_device_key"
 \set ON_ERROR_STOP 1
 -- no data should be in uncompressed chunk since the inserts failed and their transaction rolled back
@@ -159,11 +159,11 @@ BEGIN;
 ('2020-01-01 0:00:01','d1',0.1),
 ('2020-01-01 0:00:01','d2',0.2),
 ('2020-01-01 0:00:01','d3',0.3);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
 -- no data should have move into uncompressed chunk for conflict check
 -- since we used metadata optimization to guarantee uniqueness
@@ -184,11 +184,11 @@ SELECT count(*) FROM ONLY :CHUNK;
 -- should fail since it conflicts with existing row
 \set ON_ERROR_STOP 0
 INSERT INTO comp_conflicts_2 VALUES ('2020-01-01','d1',0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 ERROR: duplicate key value violates unique constraint "3_2_comp_conflicts_2_time_device_key"
 \set ON_ERROR_STOP 1
 INSERT INTO comp_conflicts_2 VALUES ('2020-01-01','d1',0.1) ON CONFLICT DO NOTHING;
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 -- data should have move into uncompressed chunk for conflict check
 SELECT count(*) FROM ONLY :CHUNK;
  count
@@ -223,28 +223,28 @@ SELECT count(*) FROM ONLY :CHUNK;
 -- should fail due to multiple entries with same time, device value
 \set ON_ERROR_STOP 0
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d2', 'label', 0.2);
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 INSERT INTO comp_conflicts_3 VALUES
 ('2020-01-01','d1', 'label', 0.1),
 ('2020-01-01','d2', 'label', 0.2),
 ('2020-01-01','d3', 'label', 0.3);
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 -- should work the same without the index present
 BEGIN;
 DROP INDEX _timescaledb_internal.compress_hyper_6_6_chunk_device_label__ts_meta_sequence_num_idx;
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 ROLLBACK;
 BEGIN;
 DROP INDEX _timescaledb_internal.compress_hyper_6_6_chunk_device_label__ts_meta_sequence_num_idx;
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d2', 'label', 0.2);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 ROLLBACK;
 BEGIN;
@@ -253,7 +253,7 @@ BEGIN;
 ('2020-01-01','d1', 'label', 0.1),
 ('2020-01-01','d2', 'label', 0.2),
 ('2020-01-01','d3', 'label', 0.3);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 ROLLBACK;
 -- using superuser to create indexes on compressed chunks
@@ -265,7 +265,7 @@ BEGIN;
 CREATE INDEX partial_index ON _timescaledb_internal.compress_hyper_6_6_chunk (device, label, _ts_meta_sequence_num)
 WHERE label LIKE 'missing';
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 ROLLBACK;
 -- ignore matching covering index
@@ -273,23 +273,23 @@ BEGIN;
 DROP INDEX _timescaledb_internal.compress_hyper_6_6_chunk_device_label__ts_meta_sequence_num_idx;
 CREATE INDEX covering_index ON _timescaledb_internal.compress_hyper_6_6_chunk (device) INCLUDE (label, _ts_meta_sequence_num);
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 1, heap 3, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 ROLLBACK;
--- ignore matching but out of order segmentby index
+-- out of order segmentby index, index is still usable
 BEGIN;
 DROP INDEX _timescaledb_internal.compress_hyper_6_6_chunk_device_label__ts_meta_sequence_num_idx;
 CREATE INDEX covering_index ON _timescaledb_internal.compress_hyper_6_6_chunk (label, device, _ts_meta_sequence_num);
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 ROLLBACK;
--- ignore index with sequence number in the middle
+-- index with sequence number in the middle, index should be usable with single index scan key
 BEGIN;
 DROP INDEX _timescaledb_internal.compress_hyper_6_6_chunk_device_label__ts_meta_sequence_num_idx;
 CREATE INDEX covering_index ON _timescaledb_internal.compress_hyper_6_6_chunk (device, _ts_meta_sequence_num, label);
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 1, heap 3, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 ROLLBACK;
 -- ignore expression index
@@ -297,7 +297,7 @@ BEGIN;
 DROP INDEX _timescaledb_internal.compress_hyper_6_6_chunk_device_label__ts_meta_sequence_num_idx;
 CREATE INDEX partial_index ON _timescaledb_internal.compress_hyper_6_6_chunk (device, lower(label), _ts_meta_sequence_num);
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 ROLLBACK;
 -- ignore non-btree index
@@ -305,7 +305,7 @@ BEGIN;
 DROP INDEX _timescaledb_internal.compress_hyper_6_6_chunk_device_label__ts_meta_sequence_num_idx;
 CREATE INDEX partial_index ON _timescaledb_internal.compress_hyper_6_6_chunk USING brin (device, label, _ts_meta_sequence_num);
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 ROLLBACK;
 \set ON_ERROR_STOP 1
@@ -347,7 +347,7 @@ ROLLBACK;
 -- should succeed since there are no conflicts in the values
 BEGIN;
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01 0:00:01','d1', 'label', 0.1);
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 INFO: Number of compressed rows fetched from index: 1. Number of compressed rows filtered by heap filters: 1.
 -- no data should have move into uncompressed chunk for conflict check
 -- since we used metadata optimization to guarantee uniqueness
@@ -363,7 +363,7 @@ ROLLBACK;
 BEGIN;
 DROP INDEX _timescaledb_internal.compress_hyper_6_6_chunk_device_label__ts_meta_sequence_num_idx;
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01 0:00:01','d1', 'label', 0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 1.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
 -- no data should have move into uncompressed chunk for conflict check
 -- since we used metadata optimization to guarantee uniqueness
@@ -379,11 +379,11 @@ BEGIN;
 ('2020-01-01 0:00:01','d1', 'label', 0.1),
 ('2020-01-01 0:00:01','d2', 'label', 0.2),
 ('2020-01-01 0:00:01','d3', 'label', 0.3);
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 INFO: Number of compressed rows fetched from index: 1. Number of compressed rows filtered by heap filters: 1.
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 INFO: Number of compressed rows fetched from index: 1. Number of compressed rows filtered by heap filters: 1.
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 INFO: Number of compressed rows fetched from index: 0. Number of compressed rows filtered by heap filters: 0.
 -- no data for should have move into uncompressed chunk for conflict check
 -- since we used metadata optimization to guarantee uniqueness
@@ -401,11 +401,11 @@ BEGIN;
 ('2020-01-01 0:00:01','d1', 'label', 0.1),
 ('2020-01-01 0:00:01','d2', 'label', 0.2),
 ('2020-01-01 0:00:01','d3', 'label', 0.3);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 1.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 1.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 1.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
 -- no data for should have move into uncompressed chunk for conflict check
 -- since we used metadata optimization to guarantee uniqueness
@@ -418,7 +418,7 @@ INFO: Number of compressed rows fetched from table scan: 0. Number of compresse
 ROLLBACK;
 BEGIN;
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01 0:00:01','d3', 'label', 0.2);
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 INFO: Number of compressed rows fetched from index: 0. Number of compressed rows filtered by heap filters: 0.
 -- count = 1 since no data should have move into uncompressed chunk for conflict check since d3 is new segment
 SELECT count(*) FROM ONLY :CHUNK;
@@ -438,11 +438,11 @@ SELECT count(*) FROM ONLY :CHUNK;
 -- should fail since it conflicts with existing row
 \set ON_ERROR_STOP 0
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 ERROR: duplicate key value violates unique constraint "5_3_comp_conflicts_3_time_device_label_key"
 \set ON_ERROR_STOP 1
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1) ON CONFLICT DO NOTHING;
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 2, heap 2, memory 1.
 -- data should have move into uncompressed chunk for conflict check
 SELECT count(*) FROM ONLY :CHUNK;
  count
@@ -489,7 +489,7 @@ ROLLBACK;
 -- should succeed since there are no conflicts in the values
 BEGIN;
 INSERT INTO comp_conflicts_4 VALUES ('2020-01-01 2:00:01','d1',0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
 -- no data should have move into uncompressed chunk for conflict check
 -- since we used metadata optimization to guarantee uniqueness
@@ -505,11 +505,11 @@ BEGIN;
 ('2020-01-01 2:00:01','d1',0.1),
 ('2020-01-01 2:00:01','d2',0.2),
 ('2020-01-01 2:00:01','d3',0.3);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
 -- no data for should have move into uncompressed chunk for conflict check
 -- since we used metadata optimization to guarantee uniqueness
@@ -522,7 +522,7 @@ INFO: Number of compressed rows fetched from table scan: 0. Number of compresse
 ROLLBACK;
 BEGIN;
 INSERT INTO comp_conflicts_4 VALUES ('2020-01-01 0:00:01','d3',0.2);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 INFO: Number of compressed rows fetched from table scan: 0. Number of compressed rows filtered: 0.
 -- count = 1 since no data should have move into uncompressed chunk for conflict check since d3 is new segment
 SELECT count(*) FROM ONLY :CHUNK;
@@ -542,7 +542,7 @@ SELECT count(*) FROM ONLY :CHUNK;
 -- should fail since it conflicts with existing row
 \set ON_ERROR_STOP 0
 INSERT INTO comp_conflicts_4 VALUES ('2020-01-01','d1',0.1);
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 ERROR: duplicate key value violates unique constraint "7_4_comp_conflicts_4_time_device_key"
 \set ON_ERROR_STOP 1
 -- data not should have move into uncompressed chunk for conflict check
@@ -553,9 +553,9 @@ SELECT count(*) FROM ONLY :CHUNK;
 (1 row)
 
 INSERT INTO comp_conflicts_4 VALUES ('2020-01-01 0:00:01','d1',0.1) ON CONFLICT DO NOTHING;
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 INSERT INTO comp_conflicts_4 VALUES ('2020-01-01 0:30:00','d1',0.1) ON CONFLICT DO NOTHING;
-INFO: Using table scan for DML decompression
+INFO: Using table scan with scan keys: index 0, heap 4, memory 2.
 -- data should have move into uncompressed chunk for conflict check
 -- 2 segments (count = 2000)
 SELECT count(*) FROM ONLY :CHUNK;
@@ -643,7 +643,7 @@ SELECT COUNT(*) FROM compressed_ht WHERE name = 'ON CONFLICT DO UPDATE';
 INSERT INTO compressed_ht VALUES ('2017-12-28 01:10:28.192199+05:30', '1', 0.876, 4.123, 'new insert row')
 ON conflict(sensor_id, time)
 DO UPDATE SET sensor_id = excluded.sensor_id , name = 'ON CONFLICT DO UPDATE';
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 1, heap 2, memory 1.
 INFO: Number of compressed rows fetched from index: 1. Number of compressed rows filtered by heap filters: 0.
 -- should report 1 row
 SELECT COUNT(*) FROM compressed_ht WHERE name = 'ON CONFLICT DO UPDATE';
@@ -667,7 +667,7 @@ WHERE hypertable_name = 'compressed_ht' ORDER BY chunk_name;
 INSERT INTO compressed_ht VALUES ('2022-01-24 01:10:28.192199+05:30', '6', 0.876, 4.123, 'new insert row')
 ON conflict(sensor_id, time)
 DO UPDATE SET sensor_id = excluded.sensor_id , name = 'ON CONFLICT DO UPDATE' RETURNING *;
-INFO: Using index scan for DML decompression
+INFO: Using index scan with scan keys: index 1, heap 2, memory 1.
 INFO: Number of compressed rows fetched from index: 1. Number of compressed rows filtered by heap filters: 0.
 time | sensor_id | cpu | temperature | name
 -------------------------------------+-----------+-------+-------------+-----------------------
@@ -194,14 +194,14 @@ BEGIN;
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
 ROLLBACK;
 
--- ignore matching but out of order segmentby index
+-- out of order segmentby index, index is still usable
 BEGIN;
 DROP INDEX _timescaledb_internal.compress_hyper_6_6_chunk_device_label__ts_meta_sequence_num_idx;
 CREATE INDEX covering_index ON _timescaledb_internal.compress_hyper_6_6_chunk (label, device, _ts_meta_sequence_num);
 INSERT INTO comp_conflicts_3 VALUES ('2020-01-01','d1', 'label', 0.1);
 ROLLBACK;
 
--- ignore index with sequence number in the middle
+-- index with sequence number in the middle, index should be usable with single index scan key
 BEGIN;
 DROP INDEX _timescaledb_internal.compress_hyper_6_6_chunk_device_label__ts_meta_sequence_num_idx;
 CREATE INDEX covering_index ON _timescaledb_internal.compress_hyper_6_6_chunk (device, _ts_meta_sequence_num, label);