Fix segfault on insert into internal compressed table
When trying to insert into the internal compressed hypertable, timescaledb would segfault. This patch blocks direct inserts into the internal compressed hypertable through our tuple routing. Internally we don't use this code path for compression, as we create chunks explicitly and insert directly into those chunks in compress_chunk.

Fixes #3920
parent 99746ed8ba
commit 765d7375ce
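For context, a minimal sketch of the scenario this patch guards against, modeled on the regression test added below. The table name metrics and the variable COMPRESSED_TABLE are illustrative, not part of the commit; the catalog lookup mirrors the one used in the test.

-- Illustrative repro sketch (assumes a TimescaleDB build with compression support).
-- "metrics" and "COMPRESSED_TABLE" are hypothetical names chosen for this example.
CREATE TABLE metrics(time timestamptz NOT NULL, value float);
SELECT table_name FROM create_hypertable('metrics', 'time');
ALTER TABLE metrics SET (timescaledb.compress);

-- Look up the internal compressed hypertable that backs "metrics".
SELECT format('%I.%I', ht.schema_name, ht.table_name) AS "COMPRESSED_TABLE"
FROM _timescaledb_catalog.hypertable ht
JOIN _timescaledb_catalog.hypertable uncompressed
  ON ht.id = uncompressed.compressed_hypertable_id
WHERE uncompressed.table_name = 'metrics' \gset

-- Before this patch the insert below crashed the server; with the patch, tuple
-- routing rejects it with "direct insert into internal compressed hypertable is not supported".
INSERT INTO :COMPRESSED_TABLE SELECT;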
@@ -117,6 +117,14 @@ ts_chunk_dispatch_get_chunk_insert_state(ChunkDispatch *dispatch, Point *point,
 	ChunkInsertState *cis;
 	bool cis_changed = true;
 
+	/* Direct inserts into internal compressed hypertable is not supported.
+	 * For compression chunks are created explicitly by compress_chunk and
+	 * inserted into directly so we should never end up in this code path
+	 * for a compressed hypertable.
+	 */
+	if (dispatch->hypertable->fd.compression_state == HypertableInternalCompressionTable)
+		elog(ERROR, "direct insert into internal compressed hypertable is not supported");
+
 	cis = ts_subspace_store_get(dispatch->cache, point);
 
 	if (NULL == cis)
@@ -203,6 +203,12 @@ ts_subspace_store_get(const SubspaceStore *store, const Point *target)
 
 	Assert(target->cardinality == store->num_dimensions);
 
+	/* The internal compressed hypertable has no dimensions as
+	 * chunks are created explicitly by compress_chunk and linked
+	 * to the source chunk. */
+	if (store->num_dimensions == 0)
+		return NULL;
+
 	for (i = 0; i < target->cardinality; i++)
 	{
 		match = ts_dimension_vec_find_slice(vec, target->coordinates[i]);
@@ -868,13 +868,13 @@ SELECT table_name from create_hypertable( 'conditions', 'timec');
  conditions
 (1 row)
 
-INSERT INTO conditions
+INSERT INTO conditions
 SELECT generate_series('2010-01-01 09:00:00-08'::timestamptz, '2010-01-03 09:00:00-08'::timestamptz, '1 day'), 55 , 45;
 CREATE MATERIALIZED VIEW cagg_conditions WITH (timescaledb.continuous,
 timescaledb.materialized_only = true)
 AS
 SELECT time_bucket( '7 days', timec) bkt, count(*) cnt, sum(temperature) sumb
-FROM conditions
+FROM conditions
 GROUP BY time_bucket('7 days', timec);
 NOTICE: refreshing continuous aggregate "cagg_conditions"
 SELECT * FROM cagg_conditions ORDER BY 1;
@@ -890,7 +890,6 @@ SELECT compress_chunk(ch) FROM show_chunks('conditions') ch;
  _timescaledb_internal._hyper_15_24_chunk
 (1 row)
 
-
 SELECT chunk_name, range_start, range_end, is_compressed
 FROM timescaledb_information.chunks
 WHERE hypertable_name = 'conditions';
@@ -910,3 +909,22 @@ SELECT * FROM cagg_conditions ORDER BY 1;
  Sun Dec 27 16:00:00 2009 PST | 5 | 185
 (1 row)
 
+-- direct insert into interal compressed hypertable should be blocked
+CREATE TABLE direct_insert(time timestamptz not null);
+SELECT table_name FROM create_hypertable('direct_insert','time');
+  table_name
+---------------
+ direct_insert
+(1 row)
+
+ALTER TABLE direct_insert SET(timescaledb.compress);
+SELECT
+format('%I.%I', ht.schema_name, ht.table_name) AS "TABLENAME"
+FROM
+_timescaledb_catalog.hypertable ht
+INNER JOIN _timescaledb_catalog.hypertable uncompress ON (ht.id = uncompress.compressed_hypertable_id
+AND uncompress.table_name = 'direct_insert') \gset
+\set ON_ERROR_STOP 0
+INSERT INTO :TABLENAME SELECT;
+ERROR: direct insert into internal compressed hypertable is not supported
+\set ON_ERROR_STOP 1
@@ -558,21 +558,21 @@ CREATE TABLE conditions (
 humidity DOUBLE PRECISION NULL
 );
 SELECT table_name from create_hypertable( 'conditions', 'timec');
-INSERT INTO conditions
+INSERT INTO conditions
 SELECT generate_series('2010-01-01 09:00:00-08'::timestamptz, '2010-01-03 09:00:00-08'::timestamptz, '1 day'), 55 , 45;
 
 CREATE MATERIALIZED VIEW cagg_conditions WITH (timescaledb.continuous,
 timescaledb.materialized_only = true)
 AS
 SELECT time_bucket( '7 days', timec) bkt, count(*) cnt, sum(temperature) sumb
-FROM conditions
+FROM conditions
 GROUP BY time_bucket('7 days', timec);
 
 SELECT * FROM cagg_conditions ORDER BY 1;
 
 ALTER TABLE conditions SET (timescaledb.compress);
 SELECT compress_chunk(ch) FROM show_chunks('conditions') ch;
 
 
 SELECT chunk_name, range_start, range_end, is_compressed
 FROM timescaledb_information.chunks
 WHERE hypertable_name = 'conditions';
@@ -584,3 +584,20 @@ INSERT INTO conditions VALUES('2010-01-01 12:00:00-08', 10, 20);
 --refresh cagg, should have updated info
 CALL refresh_continuous_aggregate('cagg_conditions', NULL, '2011-01-01 12:00:00-08' );
 SELECT * FROM cagg_conditions ORDER BY 1;
+
+-- direct insert into interal compressed hypertable should be blocked
+CREATE TABLE direct_insert(time timestamptz not null);
+SELECT table_name FROM create_hypertable('direct_insert','time');
+ALTER TABLE direct_insert SET(timescaledb.compress);
+
+SELECT
+format('%I.%I', ht.schema_name, ht.table_name) AS "TABLENAME"
+FROM
+_timescaledb_catalog.hypertable ht
+INNER JOIN _timescaledb_catalog.hypertable uncompress ON (ht.id = uncompress.compressed_hypertable_id
+AND uncompress.table_name = 'direct_insert') \gset
+
+\set ON_ERROR_STOP 0
+INSERT INTO :TABLENAME SELECT;
+\set ON_ERROR_STOP 1
+