-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
\ir include/setup_hypercore.sql
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
\set hypertable readings
\ir hypercore_helpers.sql
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
-- Function to run an explain analyze and do replacements on the
-- emitted plan. This is intended to be used when the structure of the
-- plan is important, but not the specific chunks scanned nor the
-- number of heap fetches, rows, loops, etc.
create function explain_analyze_anonymize(text) returns setof text
language plpgsql as
$$
declare
    ln text;
begin
    for ln in
        execute format('explain (analyze, costs off, summary off, timing off, decompress_cache_stats) %s', $1)
    loop
        if trim(both from ln) like 'Group Key:%' then
            continue;
        end if;
        ln := regexp_replace(ln, 'Array Cache Hits: \d+', 'Array Cache Hits: N');
        ln := regexp_replace(ln, 'Array Cache Misses: \d+', 'Array Cache Misses: N');
        ln := regexp_replace(ln, 'Array Cache Evictions: \d+', 'Array Cache Evictions: N');
        ln := regexp_replace(ln, 'Heap Fetches: \d+', 'Heap Fetches: N');
        ln := regexp_replace(ln, 'Workers Launched: \d+', 'Workers Launched: N');
        ln := regexp_replace(ln, 'actual rows=\d+ loops=\d+', 'actual rows=N loops=N');
        ln := regexp_replace(ln, '_hyper_\d+_\d+_chunk', '_hyper_I_N_chunk', 1, 0);
        return next ln;
    end loop;
end;
$$;
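-- Editorial example (not part of the original test): the helper takes
-- an arbitrary statement as text, so callers typically interpolate a
-- chunk name with format(), as done further below, e.g.
--   select explain_analyze_anonymize(format($$select * from %s$$, :'chunk1'));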
create function explain_anonymize(text) returns setof text
language plpgsql as
$$
declare
    ln text;
begin
    for ln in
        execute format('explain (costs off, summary off, timing off) %s', $1)
    loop
        ln := regexp_replace(ln, 'Array Cache Hits: \d+', 'Array Cache Hits: N');
        ln := regexp_replace(ln, 'Array Cache Misses: \d+', 'Array Cache Misses: N');
        ln := regexp_replace(ln, 'Array Cache Evictions: \d+', 'Array Cache Evictions: N');
        ln := regexp_replace(ln, 'Heap Fetches: \d+', 'Heap Fetches: N');
        ln := regexp_replace(ln, 'Workers Launched: \d+', 'Workers Launched: N');
        ln := regexp_replace(ln, 'actual rows=\d+ loops=\d+', 'actual rows=N loops=N');
        ln := regexp_replace(ln, '_hyper_\d+_\d+_chunk', '_hyper_I_N_chunk', 1, 0);
        return next ln;
    end loop;
end;
$$;
create table :hypertable(
    metric_id serial,
    created_at timestamptz not null unique,
    location_id smallint,  --segmentby attribute with index
    owner_id bigint,       --segmentby attribute without index
    device_id bigint,      --non-segmentby attribute
    temp float8,
    humidity float4
);
create index hypertable_location_id_idx on :hypertable (location_id);
create index hypertable_device_id_idx on :hypertable (device_id);
select create_hypertable(:'hypertable', by_range('created_at'));
 create_hypertable
-------------------
 (1,t)
(1 row)

-- Disable incremental sort to make tests stable
set enable_incremental_sort = false;
select setseed(1);
 setseed
---------
 
(1 row)

-- Insert rows into the tables.
--
-- The original rows are inserted with timestamps every 5 minutes. Any
-- other timestamps are inserted as part of the test.
insert into :hypertable (created_at, location_id, device_id, owner_id, temp, humidity)
select t, ceil(random()*10), ceil(random()*30), ceil(random() * 5), random()*40, random()*100
from generate_series('2022-06-01'::timestamptz, '2022-07-01', '5m') t;
alter table :hypertable set (
    timescaledb.compress,
    timescaledb.compress_orderby = 'created_at',
    timescaledb.compress_segmentby = 'location_id, owner_id'
);
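-- Editorial note (not part of the original test): with these
-- compression settings in place, chunks of this hypertable could be
-- converted to the table access method using the boolean parameter
-- exercised later in this file for the_hypercore, e.g.
--   select compress_chunk(show_chunks(:'hypertable'), hypercore_use_access_method => true);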
-- Get some test chunks as global variables (first and second chunk here)
select format('%I.%I', chunk_schema, chunk_name)::regclass as chunk1
  from timescaledb_information.chunks
 where format('%I.%I', hypertable_schema, hypertable_name)::regclass = :'hypertable'::regclass
 order by chunk1 asc
 limit 1 \gset
select format('%I.%I', chunk_schema, chunk_name)::regclass as chunk2
  from timescaledb_information.chunks
 where format('%I.%I', hypertable_schema, hypertable_name)::regclass = :'hypertable'::regclass
 order by chunk2 asc
 limit 1 offset 1 \gset
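-- Editorial note (not part of the original test): \gset stores each
-- output column of the preceding query in a psql variable named after
-- the column, so the first and second chunks are now available as
-- :chunk1 and :chunk2 and can be used directly in SQL, e.g.
--   select count(*) from :chunk1;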
-- We disable columnar scan for these tests since we have a dedicated
-- test for this.
set timescaledb.enable_columnarscan to false;
-- Discourage seqscan to make sure we predictably use indexscan
set enable_seqscan to false;
set enable_memoize to false;
-- Create a hypercore with a few rows and use the big table to join
-- with it. This should put the hypercore as the inner relation and
-- trigger rescans.
create table the_hypercore (
    updated_at timestamptz not null unique,
    device_id int,
    height float
);
create index on the_hypercore (device_id);
select from create_hypertable('the_hypercore', 'updated_at');
--
(1 row)

-- Fill the table with some data, but less than a single chunk, so
-- that we will get it as an inner relation in the nested loop join.
insert into the_hypercore
select t, ceil(random()*5), random()*40
from generate_series('2022-06-01'::timestamptz, '2022-06-10', '1 hour') t;
-- Run joins before making it a hypercore to have something to
-- compare with.
select * into expected_inner from :chunk1 join the_hypercore using (device_id);
select created_at, updated_at, o.device_id, i.humidity, o.height
  into expected_left
  from :chunk1 i left join the_hypercore o
    on i.created_at = o.updated_at and i.device_id = o.device_id;
alter table the_hypercore set (
    timescaledb.compress,
    timescaledb.compress_segmentby = '',
    timescaledb.compress_orderby = 'updated_at desc'
);
select compress_chunk(show_chunks('the_hypercore'), hypercore_use_access_method => true);
 compress_chunk
----------------------------------------
 _timescaledb_internal._hyper_3_7_chunk
 _timescaledb_internal._hyper_3_8_chunk
 _timescaledb_internal._hyper_3_9_chunk
(3 rows)

vacuum analyze the_hypercore;
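-- Editorial sketch (not part of the original test): assuming the table
-- access method is named 'hypercore', the conversion above could be
-- verified against the catalogs with something like
--   select ch::regclass, am.amname
--     from show_chunks('the_hypercore') ch
--     join pg_class c on c.oid = ch
--     join pg_am am on am.oid = c.relam;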
-- Test a merge join. We explicitly set what join methods to enable
-- and disable to avoid flaky tests.
set enable_mergejoin to true;
set enable_hashjoin to false;
set enable_nestloop to false;
\set jointype merge
\ir include/hypercore_join_test.sql
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
\set inner :jointype _join
\set outer :jointype _outer_join
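-- Editorial note (not part of the original test): psql concatenates
-- the interpolated value of :jointype with the literal suffixes, so
-- with \set jointype merge above, :inner expands to merge_join and
-- :outer to merge_outer_join. These names are used below for the
-- tables that receive the actual join results, e.g.
--   select * into :inner ...;   -- creates a table named merge_join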
-- Test inner join to make sure that it works.
select explain_analyze_anonymize(format($$
    select * from %s join the_hypercore using (device_id)
$$, :'chunk1'));
 explain_analyze_anonymize
-----------------------------------------------------------------------------------------------------------------------
 Merge Join (actual rows=N loops=N)
   Merge Cond: (_hyper_I_N_chunk.device_id = _hyper_I_N_chunk.device_id)
   ->  Merge Append (actual rows=N loops=N)
         Sort Key: _hyper_I_N_chunk.device_id
         ->  Index Scan using _hyper_I_N_chunk_the_hypercore_device_id_idx on _hyper_I_N_chunk (actual rows=N loops=N)
         ->  Index Scan using _hyper_I_N_chunk_the_hypercore_device_id_idx on _hyper_I_N_chunk (actual rows=N loops=N)
         ->  Index Scan using _hyper_I_N_chunk_the_hypercore_device_id_idx on _hyper_I_N_chunk (actual rows=N loops=N)
   ->  Index Scan using _hyper_I_N_chunk_hypertable_device_id_idx on _hyper_I_N_chunk (actual rows=N loops=N)
 Array Cache Hits: N
 Array Cache Misses: N
 Array Cache Evictions: N
 Array Decompressions: 9
(12 rows)

-- Check that it generates the right result
select * into :inner from :chunk1 join the_hypercore using (device_id);
\x on
select * from :inner r full join expected_inner e on row(r) = row(e)
where r.device_id is null or e.device_id is null;
(0 rows)

\x off
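-- Editorial note (not part of the original test): the full join on
-- row(r) = row(e) above is a generic way to diff two tables; a row
-- where either side is null exists in only one of the tables, so an
-- empty result means the join over the compressed chunk produced
-- exactly the expected rows.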
-- Test outer join (left in this case) to make sure that it works.
select explain_analyze_anonymize(format($$
    select created_at, updated_at, o.device_id, i.humidity, o.height
      from :chunk1 i left join the_hypercore o
        on i.created_at = o.updated_at and i.device_id = o.device_id;

    select created_at, updated_at, o.device_id, i.humidity, o.height
      into :outer
      from :chunk1 i left join the_hypercore o
        on i.created_at = o.updated_at and i.device_id = o.device_id;
$$, :'chunk1'));
psql:include/hypercore_join_test.sql:31: ERROR: syntax error at or near ":" at character 152