Add transition table tests for compressed hypertables
Adds a set of tests for statement-level triggers with transition tables on compressed data.
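For context, the pattern these tests exercise is a statement-level AFTER trigger that reads its transition table, fired against a compressed hypertable. A minimal sketch follows (table, function, and trigger names are illustrative, not part of this commit):

create table metrics(created_at timestamptz not null, value float8);
select create_hypertable('metrics', 'created_at');
alter table metrics set (
    timescaledb.compress,
    timescaledb.compress_orderby = 'created_at'
);

-- Statement-level trigger: new_rows holds every row added by the statement.
create function count_inserted() returns trigger as $$
begin
    raise notice 'statement inserted % row(s)', (select count(*) from new_rows);
    return null;
end;
$$ language plpgsql;

create trigger count_inserted_trg
    after insert on metrics
    referencing new table as new_rows
    for each statement execute function count_inserted();

insert into metrics values ('2022-06-01', 1.0);        -- fires on an uncompressed chunk
select compress_chunk(show_chunks('metrics'));
insert into metrics values ('2022-06-01 00:05', 2.0);  -- fires on a compressed chunk too

The tests below verify this behavior end to end: transition tables must be populated correctly for INSERT, COPY, UPDATE, and DELETE when the underlying chunks are compressed.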
  • Loading branch information
1 parent 8ec3365 commit 17f36ac
Showing 3 changed files with 225 additions and 0 deletions.
tsl/test/expected/compression_trigger.out (129 additions, 0 deletions)
@@ -0,0 +1,129 @@
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
-- This is copied from hypercore_trigger.sql
create table readings(
metric_id serial,
created_at timestamptz not null unique,
location_id smallint, --segmentby attribute with index
owner_id bigint, --segmentby attribute without index
device_id bigint, --non-segmentby attribute
temp float8,
humidity float4
);
select create_hypertable('readings', by_range('created_at'));
create_hypertable
-------------------
(1,t)
(1 row)

select setseed(1);
setseed
---------

(1 row)

insert into readings(created_at, location_id, device_id, owner_id, temp, humidity)
select t, ceil(random()*10), ceil(random()*30), ceil(random() * 5), random()*40, random()*100
from generate_series('2022-06-01'::timestamptz, '2022-07-01', '5m') t;
alter table readings set (
timescaledb.compress,
timescaledb.compress_orderby = 'created_at',
timescaledb.compress_segmentby = 'location_id, owner_id'
);
select compress_chunk(show_chunks('readings'));
compress_chunk
----------------------------------------
_timescaledb_internal._hyper_1_1_chunk
_timescaledb_internal._hyper_1_2_chunk
_timescaledb_internal._hyper_1_3_chunk
_timescaledb_internal._hyper_1_4_chunk
_timescaledb_internal._hyper_1_5_chunk
_timescaledb_internal._hyper_1_6_chunk
(6 rows)

create table saved_rows (like readings, new_row bool not null, kind text);
create function save_transition_table() returns trigger as $$
begin
case tg_op
when 'INSERT' then
insert into saved_rows select n.*, true, tg_op from new_table n;
when 'DELETE' then
insert into saved_rows select o.*, false, tg_op from old_table o;
when 'UPDATE' then
insert into saved_rows select n.*, true, tg_op from new_table n;
insert into saved_rows select o.*, false, tg_op from old_table o;
end case;
return null;
end;
$$ language plpgsql;
create trigger save_insert_transition_table_trg
after insert on readings
referencing new table as new_table
for each statement execute function save_transition_table();
insert into readings(created_at, location_id, device_id, owner_id, temp, humidity)
values ('2022-06-01 00:01:23', 999, 666, 111, 3.14, 3.14),
('2022-06-01 00:02:23', 999, 666, 112, 3.14, 3.14);
select * from saved_rows;
metric_id | created_at | location_id | owner_id | device_id | temp | humidity | new_row | kind
-----------+------------------------------+-------------+----------+-----------+------+----------+---------+--------
8642 | Wed Jun 01 00:01:23 2022 PDT | 999 | 111 | 666 | 3.14 | 3.14 | t | INSERT
8643 | Wed Jun 01 00:02:23 2022 PDT | 999 | 112 | 666 | 3.14 | 3.14 | t | INSERT
(2 rows)

truncate saved_rows;
copy readings(created_at, location_id, device_id, owner_id, temp, humidity) from stdin with (format csv);
select * from saved_rows;
metric_id | created_at | location_id | owner_id | device_id | temp | humidity | new_row | kind
-----------+------------------------------+-------------+----------+-----------+------+----------+---------+--------
8644 | Wed Jun 01 00:01:35 2022 PDT | 999 | 111 | 666 | 3.14 | 3.14 | t | INSERT
(1 row)

truncate saved_rows;
create trigger save_update_transition_table_trg
after update on readings
referencing new table as new_table old table as old_table
for each statement execute function save_transition_table();
select * from readings where location_id = 999;
metric_id | created_at | location_id | owner_id | device_id | temp | humidity
-----------+------------------------------+-------------+----------+-----------+------+----------
8642 | Wed Jun 01 00:01:23 2022 PDT | 999 | 111 | 666 | 3.14 | 3.14
8643 | Wed Jun 01 00:02:23 2022 PDT | 999 | 112 | 666 | 3.14 | 3.14
8644 | Wed Jun 01 00:01:35 2022 PDT | 999 | 111 | 666 | 3.14 | 3.14
(3 rows)

update readings set humidity = 99.99 where location_id = 999;
select * from saved_rows;
metric_id | created_at | location_id | owner_id | device_id | temp | humidity | new_row | kind
-----------+------------------------------+-------------+----------+-----------+------+----------+---------+--------
8642 | Wed Jun 01 00:01:23 2022 PDT | 999 | 111 | 666 | 3.14 | 99.99 | t | UPDATE
8643 | Wed Jun 01 00:02:23 2022 PDT | 999 | 112 | 666 | 3.14 | 99.99 | t | UPDATE
8644 | Wed Jun 01 00:01:35 2022 PDT | 999 | 111 | 666 | 3.14 | 99.99 | t | UPDATE
8642 | Wed Jun 01 00:01:23 2022 PDT | 999 | 111 | 666 | 3.14 | 3.14 | f | UPDATE
8643 | Wed Jun 01 00:02:23 2022 PDT | 999 | 112 | 666 | 3.14 | 3.14 | f | UPDATE
8644 | Wed Jun 01 00:01:35 2022 PDT | 999 | 111 | 666 | 3.14 | 3.14 | f | UPDATE
(6 rows)

truncate saved_rows;
create trigger save_delete_transition_table_trg
after delete on readings
referencing old table as old_table
for each statement execute function save_transition_table();
select * from readings where location_id = 999;
metric_id | created_at | location_id | owner_id | device_id | temp | humidity
-----------+------------------------------+-------------+----------+-----------+------+----------
8642 | Wed Jun 01 00:01:23 2022 PDT | 999 | 111 | 666 | 3.14 | 99.99
8643 | Wed Jun 01 00:02:23 2022 PDT | 999 | 112 | 666 | 3.14 | 99.99
8644 | Wed Jun 01 00:01:35 2022 PDT | 999 | 111 | 666 | 3.14 | 99.99
(3 rows)

delete from readings where location_id = 999;
select * from saved_rows;
metric_id | created_at | location_id | owner_id | device_id | temp | humidity | new_row | kind
-----------+------------------------------+-------------+----------+-----------+------+----------+---------+--------
8642 | Wed Jun 01 00:01:23 2022 PDT | 999 | 111 | 666 | 3.14 | 99.99 | f | DELETE
8643 | Wed Jun 01 00:02:23 2022 PDT | 999 | 112 | 666 | 3.14 | 99.99 | f | DELETE
8644 | Wed Jun 01 00:01:35 2022 PDT | 999 | 111 | 666 | 3.14 | 99.99 | f | DELETE
(3 rows)

truncate saved_rows;
tsl/test/sql/CMakeLists.txt (1 addition, 0 deletions)
@@ -33,6 +33,7 @@ set(TEST_FILES
compression_settings.sql
compression_sorted_merge_columns.sql
compression_sorted_merge_distinct.sql
compression_trigger.sql
decompress_index.sql
foreign_keys.sql
move.sql
tsl/test/sql/compression_trigger.sql (95 additions, 0 deletions)
@@ -0,0 +1,95 @@
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.

-- This is copied from hypercore_trigger.sql

create table readings(
metric_id serial,
created_at timestamptz not null unique,
location_id smallint, --segmentby attribute with index
owner_id bigint, --segmentby attribute without index
device_id bigint, --non-segmentby attribute
temp float8,
humidity float4
);

select create_hypertable('readings', by_range('created_at'));

select setseed(1);

insert into readings(created_at, location_id, device_id, owner_id, temp, humidity)
select t, ceil(random()*10), ceil(random()*30), ceil(random() * 5), random()*40, random()*100
from generate_series('2022-06-01'::timestamptz, '2022-07-01', '5m') t;

alter table readings set (
timescaledb.compress,
timescaledb.compress_orderby = 'created_at',
timescaledb.compress_segmentby = 'location_id, owner_id'
);

select compress_chunk(show_chunks('readings'));

create table saved_rows (like readings, new_row bool not null, kind text);

create function save_transition_table() returns trigger as $$
begin
case tg_op
when 'INSERT' then
insert into saved_rows select n.*, true, tg_op from new_table n;
when 'DELETE' then
insert into saved_rows select o.*, false, tg_op from old_table o;
when 'UPDATE' then
insert into saved_rows select n.*, true, tg_op from new_table n;
insert into saved_rows select o.*, false, tg_op from old_table o;
end case;
return null;
end;
$$ language plpgsql;

create trigger save_insert_transition_table_trg
after insert on readings
referencing new table as new_table
for each statement execute function save_transition_table();

insert into readings(created_at, location_id, device_id, owner_id, temp, humidity)
values ('2022-06-01 00:01:23', 999, 666, 111, 3.14, 3.14),
('2022-06-01 00:02:23', 999, 666, 112, 3.14, 3.14);

select * from saved_rows;

truncate saved_rows;

copy readings(created_at, location_id, device_id, owner_id, temp, humidity) from stdin with (format csv);
"2022-06-01 00:01:35",999,666,111,3.14,3.14
\.

select * from saved_rows;

truncate saved_rows;

create trigger save_update_transition_table_trg
after update on readings
referencing new table as new_table old table as old_table
for each statement execute function save_transition_table();

select * from readings where location_id = 999;

update readings set humidity = 99.99 where location_id = 999;

select * from saved_rows;

truncate saved_rows;

create trigger save_delete_transition_table_trg
after delete on readings
referencing old table as old_table
for each statement execute function save_transition_table();

select * from readings where location_id = 999;

delete from readings where location_id = 999;

select * from saved_rows;

truncate saved_rows;
