Reduce transparent_decompress_chunk test flakiness
One of the merge join queries is known to switch its inner and
outer join relations because their costs appear to be the same.
Adding an additional filter on one of the relations should change
the costs enough that the relations are no longer interchangeable.
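
For reference, a minimal sketch of the adjusted query, assuming the shared test
schema created in shared_setup.sql. The EXPLAIN options, the select list, and
the trailing ORDER BY entries are illustrative placeholders; the full statement
is in tsl/test/shared/sql/transparent_decompress_chunk.sql.in below.

    -- Sketch only: the test script runs this query through its own EXPLAIN settings;
    -- the column list here is illustrative, not the test's actual target list.
    EXPLAIN (costs off)
    SELECT m1.time, m1.device_id, m2.time, m2.device_id
    FROM metrics m1
    INNER JOIN metrics_space m2 ON m1.time = m2.time
        AND m1.device_id = 1
        AND m2.device_id = 2
    WHERE m1.v1 = 3 -- additional filter to force m1 as inner relation in join
    ORDER BY m1.time,
        m1.device_id,
        m2.time,
        m2.device_id; -- remaining ORDER BY columns omitted in this sketch

With the extra predicate, the estimated row count (and therefore the path cost)
for m1 differs from m2's, so the planner should no longer flip the two sides of
the merge join between runs.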
antekresic committed Jan 12, 2024
1 parent 67c3701 commit f08e9f0
Showing 7 changed files with 30 additions and 13 deletions.
2 changes: 1 addition & 1 deletion tsl/src/nodes/decompress_chunk/planner.c
@@ -376,7 +376,7 @@ is_not_runtime_constant_walker(Node *node, void *context)
 /*
  * We might want to support these nodes to have vectorizable join
  * clauses (T_Var) or join clauses referencing a variable that is
- * above outer join (T_PlaceHolderVar). We don't suppor them at the
+ * above outer join (T_PlaceHolderVar). We don't support them at the
  * moment.
  */
 return true;
6 changes: 5 additions & 1 deletion tsl/test/shared/expected/transparent_decompress_chunk-13.out
@@ -751,6 +751,7 @@ FROM metrics m1
 INNER JOIN metrics_space m2 ON m1.time = m2.time
 AND m1.device_id = 1
 AND m2.device_id = 2
+WHERE m1.v1 = 3 -- additional filter to force m1 as inner relation in join
 ORDER BY m1.time,
 m1.device_id,
 m2.time,
@@ -764,10 +765,13 @@ QUERY PLAN
 Order: m1."time"
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_1
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_2
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_3
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Materialize
 -> Custom Scan (ChunkAppend) on metrics_space m2
 Order: m2."time"
@@ -777,7 +781,7 @@ QUERY PLAN
 Index Cond: (device_id = 2)
 -> Index Scan using _hyper_X_X_chunk_metrics_space_device_id_time_idx on _hyper_X_X_chunk m2_3
 Index Cond: (device_id = 2)
-(20 rows)
+(23 rows)
 
 -- test OUTER JOIN
 SET min_parallel_table_scan_size TO '0';
6 changes: 5 additions & 1 deletion tsl/test/shared/expected/transparent_decompress_chunk-14.out
@@ -751,6 +751,7 @@ FROM metrics m1
 INNER JOIN metrics_space m2 ON m1.time = m2.time
 AND m1.device_id = 1
 AND m2.device_id = 2
+WHERE m1.v1 = 3 -- additional filter to force m1 as inner relation in join
 ORDER BY m1.time,
 m1.device_id,
 m2.time,
@@ -764,10 +765,13 @@ QUERY PLAN
 Order: m1."time"
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_1
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_2
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_3
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Materialize
 -> Custom Scan (ChunkAppend) on metrics_space m2
 Order: m2."time"
@@ -777,7 +781,7 @@ QUERY PLAN
 Index Cond: (device_id = 2)
 -> Index Scan using _hyper_X_X_chunk_metrics_space_device_id_time_idx on _hyper_X_X_chunk m2_3
 Index Cond: (device_id = 2)
-(20 rows)
+(23 rows)
 
 -- test OUTER JOIN
 SET min_parallel_table_scan_size TO '0';
6 changes: 5 additions & 1 deletion tsl/test/shared/expected/transparent_decompress_chunk-15.out
@@ -753,6 +753,7 @@ FROM metrics m1
 INNER JOIN metrics_space m2 ON m1.time = m2.time
 AND m1.device_id = 1
 AND m2.device_id = 2
+WHERE m1.v1 = 3 -- additional filter to force m1 as inner relation in join
 ORDER BY m1.time,
 m1.device_id,
 m2.time,
@@ -766,10 +767,13 @@ QUERY PLAN
 Order: m1."time"
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_1
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_2
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_3
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Materialize
 -> Custom Scan (ChunkAppend) on metrics_space m2
 Order: m2."time"
@@ -779,7 +783,7 @@ QUERY PLAN
 Index Cond: (device_id = 2)
 -> Index Scan using _hyper_X_X_chunk_metrics_space_device_id_time_idx on _hyper_X_X_chunk m2_3
 Index Cond: (device_id = 2)
-(20 rows)
+(23 rows)
 
 -- test OUTER JOIN
 SET min_parallel_table_scan_size TO '0';
6 changes: 5 additions & 1 deletion tsl/test/shared/expected/transparent_decompress_chunk-16.out
@@ -750,6 +750,7 @@ FROM metrics m1
 INNER JOIN metrics_space m2 ON m1.time = m2.time
 AND m1.device_id = 1
 AND m2.device_id = 2
+WHERE m1.v1 = 3 -- additional filter to force m1 as inner relation in join
 ORDER BY m1.time,
 m1.device_id,
 m2.time,
@@ -763,10 +764,13 @@ QUERY PLAN
 Order: m1."time"
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_1
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_2
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Index Scan Backward using _hyper_X_X_chunk_metrics_device_id_time_idx on _hyper_X_X_chunk m1_3
 Index Cond: (device_id = 1)
+Filter: (v1 = 3)
 -> Materialize
 -> Custom Scan (ChunkAppend) on metrics_space m2
 Order: m2."time"
@@ -776,7 +780,7 @@ QUERY PLAN
 Index Cond: (device_id = 2)
 -> Index Scan using _hyper_X_X_chunk_metrics_space_device_id_time_idx on _hyper_X_X_chunk m2_3
 Index Cond: (device_id = 2)
-(20 rows)
+(23 rows)
 
 -- test OUTER JOIN
 SET min_parallel_table_scan_size TO '0';
16 changes: 8 additions & 8 deletions tsl/test/shared/sql/include/shared_setup.sql
@@ -14,8 +14,6 @@ CREATE SCHEMA test;
 
 -- create normal hypertable with dropped columns, each chunk will have different attribute numbers
 CREATE TABLE metrics(filler_1 int, filler_2 int, filler_3 int, time timestamptz NOT NULL, device_id int, v0 int, v1 int, v2 float, v3 float);
-CREATE INDEX ON metrics(time DESC);
-CREATE INDEX ON metrics(device_id,time DESC);
 SELECT create_hypertable('metrics','time',create_default_indexes:=false);
 
 ALTER TABLE metrics DROP COLUMN filler_1;
@@ -24,12 +22,12 @@ ALTER TABLE metrics DROP COLUMN filler_2;
 INSERT INTO metrics(time,device_id,v0,v1,v2,v3) SELECT time, device_id, device_id-1, device_id + 2, device_id + 0.5, NULL FROM generate_series('2000-01-06 0:00:00+0'::timestamptz,'2000-01-12 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gdevice(device_id);
 ALTER TABLE metrics DROP COLUMN filler_3;
 INSERT INTO metrics(time,device_id,v0,v1,v2,v3) SELECT time, device_id, device_id, device_id + 2, device_id + 0.5, NULL FROM generate_series('2000-01-13 0:00:00+0'::timestamptz,'2000-01-19 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gdevice(device_id);
+CREATE INDEX ON metrics(time DESC);
+CREATE INDEX ON metrics(device_id,time DESC);
 ANALYZE metrics;
 
 -- create identical hypertable with space partitioning
 CREATE TABLE metrics_space(filler_1 int, filler_2 int, filler_3 int, time timestamptz NOT NULL, device_id int, v0 int, v1 int, v2 float, v3 float);
-CREATE INDEX ON metrics_space(time);
-CREATE INDEX ON metrics_space(device_id,time);
 SELECT create_hypertable('metrics_space','time','device_id',3,create_default_indexes:=false);
 
 ALTER TABLE metrics_space DROP COLUMN filler_1;
@@ -38,13 +36,13 @@ ALTER TABLE metrics_space DROP COLUMN filler_2;
 INSERT INTO metrics_space(time,device_id,v0,v1,v2,v3) SELECT time, device_id, device_id+1, device_id + 2, device_id + 0.5, NULL FROM generate_series('2000-01-06 0:00:00+0'::timestamptz,'2000-01-12 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gdevice(device_id);
 ALTER TABLE metrics_space DROP COLUMN filler_3;
 INSERT INTO metrics_space(time,device_id,v0,v1,v2,v3) SELECT time, device_id, device_id+1, device_id + 2, device_id + 0.5, NULL FROM generate_series('2000-01-13 0:00:00+0'::timestamptz,'2000-01-19 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gdevice(device_id);
+CREATE INDEX ON metrics_space(time);
+CREATE INDEX ON metrics_space(device_id,time);
 ANALYZE metrics_space;
 
 
 -- create hypertable with compression
 CREATE TABLE metrics_compressed(filler_1 int, filler_2 int, filler_3 int, time timestamptz NOT NULL, device_id int, v0 int, v1 int, v2 float, v3 float);
-CREATE INDEX ON metrics_compressed(time);
-CREATE INDEX ON metrics_compressed(device_id,time);
 SELECT create_hypertable('metrics_compressed','time',create_default_indexes:=false);
 
 ALTER TABLE metrics_compressed DROP COLUMN filler_1;
@@ -53,6 +51,8 @@ ALTER TABLE metrics_compressed DROP COLUMN filler_2;
 INSERT INTO metrics_compressed(time,device_id,v0,v1,v2,v3) SELECT time, device_id, device_id-1, device_id + 2, device_id + 0.5, NULL FROM generate_series('2000-01-06 0:00:00+0'::timestamptz,'2000-01-12 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gdevice(device_id);
 ALTER TABLE metrics_compressed DROP COLUMN filler_3;
 INSERT INTO metrics_compressed(time,device_id,v0,v1,v2,v3) SELECT time, device_id, device_id, device_id + 2, device_id + 0.5, NULL FROM generate_series('2000-01-13 0:00:00+0'::timestamptz,'2000-01-19 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gdevice(device_id);
+CREATE INDEX ON metrics_compressed(time);
+CREATE INDEX ON metrics_compressed(device_id,time);
 ANALYZE metrics_compressed;
 
 -- compress chunks
@@ -66,8 +66,6 @@ REINDEX TABLE _timescaledb_internal._compressed_hypertable_4;
 
 -- create hypertable with space partitioning and compression
 CREATE TABLE metrics_space_compressed(filler_1 int, filler_2 int, filler_3 int, time timestamptz NOT NULL, device_id int, v0 int, v1 int, v2 float, v3 float);
-CREATE INDEX ON metrics_space_compressed(time);
-CREATE INDEX ON metrics_space_compressed(device_id,time);
 SELECT create_hypertable('metrics_space_compressed','time','device_id',3,create_default_indexes:=false);
 
 ALTER TABLE metrics_space_compressed DROP COLUMN filler_1;
@@ -76,6 +74,8 @@ ALTER TABLE metrics_space_compressed DROP COLUMN filler_2;
 INSERT INTO metrics_space_compressed(time,device_id,v0,v1,v2,v3) SELECT time, device_id, device_id+1, device_id + 2, device_id + 0.5, NULL FROM generate_series('2000-01-06 0:00:00+0'::timestamptz,'2000-01-12 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gdevice(device_id);
 ALTER TABLE metrics_space_compressed DROP COLUMN filler_3;
 INSERT INTO metrics_space_compressed(time,device_id,v0,v1,v2,v3) SELECT time, device_id, device_id+1, device_id + 2, device_id + 0.5, NULL FROM generate_series('2000-01-13 0:00:00+0'::timestamptz,'2000-01-19 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gdevice(device_id);
+CREATE INDEX ON metrics_space_compressed(time);
+CREATE INDEX ON metrics_space_compressed(device_id,time);
 ANALYZE metrics_space_compressed;
 
 -- compress chunks
1 change: 1 addition & 0 deletions tsl/test/shared/sql/transparent_decompress_chunk.sql.in
@@ -229,6 +229,7 @@ FROM metrics m1
 INNER JOIN metrics_space m2 ON m1.time = m2.time
 AND m1.device_id = 1
 AND m2.device_id = 2
+WHERE m1.v1 = 3 -- additional filter to force m1 as inner relation in join
 ORDER BY m1.time,
 m1.device_id,
 m2.time,
