Fix corresponding equivalence member not found
Browse files Browse the repository at this point in the history
When querying compressed data we determine whether the requested ORDER BY
can be applied to the underlying query on the compressed data itself.
This happens twice: the first time we decide whether we can push down
the sort, and then we do a recheck when we set up the sort metadata.
Unfortunately those two checks did not agree: the initial check concluded
the pushdown is possible, but the recheck disagreed. This was due to a bug
in the query-property check, where we mixed up the attnos and used attnos
from the uncompressed chunk and the compressed chunk in the same bitmapset.
If the WHERE clause contained an equality constraint on a segmentby column
whose attno was identical to the compressed attno of a different column
that was part of the ORDER BY, the recheck would fail.

This patch removes the recheck and relies on the initial assessment
when building the sort metadata.
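
For illustration only, here is a minimal, self-contained sketch of the mix-up
(NOT the actual TimescaleDB code: attno values and column names are
hypothetical, and a plain bitmask stands in for PostgreSQL's Bitmapset):

#include <stdio.h>

/*
 * Sketch of the attno mix-up described above. Attnos taken from two
 * different numbering spaces -- the uncompressed chunk and the compressed
 * chunk -- can collide when stored in one set, so counting the set's
 * members no longer tells you how many distinct segmentby columns were
 * actually seen.
 */
int
main(void)
{
	/* WHERE room_id = 'foo': attno of room_id in the uncompressed chunk (hypothetical) */
	int room_id_uncompressed_attno = 3;

	/* ORDER BY includes score_type: attno of score_type in the compressed chunk (hypothetical) */
	int score_type_compressed_attno = 3;

	unsigned int seen_segmentby = 0;

	seen_segmentby |= 1u << room_id_uncompressed_attno;  /* added by the WHERE-clause check */
	seen_segmentby |= 1u << score_type_compressed_attno; /* added while walking the pathkeys */

	/* Two distinct columns were added, but they collapse onto one bit. */
	printf("distinct columns seen: 2, set members counted: %d\n",
	       __builtin_popcount(seen_segmentby));
	return 0;
}

The patch avoids the miscount by dropping the second, bitmapset-based check
in build_compressed_scan_pathkeys() and relying on the check already done in
build_sortinfo().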
svenklemm committed Jul 1, 2024
1 parent 6e01b8c commit c3417a4
Showing 6 changed files with 104 additions and 16 deletions.
1 change: 1 addition & 0 deletions .unreleased/pr_7080
@@ -0,0 +1 @@
Fixes: #7080 Fix `corresponding equivalence member not found` error
19 changes: 3 additions & 16 deletions tsl/src/nodes/decompress_chunk/decompress_chunk.c
@@ -170,11 +170,8 @@ build_compressed_scan_pathkeys(SortInfo *sort_info, PlannerInfo *root, List *chu
* seen from the start, so that we arrive at the proper counts of seen
* segmentby columns in the end.
*/
Bitmapset *segmentby_columns = bms_copy(info->chunk_const_segmentby);
ListCell *lc;
for (lc = list_head(chunk_pathkeys);
lc != NULL && bms_num_members(segmentby_columns) < info->num_segmentby_columns;
lc = lnext(chunk_pathkeys, lc))
for (lc = list_head(chunk_pathkeys); lc; lc = lnext(chunk_pathkeys, lc))
{
PathKey *pk = lfirst(lc);
EquivalenceMember *compressed_em = NULL;
@@ -195,21 +192,11 @@ build_compressed_scan_pathkeys(SortInfo *sort_info, PlannerInfo *root, List *chu
* already refers a compressed column, it is a bug. See
* build_sortinfo().
*/
Ensure(compressed_em, "corresponding equivalence member not found");
if (!compressed_em)
break;

required_compressed_pathkeys = lappend(required_compressed_pathkeys, pk);

segmentby_columns =
bms_add_member(segmentby_columns, castNode(Var, compressed_em->em_expr)->varattno);
}

/*
* Either we sort by all segmentby columns, or by subset of them and
* nothing else. We verified this condition in build_sortinfo(), so only
* asserting here.
*/
Assert(bms_num_members(segmentby_columns) == info->num_segmentby_columns ||
list_length(required_compressed_pathkeys) == list_length(chunk_pathkeys));
}

/*
28 changes: 28 additions & 0 deletions tsl/test/expected/transparent_decompression-14.out
@@ -9594,3 +9594,31 @@ SELECT count(*) from motion_table;
(1 row)

--END of test for page settings
CREATE TABLE repro(time timestamptz NOT NULL, device_id text, room_id text NOT NULL, score_type int NOT NULL, algorithm_version text NOT NULL, adjusted_score float NOT NULL);
SELECT create_hypertable('repro','time');
create_hypertable
---------------------
(17,public,repro,t)
(1 row)

ALTER TABLE repro SET (
timescaledb.compress,
timescaledb.compress_segmentby = 'device_id, room_id, score_type, algorithm_version',
timescaledb.compress_orderby='"time" DESC'
);
SELECT _timescaledb_functions.create_chunk('repro','{"time": [1717632000000000,1718236800000000]}');
create_chunk
-------------------------------------------------------------------------------------------------------------
(1441,17,_timescaledb_internal,_hyper_17_1441_chunk,r,"{""time"": [1717632000000000, 1718236800000000]}",t)
(1 row)

select compress_chunk(show_chunks('repro'));
compress_chunk
--------------------------------------------
_timescaledb_internal._hyper_17_1441_chunk
(1 row)

select from repro where room_id = 'foo' order by device_id, algorithm_version, score_type, time desc;
--
(0 rows)

28 changes: 28 additions & 0 deletions tsl/test/expected/transparent_decompression-15.out
@@ -9595,3 +9595,31 @@ SELECT count(*) from motion_table;
(1 row)

--END of test for page settings
CREATE TABLE repro(time timestamptz NOT NULL, device_id text, room_id text NOT NULL, score_type int NOT NULL, algorithm_version text NOT NULL, adjusted_score float NOT NULL);
SELECT create_hypertable('repro','time');
create_hypertable
---------------------
(17,public,repro,t)
(1 row)

ALTER TABLE repro SET (
timescaledb.compress,
timescaledb.compress_segmentby = 'device_id, room_id, score_type, algorithm_version',
timescaledb.compress_orderby='"time" DESC'
);
SELECT _timescaledb_functions.create_chunk('repro','{"time": [1717632000000000,1718236800000000]}');
create_chunk
-------------------------------------------------------------------------------------------------------------
(1441,17,_timescaledb_internal,_hyper_17_1441_chunk,r,"{""time"": [1717632000000000, 1718236800000000]}",t)
(1 row)

select compress_chunk(show_chunks('repro'));
compress_chunk
--------------------------------------------
_timescaledb_internal._hyper_17_1441_chunk
(1 row)

select from repro where room_id = 'foo' order by device_id, algorithm_version, score_type, time desc;
--
(0 rows)

28 changes: 28 additions & 0 deletions tsl/test/expected/transparent_decompression-16.out
@@ -9595,3 +9595,31 @@ SELECT count(*) from motion_table;
(1 row)

--END of test for page settings
CREATE TABLE repro(time timestamptz NOT NULL, device_id text, room_id text NOT NULL, score_type int NOT NULL, algorithm_version text NOT NULL, adjusted_score float NOT NULL);
SELECT create_hypertable('repro','time');
create_hypertable
---------------------
(17,public,repro,t)
(1 row)

ALTER TABLE repro SET (
timescaledb.compress,
timescaledb.compress_segmentby = 'device_id, room_id, score_type, algorithm_version',
timescaledb.compress_orderby='"time" DESC'
);
SELECT _timescaledb_functions.create_chunk('repro','{"time": [1717632000000000,1718236800000000]}');
create_chunk
-------------------------------------------------------------------------------------------------------------
(1441,17,_timescaledb_internal,_hyper_17_1441_chunk,r,"{""time"": [1717632000000000, 1718236800000000]}",t)
(1 row)

select compress_chunk(show_chunks('repro'));
compress_chunk
--------------------------------------------
_timescaledb_internal._hyper_17_1441_chunk
(1 row)

select from repro where room_id = 'foo' order by device_id, algorithm_version, score_type, time desc;
--
(0 rows)

16 changes: 16 additions & 0 deletions tsl/test/sql/transparent_decompression.sql.in
@@ -310,3 +310,19 @@ FROM ( SELECT chunk_schema || '.' || chunk_name as chunk_table
SELECT count(*) from motion_table;

--END of test for page settings


CREATE TABLE repro(time timestamptz NOT NULL, device_id text, room_id text NOT NULL, score_type int NOT NULL, algorithm_version text NOT NULL, adjusted_score float NOT NULL);
SELECT create_hypertable('repro','time');

ALTER TABLE repro SET (
timescaledb.compress,
timescaledb.compress_segmentby = 'device_id, room_id, score_type, algorithm_version',
timescaledb.compress_orderby='"time" DESC'
);

SELECT _timescaledb_functions.create_chunk('repro','{"time": [1717632000000000,1718236800000000]}');

select compress_chunk(show_chunks('repro'));
select from repro where room_id = 'foo' order by device_id, algorithm_version, score_type, time desc;
