Skip to content

Commit

Permalink
Fix segfault on non-open primary dimension when compressing
Browse files Browse the repository at this point in the history
With the new hypertable API, hypertables can be created with a
primary space partition. In dev-builds this was guarded only by
Asserts. This patch removes the Asserts and adds a proper check.

(cherry picked from commit 4dcbc26)
  • Loading branch information
svenklemm authored and timescale-automation committed Jun 3, 2024
1 parent 6a8b31f commit 0b9d1ce
Show file tree
Hide file tree
Showing 5 changed files with 28 additions and 9 deletions.
1 change: 1 addition & 0 deletions .unreleased/pr_6978
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fixes: #6978 Fix segfault in compress_chunk with primary space partition
6 changes: 0 additions & 6 deletions src/subspace_store.c
Original file line number Diff line number Diff line change
Expand Up @@ -75,12 +75,6 @@ ts_subspace_store_init(const Hyperspace *space, MemoryContext mcxt, int16 max_it
MemoryContext old = MemoryContextSwitchTo(mcxt);
SubspaceStore *sst = palloc(sizeof(SubspaceStore));

/*
* make sure that the first dimension is a time dimension, otherwise the
* tree will grow in a way that makes pruning less effective.
*/
Assert(space->num_dimensions < 1 || space->dimensions[0].type == DIMENSION_TYPE_OPEN);

sst->origin = subspace_store_internal_node_create(space->num_dimensions == 1);
sst->num_dimensions = space->num_dimensions;
/* max_items = 0 is treated as unlimited */
Expand Down
4 changes: 1 addition & 3 deletions tsl/src/compression/api.c
Original file line number Diff line number Diff line change
Expand Up @@ -256,9 +256,7 @@ find_chunk_to_merge_into(Hypertable *ht, Chunk *current_chunk)

const Dimension *time_dim = hyperspace_get_open_dimension(ht->space, 0);

Assert(time_dim != NULL);

if (time_dim->fd.compress_interval_length == 0)
if (!time_dim || time_dim->fd.compress_interval_length == 0)
return NULL;

Assert(current_chunk->cube->num_slices > 0);
Expand Down
19 changes: 19 additions & 0 deletions tsl/test/expected/compression_merge.out
Original file line number Diff line number Diff line change
Expand Up @@ -820,3 +820,22 @@ NOTICE: chunk "_hyper_17_344_chunk" is already compressed
(1 row)

ROLLBACK;
-- test segfault when compressing a hypertable with a primary space dimension #6977
CREATE TABLE test_by_hash(id BIGINT, value float8);
SELECT create_hypertable('test_by_hash', by_hash('id', 8));
create_hypertable
-------------------
(19,t)
(1 row)

ALTER TABLE test_by_hash SET (timescaledb.compress = true);
WARNING: there was some uncertainty picking the default segment by for the hypertable: You do not have any indexes on columns that can be used for segment_by and thus we are not using segment_by for compression. Please make sure you are not missing any indexes
NOTICE: default segment by for hypertable "test_by_hash" is set to ""
NOTICE: default order by for hypertable "test_by_hash" is set to "id DESC"
INSERT INTO test_by_hash VALUES (1, 1.0), (2, 2.0), (3, 3.0);
SELECT compress_chunk('_timescaledb_internal._hyper_19_351_chunk');
compress_chunk
-------------------------------------------
_timescaledb_internal._hyper_19_351_chunk
(1 row)

7 changes: 7 additions & 0 deletions tsl/test/sql/compression_merge.sql
Original file line number Diff line number Diff line change
Expand Up @@ -308,3 +308,10 @@ BEGIN;
SELECT hypertable_name, range_start, range_end FROM timescaledb_information.chunks WHERE hypertable_name = 'test9' ORDER BY 2;
ROLLBACK;

-- test segfault when compressing a hypertable with a primary space dimension #6977
CREATE TABLE test_by_hash(id BIGINT, value float8);
SELECT create_hypertable('test_by_hash', by_hash('id', 8));
ALTER TABLE test_by_hash SET (timescaledb.compress = true);
INSERT INTO test_by_hash VALUES (1, 1.0), (2, 2.0), (3, 3.0);
SELECT compress_chunk('_timescaledb_internal._hyper_19_351_chunk');

0 comments on commit 0b9d1ce

Please sign in to comment.