Fix ChunkAppend, ConstraintAwareAppend child subplan
When a TidRangeScan is a child of a ChunkAppend or ConstraintAwareAppend
node, the error "invalid child of chunk append: Node (26)" is reported. This
patch fixes the issue by recognising TidRangeScan as a valid child.

Fixes: #4872
sb230132 committed Jan 17, 2023
1 parent 5c897ff commit 37126a4
Showing 9 changed files with 350 additions and 0 deletions.
3 changes: 3 additions & 0 deletions src/nodes/chunk_append/planner.c
@@ -368,6 +368,9 @@ ts_chunk_append_get_scan_plan(Plan *plan)
case T_TidScan:
case T_ValuesScan:
case T_WorkTableScan:
#if PG14_GE
case T_TidRangeScan:
#endif
return (Scan *) plan;
break;
case T_CustomScan:
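The hunk above adds T_TidRangeScan to the set of node tags that ts_chunk_append_get_scan_plan treats as plain scan plans. Below is a standalone sketch of that dispatch pattern, not the TimescaleDB source: the types and tag values are simplified stand-ins (only 26 is taken from the error reported in #4872), but it shows how an unrecognised tag falls through to the error branch and how the added case routes the node through as a Scan.

/*
 * Standalone sketch of the scan-plan dispatch -- NOT the TimescaleDB source.
 * Simplified stand-in types; only the tag value 26 matches the "Node (26)"
 * in the reported error.  Before the fix, T_TidRangeScan fell through to the
 * default branch and triggered that error.
 */
#include <stdio.h>
#include <stdlib.h>

typedef enum NodeTag
{
	T_SeqScan = 20,				/* illustrative values, not PostgreSQL's */
	T_IndexScan = 21,
	T_TidScan = 25,
	T_TidRangeScan = 26			/* matches the "Node (26)" in the error */
} NodeTag;

typedef struct Plan
{
	NodeTag		type;
} Plan;

typedef Plan Scan;				/* in PostgreSQL, Scan embeds Plan */

#define PG14_GE 1				/* pretend we build against PG >= 14 */

static Scan *
get_scan_plan(Plan *plan)
{
	switch (plan->type)
	{
		case T_SeqScan:
		case T_IndexScan:
		case T_TidScan:
#if PG14_GE
		case T_TidRangeScan:	/* the fix: accept the PG14 scan type */
#endif
			return (Scan *) plan;
		default:
			fprintf(stderr, "invalid child of chunk append: Node (%d)\n",
					(int) plan->type);
			exit(1);
	}
}

int
main(void)
{
	Plan		tidrange = {T_TidRangeScan};

	/* With the extra case compiled in, this no longer errors out. */
	printf("recognised scan, tag = %d\n", (int) get_scan_plan(&tidrange)->type);
	return 0;
}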
6 changes: 6 additions & 0 deletions src/nodes/constraint_aware_append/constraint_aware_append.c
@@ -222,6 +222,9 @@ ca_append_begin(CustomScanState *node, EState *estate, int eflags)
case T_WorkTableScan:
case T_ForeignScan:
case T_CustomScan:
#if PG14_GE
case T_TidRangeScan:
#endif
{
/*
* If this is a base rel (chunk), check if it can be
@@ -441,6 +444,9 @@ constraint_aware_append_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPa
case T_WorkTableScan:
case T_ForeignScan:
case T_CustomScan:
#if PG14_GE
case T_TidRangeScan:
#endif
{
List *chunk_clauses = NIL;
ListCell *lc;
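Both hunks wrap the new case in #if PG14_GE because TID range scans were introduced in PostgreSQL 14, so T_TidRangeScan does not exist in the headers of older servers. A minimal, standalone illustration of that compile-time gate follows; the definition of PG14_GE shown here is an assumption about the usual version-gate pattern, not copied from TimescaleDB's compatibility header, and PG_VERSION_NUM is faked rather than taken from the PostgreSQL build.

/*
 * Minimal illustration of the compile-time gate (assumed pattern, not the
 * actual TimescaleDB compatibility header).  PG_VERSION_NUM is a stand-in;
 * real builds get it from the PostgreSQL headers.
 */
#include <stdio.h>

#define PG_VERSION_NUM 140006	/* stand-in value for a PG 14 build */
#define PG14_GE (PG_VERSION_NUM >= 140000)

int
main(void)
{
#if PG14_GE
	puts("PG >= 14: the T_TidRangeScan case is compiled in");
#else
	puts("PG < 14: the T_TidRangeScan case is compiled out");
#endif
	return 0;
}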
32 changes: 32 additions & 0 deletions test/expected/create_hypertable.out
@@ -974,3 +974,35 @@ ERROR: insert or update on table "_hyper_21_21_chunk" violates foreign key cons
DROP TABLE regular cascade;
NOTICE: drop cascades to 2 other objects
DROP TABLE timescale cascade;
-- github issue 4872
-- If subplan of ChunkAppend is TidRangeScan, then SELECT on
-- hypertable fails with error "invalid child of chunk append: Node (26)"
create table tidrangescan_test (
time timestamp with time zone,
some_column bigint
);
select create_hypertable('tidrangescan_test', 'time');
NOTICE: adding not-null constraint to column "time"
create_hypertable
---------------------------------
(22,public,tidrangescan_test,t)
(1 row)

insert into tidrangescan_test (time, some_column) values ('2023-02-12 00:00:00+02:40', 1);
insert into tidrangescan_test (time, some_column) values ('2023-02-12 00:00:10+02:40', 2);
insert into tidrangescan_test (time, some_column) values ('2023-02-12 00:00:20+02:40', 3);
-- The query below generates a plan of the form
-- Custom Scan (ChunkAppend)
-- -> Tid Range Scan
-- Without this fix, the Tid Range Scan node is not recognised as a valid
-- child of the ChunkAppend node, which causes the error
-- "invalid child of chunk append: Node (26)" when the query is executed.
select * from tidrangescan_test where time > '2023-02-12 00:00:00+02:40'::timestamp with time zone - interval '5 years' and ctid < '(1,1)'::tid ORDER BY time;
time | some_column
------------------------------+-------------
Sat Feb 11 13:20:00 2023 PST | 1
Sat Feb 11 13:20:10 2023 PST | 2
Sat Feb 11 13:20:20 2023 PST | 3
(3 rows)

drop table tidrangescan_test;
24 changes: 24 additions & 0 deletions test/sql/create_hypertable.sql
@@ -600,3 +600,27 @@ insert into timescale values (now(), 111,2);
-- cleanup
DROP TABLE regular cascade;
DROP TABLE timescale cascade;

-- github issue 4872
-- If subplan of ChunkAppend is TidRangeScan, then SELECT on
-- hypertable fails with error "invalid child of chunk append: Node (26)"
create table tidrangescan_test (
time timestamp with time zone,
some_column bigint
);

select create_hypertable('tidrangescan_test', 'time');

insert into tidrangescan_test (time, some_column) values ('2023-02-12 00:00:00+02:40', 1);
insert into tidrangescan_test (time, some_column) values ('2023-02-12 00:00:10+02:40', 2);
insert into tidrangescan_test (time, some_column) values ('2023-02-12 00:00:20+02:40', 3);

-- The query below generates a plan of the form
-- Custom Scan (ChunkAppend)
-- -> Tid Range Scan
-- Without this fix, the Tid Range Scan node is not recognised as a valid
-- child of the ChunkAppend node, which causes the error
-- "invalid child of chunk append: Node (26)" when the query is executed.
select * from tidrangescan_test where time > '2023-02-12 00:00:00+02:40'::timestamp with time zone - interval '5 years' and ctid < '(1,1)'::tid ORDER BY time;

drop table tidrangescan_test;
65 changes: 65 additions & 0 deletions tsl/test/expected/continuous_aggs-12.out
@@ -2280,3 +2280,68 @@ ORDER BY timec;
Thu Nov 01 17:00:00 2018 PDT | NYC | 35 | 15
(4 rows)

-- github issue 4872
-- If subplan of ConstraintAwareAppend is TidRangeScan, then SELECT on
-- hypertable fails with error "invalid child of chunk append: Node (26)"
CREATE TABLE metrics(time timestamptz, device_id int, v1 float, v2 float);
SELECT create_hypertable('metrics','time');
NOTICE: adding not-null constraint to column "time"
create_hypertable
-----------------------
(63,public,metrics,t)
(1 row)

INSERT INTO metrics SELECT generate_series('2000-01-01'::timestamptz,'2000-01-10','1m'),1,0.25,0.75;
CREATE MATERIALIZED VIEW cagg_expr WITH (timescaledb.continuous)
AS
SELECT
time_bucket('1d', time) AS time,
'Const'::text AS Const,
4.3::numeric AS "numeric",
first(metrics,time),
CASE WHEN true THEN 'foo' ELSE 'bar' END,
COALESCE(NULL,'coalesce'),
avg(v1) + avg(v2) AS avg1,
avg(v1+v2) AS avg2,
count(*) AS cnt
FROM metrics
WHERE ctid < '(1,1)'::tid GROUP BY 1 WITH NO DATA;
CALL refresh_continuous_aggregate('cagg_expr', NULL, NULL);
SET timescaledb.enable_chunk_append to off;
SET enable_indexscan to off;
EXPLAIN (analyze, costs off, timing off, summary off) SELECT * FROM cagg_expr ORDER BY time LIMIT 5;
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Limit (actual rows=2 loops=1)
-> Merge Append (actual rows=2 loops=1)
Sort Key: _hyper_64_134_chunk."time"
-> Sort (actual rows=2 loops=1)
Sort Key: _hyper_64_134_chunk."time"
Sort Method: quicksort
-> Bitmap Heap Scan on _hyper_64_134_chunk (actual rows=2 loops=1)
Recheck Cond: ("time" < COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
Heap Blocks: exact=1
-> Bitmap Index Scan on _hyper_64_134_chunk__materialized_hypertable_64_time_idx (actual rows=2 loops=1)
Index Cond: ("time" < COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
-> GroupAggregate (actual rows=0 loops=1)
Group Key: (time_bucket('@ 1 day'::interval, metrics."time"))
-> Sort (actual rows=0 loops=1)
Sort Key: (time_bucket('@ 1 day'::interval, metrics."time"))
Sort Method: quicksort
-> Custom Scan (ConstraintAwareAppend) (actual rows=0 loops=1)
Hypertable: metrics
Chunks excluded during startup: 1
-> Append (actual rows=0 loops=1)
-> Bitmap Heap Scan on _hyper_63_133_chunk (actual rows=0 loops=1)
Recheck Cond: ("time" >= COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
Filter: (ctid < '(1,1)'::tid)
Rows Removed by Filter: 4801
Heap Blocks: exact=36
-> Bitmap Index Scan on _hyper_63_133_chunk_metrics_time_idx (actual rows=4801 loops=1)
Index Cond: ("time" >= COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
(27 rows)

RESET timescaledb.enable_chunk_append;
RESET enable_indexscan;
DROP MATERIALIZED VIEW cagg_expr;
NOTICE: drop cascades to table _timescaledb_internal._hyper_64_134_chunk
65 changes: 65 additions & 0 deletions tsl/test/expected/continuous_aggs-13.out
@@ -2280,3 +2280,68 @@ ORDER BY timec;
Thu Nov 01 17:00:00 2018 PDT | NYC | 35 | 15
(4 rows)

-- github issue 4872
-- If subplan of ConstraintAwareAppend is TidRangeScan, then SELECT on
-- hypertable fails with error "invalid child of chunk append: Node (26)"
CREATE TABLE metrics(time timestamptz, device_id int, v1 float, v2 float);
SELECT create_hypertable('metrics','time');
NOTICE: adding not-null constraint to column "time"
create_hypertable
-----------------------
(63,public,metrics,t)
(1 row)

INSERT INTO metrics SELECT generate_series('2000-01-01'::timestamptz,'2000-01-10','1m'),1,0.25,0.75;
CREATE MATERIALIZED VIEW cagg_expr WITH (timescaledb.continuous)
AS
SELECT
time_bucket('1d', time) AS time,
'Const'::text AS Const,
4.3::numeric AS "numeric",
first(metrics,time),
CASE WHEN true THEN 'foo' ELSE 'bar' END,
COALESCE(NULL,'coalesce'),
avg(v1) + avg(v2) AS avg1,
avg(v1+v2) AS avg2,
count(*) AS cnt
FROM metrics
WHERE ctid < '(1,1)'::tid GROUP BY 1 WITH NO DATA;
CALL refresh_continuous_aggregate('cagg_expr', NULL, NULL);
SET timescaledb.enable_chunk_append to off;
SET enable_indexscan to off;
EXPLAIN (analyze, costs off, timing off, summary off) SELECT * FROM cagg_expr ORDER BY time LIMIT 5;
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Limit (actual rows=2 loops=1)
-> Merge Append (actual rows=2 loops=1)
Sort Key: _hyper_64_134_chunk."time"
-> Sort (actual rows=2 loops=1)
Sort Key: _hyper_64_134_chunk."time"
Sort Method: quicksort
-> Bitmap Heap Scan on _hyper_64_134_chunk (actual rows=2 loops=1)
Recheck Cond: ("time" < COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
Heap Blocks: exact=1
-> Bitmap Index Scan on _hyper_64_134_chunk__materialized_hypertable_64_time_idx (actual rows=2 loops=1)
Index Cond: ("time" < COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
-> GroupAggregate (actual rows=0 loops=1)
Group Key: (time_bucket('@ 1 day'::interval, metrics."time"))
-> Sort (actual rows=0 loops=1)
Sort Key: (time_bucket('@ 1 day'::interval, metrics."time"))
Sort Method: quicksort
-> Custom Scan (ConstraintAwareAppend) (actual rows=0 loops=1)
Hypertable: metrics
Chunks excluded during startup: 1
-> Append (actual rows=0 loops=1)
-> Bitmap Heap Scan on _hyper_63_133_chunk (actual rows=0 loops=1)
Recheck Cond: ("time" >= COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
Filter: (ctid < '(1,1)'::tid)
Rows Removed by Filter: 4801
Heap Blocks: exact=36
-> Bitmap Index Scan on _hyper_63_133_chunk_metrics_time_idx (actual rows=4801 loops=1)
Index Cond: ("time" >= COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
(27 rows)

RESET timescaledb.enable_chunk_append;
RESET enable_indexscan;
DROP MATERIALIZED VIEW cagg_expr;
NOTICE: drop cascades to table _timescaledb_internal._hyper_64_134_chunk
62 changes: 62 additions & 0 deletions tsl/test/expected/continuous_aggs-14.out
@@ -2280,3 +2280,65 @@ ORDER BY timec;
Thu Nov 01 17:00:00 2018 PDT | NYC | 35 | 15
(4 rows)

-- github issue 4872
-- If subplan of ConstraintAwareAppend is TidRangeScan, then SELECT on
-- hypertable fails with error "invalid child of chunk append: Node (26)"
CREATE TABLE metrics(time timestamptz, device_id int, v1 float, v2 float);
SELECT create_hypertable('metrics','time');
NOTICE: adding not-null constraint to column "time"
create_hypertable
-----------------------
(63,public,metrics,t)
(1 row)

INSERT INTO metrics SELECT generate_series('2000-01-01'::timestamptz,'2000-01-10','1m'),1,0.25,0.75;
CREATE MATERIALIZED VIEW cagg_expr WITH (timescaledb.continuous)
AS
SELECT
time_bucket('1d', time) AS time,
'Const'::text AS Const,
4.3::numeric AS "numeric",
first(metrics,time),
CASE WHEN true THEN 'foo' ELSE 'bar' END,
COALESCE(NULL,'coalesce'),
avg(v1) + avg(v2) AS avg1,
avg(v1+v2) AS avg2,
count(*) AS cnt
FROM metrics
WHERE ctid < '(1,1)'::tid GROUP BY 1 WITH NO DATA;
CALL refresh_continuous_aggregate('cagg_expr', NULL, NULL);
SET timescaledb.enable_chunk_append to off;
SET enable_indexscan to off;
EXPLAIN (analyze, costs off, timing off, summary off) SELECT * FROM cagg_expr ORDER BY time LIMIT 5;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Limit (actual rows=2 loops=1)
-> Merge Append (actual rows=2 loops=1)
Sort Key: _hyper_64_134_chunk."time"
-> Sort (actual rows=2 loops=1)
Sort Key: _hyper_64_134_chunk."time"
Sort Method: quicksort
-> Bitmap Heap Scan on _hyper_64_134_chunk (actual rows=2 loops=1)
Recheck Cond: ("time" < COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
Heap Blocks: exact=1
-> Bitmap Index Scan on _hyper_64_134_chunk__materialized_hypertable_64_time_idx (actual rows=2 loops=1)
Index Cond: ("time" < COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
-> GroupAggregate (actual rows=0 loops=1)
Group Key: (time_bucket('@ 1 day'::interval, metrics."time"))
-> Sort (actual rows=0 loops=1)
Sort Key: (time_bucket('@ 1 day'::interval, metrics."time"))
Sort Method: quicksort
-> Custom Scan (ConstraintAwareAppend) (actual rows=0 loops=1)
Hypertable: metrics
Chunks excluded during startup: 1
-> Append (actual rows=0 loops=1)
-> Tid Range Scan on _hyper_63_133_chunk (actual rows=0 loops=1)
TID Cond: (ctid < '(1,1)'::tid)
Filter: ("time" >= COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
Rows Removed by Filter: 136
(24 rows)

RESET timescaledb.enable_chunk_append;
RESET enable_indexscan;
DROP MATERIALIZED VIEW cagg_expr;
NOTICE: drop cascades to table _timescaledb_internal._hyper_64_134_chunk
63 changes: 63 additions & 0 deletions tsl/test/expected/continuous_aggs-15.out
@@ -2282,3 +2282,66 @@ ORDER BY timec;
Thu Nov 01 17:00:00 2018 PDT | NYC | 35 | 15
(4 rows)

-- github issue 4872
-- If subplan of ConstraintAwareAppend is TidRangeScan, then SELECT on
-- hypertable fails with error "invalid child of chunk append: Node (26)"
CREATE TABLE metrics(time timestamptz, device_id int, v1 float, v2 float);
SELECT create_hypertable('metrics','time');
NOTICE: adding not-null constraint to column "time"
create_hypertable
-----------------------
(63,public,metrics,t)
(1 row)

INSERT INTO metrics SELECT generate_series('2000-01-01'::timestamptz,'2000-01-10','1m'),1,0.25,0.75;
CREATE MATERIALIZED VIEW cagg_expr WITH (timescaledb.continuous)
AS
SELECT
time_bucket('1d', time) AS time,
'Const'::text AS Const,
4.3::numeric AS "numeric",
first(metrics,time),
CASE WHEN true THEN 'foo' ELSE 'bar' END,
COALESCE(NULL,'coalesce'),
avg(v1) + avg(v2) AS avg1,
avg(v1+v2) AS avg2,
count(*) AS cnt
FROM metrics
WHERE ctid < '(1,1)'::tid GROUP BY 1 WITH NO DATA;
CALL refresh_continuous_aggregate('cagg_expr', NULL, NULL);
SET timescaledb.enable_chunk_append to off;
SET enable_indexscan to off;
EXPLAIN (analyze, costs off, timing off, summary off) SELECT * FROM cagg_expr ORDER BY time LIMIT 5;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Limit (actual rows=2 loops=1)
-> Merge Append (actual rows=2 loops=1)
Sort Key: _hyper_64_134_chunk."time"
-> Sort (actual rows=2 loops=1)
Sort Key: _hyper_64_134_chunk."time"
Sort Method: quicksort
-> Bitmap Heap Scan on _hyper_64_134_chunk (actual rows=2 loops=1)
Recheck Cond: ("time" < COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
Heap Blocks: exact=1
-> Bitmap Index Scan on _hyper_64_134_chunk__materialized_hypertable_64_time_idx (actual rows=2 loops=1)
Index Cond: ("time" < COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
-> GroupAggregate (actual rows=0 loops=1)
Group Key: (time_bucket('@ 1 day'::interval, metrics."time"))
-> Sort (actual rows=0 loops=1)
Sort Key: (time_bucket('@ 1 day'::interval, metrics."time"))
Sort Method: quicksort
-> Result (actual rows=0 loops=1)
-> Custom Scan (ConstraintAwareAppend) (actual rows=0 loops=1)
Hypertable: metrics
Chunks excluded during startup: 1
-> Append (actual rows=0 loops=1)
-> Tid Range Scan on _hyper_63_133_chunk (actual rows=0 loops=1)
TID Cond: (ctid < '(1,1)'::tid)
Filter: ("time" >= COALESCE(_timescaledb_internal.to_timestamp(_timescaledb_internal.cagg_watermark(64)), '-infinity'::timestamp with time zone))
Rows Removed by Filter: 136
(25 rows)

RESET timescaledb.enable_chunk_append;
RESET enable_indexscan;
DROP MATERIALIZED VIEW cagg_expr;
NOTICE: drop cascades to table _timescaledb_internal._hyper_64_134_chunk