Skip to content

Commit

Permalink
Fix ANALYZE on dist hypertable for a set of nodes
Browse files Browse the repository at this point in the history
Previously, ANALYZE on a distributed hypertable was dispatched to every data
node in the cluster; make sure it is invoked only on the specific set of data
nodes assigned to that distributed hypertable.

Fix #4508
  • Loading branch information
pmwkaa committed Jul 21, 2022
1 parent ca3c85b commit 9e96da6
Show file tree
Hide file tree
Showing 4 changed files with 30 additions and 1 deletion.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ accidentally triggering the load of a previous DB version.**
* #4517 Fix prepared statement param handling in ChunkAppend
* #4526 Fix gapfill group comparison for TOASTed values
* #4527 Handle stats properly for range types
* #4522 Fix ANALYZE on dist hypertable for a set of nodes

**Thanks**

Expand Down
4 changes: 3 additions & 1 deletion tsl/src/chunk_api.c
Original file line number Diff line number Diff line change
Expand Up @@ -1340,6 +1340,7 @@ static void
fetch_remote_chunk_stats(Hypertable *ht, FunctionCallInfo fcinfo, bool col_stats)
{
StatsProcessContext statsctx;
List *data_nodes;
DistCmdResult *cmdres;
TupleDesc tupdesc;
TupleFactory *tf;
Expand All @@ -1355,7 +1356,8 @@ fetch_remote_chunk_stats(Hypertable *ht, FunctionCallInfo fcinfo, bool col_stats
errmsg("function returning record called in context "
"that cannot accept type record")));

cmdres = ts_dist_cmd_invoke_func_call_on_all_data_nodes(fcinfo);
data_nodes = ts_hypertable_get_data_node_name_list(ht);
cmdres = ts_dist_cmd_invoke_func_call_on_data_nodes(fcinfo, data_nodes);

/* Expect TEXT response format since dist command API currently defaults
* to requesting TEXT */
Expand Down
15 changes: 15 additions & 0 deletions tsl/test/expected/dist_ddl.out
Original file line number Diff line number Diff line change
Expand Up @@ -2484,6 +2484,21 @@ VACUUM disttable;
VACUUM;
\c :TEST_DBNAME :ROLE_SUPERUSER;
DROP TABLE disttable;
-- Ensure ANALYZE commands can be run on a set of data nodes
--
-- Issue: #4508
--
CREATE TABLE hyper(time TIMESTAMPTZ, device INT, temp FLOAT);
SELECT create_distributed_hypertable('hyper', 'time', 'device', 4, chunk_time_interval => interval '18 hours', replication_factor => 1, data_nodes => '{ data_node_1, data_node_2 }');
NOTICE: adding not-null constraint to column "time"
create_distributed_hypertable
-------------------------------
(20,public,hyper,t)
(1 row)

INSERT INTO hyper SELECT t, ceil((random() * 5))::int, random() * 80
FROM generate_series('2019-01-01'::timestamptz, '2019-01-05'::timestamptz, '1 minute') as t;
ANALYZE hyper;
-- cleanup
\c :TEST_DBNAME :ROLE_CLUSTER_SUPERUSER;
DROP DATABASE :MY_DB1;
Expand Down
11 changes: 11 additions & 0 deletions tsl/test/sql/dist_ddl.sql
Original file line number Diff line number Diff line change
Expand Up @@ -701,6 +701,17 @@ VACUUM;
\c :TEST_DBNAME :ROLE_SUPERUSER;
DROP TABLE disttable;

-- Ensure ANALYZE commands can be run on a set of data nodes
--
-- Issue: #4508
--
CREATE TABLE hyper(time TIMESTAMPTZ, device INT, temp FLOAT);
SELECT create_distributed_hypertable('hyper', 'time', 'device', 4, chunk_time_interval => interval '18 hours', replication_factor => 1, data_nodes => '{ data_node_1, data_node_2 }');

INSERT INTO hyper SELECT t, ceil((random() * 5))::int, random() * 80
FROM generate_series('2019-01-01'::timestamptz, '2019-01-05'::timestamptz, '1 minute') as t;
ANALYZE hyper;

-- cleanup
\c :TEST_DBNAME :ROLE_CLUSTER_SUPERUSER;
DROP DATABASE :MY_DB1;
Expand Down

0 comments on commit 9e96da6

Please sign in to comment.