From cd174965774ce1a064b2c47b02b96376dbcc59ab Mon Sep 17 00:00:00 2001 From: guo-shaoge Date: Sat, 10 Dec 2022 17:14:13 +0800 Subject: [PATCH 01/10] planner: refine planner code in disaggregated tiflash mode Signed-off-by: guo-shaoge --- executor/tiflashtest/tiflash_test.go | 22 ++++++++++++++++++++++ planner/core/find_best_task.go | 18 +++++++++++++----- planner/core/task.go | 18 ++++++++++++++++++ 3 files changed, 53 insertions(+), 5 deletions(-) diff --git a/executor/tiflashtest/tiflash_test.go b/executor/tiflashtest/tiflash_test.go index baba133b3d5a7..f6956a4743885 100644 --- a/executor/tiflashtest/tiflash_test.go +++ b/executor/tiflashtest/tiflash_test.go @@ -1303,3 +1303,25 @@ func TestDisaggregatedTiFlash(t *testing.T) { }) tk.MustQuery("select * from t;").Check(testkit.Rows()) } + +func TestDisaggregatedTiFlashQuery(t *testing.T) { + config.UpdateGlobal(func(conf *config.Config) { + conf.DisaggregatedTiFlash = true + }) + store := testkit.CreateMockStore(t, withMockTiFlash(2)) + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("drop table if exists tbl_1") + tk.MustExec("create table tbl_1 ( col_1 bigint not null default -1443635317331776148 ,col_2 text ( 176 ) collate utf8mb4_bin not null ,col_3 decimal ( 8 , 3 ) ,col_4 varchar ( 128 ) collate utf8mb4_bin not null ,col_5 varchar ( 377 ) collate utf8mb4_bin ,col_6 double ,col_7 varchar ( 459 ) collate utf8mb4_bin ,col_8 tinyint default -88 ) charset utf8mb4 collate utf8mb4_bin ;") + tk.MustExec("alter table tbl_1 set tiflash replica 1") + tb := external.GetTableByName(t, tk, "test", "tbl_1") + err := domain.GetDomain(tk.Session()).DDL().UpdateTableReplicaInfo(tk.Session(), tb.Meta().ID, true) + require.NoError(t, err) + tk.MustExec("set @@session.tidb_isolation_read_engines=\"tiflash\"") + + tk.MustQuery("explain select max( tbl_1.col_1 ) as r0 , sum( tbl_1.col_1 ) as r1 , sum( tbl_1.col_8 ) as r2 from tbl_1 where tbl_1.col_8 != 68 or tbl_1.col_3 between null and 939 order by r0,r1,r2 ;") + + config.UpdateGlobal(func(conf *config.Config) { + conf.DisaggregatedTiFlash = false + }) +} diff --git a/planner/core/find_best_task.go b/planner/core/find_best_task.go index afc5223b9be94..67a3efb3a74b3 100644 --- a/planner/core/find_best_task.go +++ b/planner/core/find_best_task.go @@ -2013,7 +2013,17 @@ func (ds *DataSource) convertToTableScan(prop *property.PhysicalProperty, candid ColumnNames: ds.names, } mppTask = ts.addPushedDownSelectionToMppTask(mppTask, ds.stats.ScaleByExpectCnt(prop.ExpectedCnt)) - return mppTask, nil + task = mppTask + if !mppTask.invalid() { + if prop.TaskTp != property.RootTaskType && len(mppTask.rootTaskConds) > 0 { + task = invalidTask + } else { + task = mppTask + task = task.convertToRootTask(ds.ctx) + ds.addSelection4PlanCache(task.(*rootTask), ds.stats.ScaleByExpectCnt(prop.ExpectedCnt), prop) + } + } + return task, nil } copTask := &copTask{ tablePlan: ts, @@ -2223,10 +2233,8 @@ func (ts *PhysicalTableScan) addPushedDownSelectionToMppTask(mpp *mppTask, stats filterCondition, rootTaskConds := SplitSelCondsWithVirtualColumn(ts.filterCondition) var newRootConds []expression.Expression filterCondition, newRootConds = expression.PushDownExprs(ts.ctx.GetSessionVars().StmtCtx, filterCondition, ts.ctx.GetClient(), ts.StoreType) - rootTaskConds = append(rootTaskConds, newRootConds...) - if len(rootTaskConds) > 0 { - return &mppTask{} - } + mpp.rootTaskConds = append(rootTaskConds, newRootConds...) + ts.filterCondition = filterCondition // Add filter condition to table plan now. 
if len(ts.filterCondition) > 0 { diff --git a/planner/core/task.go b/planner/core/task.go index cc27029d83c8e..3183e07a49f57 100644 --- a/planner/core/task.go +++ b/planner/core/task.go @@ -2067,6 +2067,7 @@ type mppTask struct { partTp property.MPPPartitionType hashCols []*property.MPPPartitionColumn + rootTaskConds []expression.Expression } func (t *mppTask) count() float64 { @@ -2146,6 +2147,23 @@ func (t *mppTask) convertToRootTaskImpl(ctx sessionctx.Context) *rootTask { rt := &rootTask{ p: p, } + + if len(t.rootTaskConds) > 0 { + // Some Filter cannot be pushed down to TiFlash, need to add Selection in rootTask, + // so this Selection will be executed in TiDB. + _, isTableScan := t.p.(*PhysicalTableScan) + if !isTableScan { + // Need to make sure oriTaskPlan is TableScan, because rootTaskConds is part of TableScan.FilterCondition. + // It's only for TableScan. This is ensured we convert mppTask to rootTask just after TableScan is built, + // so no other operators are added into this mppTask. + panic("expect task.p is PhysicalTableScan when got task.rootTaskConds") + } + selectivity := SelectionFactor + sel := PhysicalSelection{Conditions: t.rootTaskConds}.Init(ctx, rt.p.statsInfo().Scale(selectivity), rt.p.SelectBlockOffset()) + sel.fromDataSource = true + sel.SetChildren(rt.p) + rt.p = sel + } return rt } From 1f795522f063e336b680a920e05c5a2e7b175be5 Mon Sep 17 00:00:00 2001 From: guo-shaoge Date: Sat, 10 Dec 2022 21:34:30 +0800 Subject: [PATCH 02/10] better refine(consider task type) Signed-off-by: guo-shaoge --- executor/tiflashtest/tiflash_test.go | 12 +++++++++--- planner/core/find_best_task.go | 15 ++++++++++++--- planner/core/task.go | 4 ++-- 3 files changed, 23 insertions(+), 8 deletions(-) diff --git a/executor/tiflashtest/tiflash_test.go b/executor/tiflashtest/tiflash_test.go index f6956a4743885..6f11550aac22b 100644 --- a/executor/tiflashtest/tiflash_test.go +++ b/executor/tiflashtest/tiflash_test.go @@ -1308,6 +1308,10 @@ func TestDisaggregatedTiFlashQuery(t *testing.T) { config.UpdateGlobal(func(conf *config.Config) { conf.DisaggregatedTiFlash = true }) + defer config.UpdateGlobal(func(conf *config.Config) { + conf.DisaggregatedTiFlash = false + }) + store := testkit.CreateMockStore(t, withMockTiFlash(2)) tk := testkit.NewTestKit(t, store) tk.MustExec("use test") @@ -1321,7 +1325,9 @@ func TestDisaggregatedTiFlashQuery(t *testing.T) { tk.MustQuery("explain select max( tbl_1.col_1 ) as r0 , sum( tbl_1.col_1 ) as r1 , sum( tbl_1.col_8 ) as r2 from tbl_1 where tbl_1.col_8 != 68 or tbl_1.col_3 between null and 939 order by r0,r1,r2 ;") - config.UpdateGlobal(func(conf *config.Config) { - conf.DisaggregatedTiFlash = false - }) + tk.MustExec("drop table if exists tbl_7, tbl_8") + tk.MustExec("create table tbl_7 ( col_32 int unsigned default 1404315863 ,col_33 boolean not null ,col_34 smallint default 15119 ,col_35 tinyint unsigned not null ,col_36 mediumint not null default -7380012 ,col_37 time not null ,col_38 char ( 167 ) collate utf8mb4_bin ,col_39 datetime default '1999-02-07' , unique key idx_7 ( col_36 ,col_32 ,col_39 ) ) charset utf8mb4 collate utf8mb4_bin partition by hash ( col_36 ) partitions 2;") + tk.MustExec("create table tbl_8 ( col_40 varchar ( 458 ) collate utf8mb4_bin not null ,col_41 float ,col_42 varchar ( 111 ) collate utf8mb4_bin not null default 'iFN1*3sU' ,col_43 mediumint not null default 3553140 ,col_44 time not null ,col_45 bigint not null , unique key idx_8 ( col_45 ) ,key idx_9 ( col_45 ,col_43 ) ,unique key idx_10 ( col_45 ,col_41 ,col_43 ) ,unique 
key idx_11 ( col_45 ) ) charset utf8mb4 collate utf8mb4_bin partition by hash ( col_45 ) partitions 4;") + tk.MustExec("set @@tidb_partition_prune_mode = 'static'") + tk.MustQuery("explain select /*+ hash_join( tbl_8 , tbl_7 */ lower( tbl_8.col_42 ) as r0 from tbl_8 , tbl_7 where tbl_8.col_42 < 'e' order by r0 limit 60 ;") } diff --git a/planner/core/find_best_task.go b/planner/core/find_best_task.go index 67a3efb3a74b3..ea9cf6861d918 100644 --- a/planner/core/find_best_task.go +++ b/planner/core/find_best_task.go @@ -1986,8 +1986,11 @@ func (ds *DataSource) convertToTableScan(prop *property.PhysicalProperty, candid } } } - // In disaggregated tiflash mode, only MPP is allowed, Cop and BatchCop is deprecated. - if prop.TaskTp == property.MppTaskType || config.GetGlobalConfig().DisaggregatedTiFlash && ts.StoreType == kv.TiFlash { + // In disaggregated tiflash mode, only MPP is allowed, cop and batchCop is deprecated. + // So if prop.TaskTp is RootTaskType, have to use mppTask then convert to rootTask. + isDisaggregatedTiFlashPath := config.GetGlobalConfig().DisaggregatedTiFlash && ts.StoreType == kv.TiFlash + canMppTaskConvertToRootTask := isDisaggregatedTiFlashPath && prop.TaskTp == property.RootTaskType && ds.SCtx().GetSessionVars().IsMPPAllowed() + if prop.TaskTp == property.MppTaskType || canMppTaskConvertToRootTask { if ts.KeepOrder { return invalidTask, nil } @@ -2015,7 +2018,9 @@ func (ds *DataSource) convertToTableScan(prop *property.PhysicalProperty, candid mppTask = ts.addPushedDownSelectionToMppTask(mppTask, ds.stats.ScaleByExpectCnt(prop.ExpectedCnt)) task = mppTask if !mppTask.invalid() { - if prop.TaskTp != property.RootTaskType && len(mppTask.rootTaskConds) > 0 { + if prop.TaskTp == property.MppTaskType && len(mppTask.rootTaskConds) > 0 { + // If got filters cannot be pushed down to tiflash, we have to make sure it will be executed in TiDB, + // So have to return a rootTask, but prop requires return mppTask, cannot meet this requirement. task = invalidTask } else { task = mppTask @@ -2025,6 +2030,10 @@ func (ds *DataSource) convertToTableScan(prop *property.PhysicalProperty, candid } return task, nil } + if isDisaggregatedTiFlashPath { + // prop.TaskTp is cop related, just return invalidTask. + return invalidTask, nil + } copTask := &copTask{ tablePlan: ts, indexPlanFinished: true, diff --git a/planner/core/task.go b/planner/core/task.go index 3183e07a49f57..006d1c2fdae72 100644 --- a/planner/core/task.go +++ b/planner/core/task.go @@ -2065,8 +2065,8 @@ func (p *PhysicalWindow) attach2Task(tasks ...task) task { type mppTask struct { p PhysicalPlan - partTp property.MPPPartitionType - hashCols []*property.MPPPartitionColumn + partTp property.MPPPartitionType + hashCols []*property.MPPPartitionColumn rootTaskConds []expression.Expression } From fd774094dfffbd3c5142c76a1294f86409c02955 Mon Sep 17 00:00:00 2001 From: guo-shaoge Date: Sun, 11 Dec 2022 17:01:18 +0800 Subject: [PATCH 03/10] fix planner(Selection); fix gcworker Signed-off-by: guo-shaoge --- ddl/placement/common.go | 2 ++ planner/core/find_best_task.go | 14 ++++++++------ planner/core/task.go | 31 ++++++++++++++++++++++++++----- store/gcworker/gc_worker.go | 4 ++++ 4 files changed, 40 insertions(+), 11 deletions(-) diff --git a/ddl/placement/common.go b/ddl/placement/common.go index cd02622dd0562..a80a091c2c3a1 100644 --- a/ddl/placement/common.go +++ b/ddl/placement/common.go @@ -54,4 +54,6 @@ const ( // EngineLabelTiKV is the label value used in some tests. 
And possibly TiKV will // set the engine label with a value of EngineLabelTiKV. EngineLabelTiKV = "tikv" + + EngineLabelTiFlashCompute = "tiflash_compute" ) diff --git a/planner/core/find_best_task.go b/planner/core/find_best_task.go index ea9cf6861d918..b056d4921e2aa 100644 --- a/planner/core/find_best_task.go +++ b/planner/core/find_best_task.go @@ -1988,9 +1988,10 @@ func (ds *DataSource) convertToTableScan(prop *property.PhysicalProperty, candid } // In disaggregated tiflash mode, only MPP is allowed, cop and batchCop is deprecated. // So if prop.TaskTp is RootTaskType, have to use mppTask then convert to rootTask. - isDisaggregatedTiFlashPath := config.GetGlobalConfig().DisaggregatedTiFlash && ts.StoreType == kv.TiFlash - canMppTaskConvertToRootTask := isDisaggregatedTiFlashPath && prop.TaskTp == property.RootTaskType && ds.SCtx().GetSessionVars().IsMPPAllowed() - if prop.TaskTp == property.MppTaskType || canMppTaskConvertToRootTask { + isDisaggregatedTiFlash := config.GetGlobalConfig().DisaggregatedTiFlash + isDisaggregatedTiFlashPath := isDisaggregatedTiFlash && ts.StoreType == kv.TiFlash + canMppConvertToRootForDisaggregatedTiFlash := isDisaggregatedTiFlashPath && prop.TaskTp == property.RootTaskType && ds.SCtx().GetSessionVars().IsMPPAllowed() + if prop.TaskTp == property.MppTaskType || canMppConvertToRootForDisaggregatedTiFlash { if ts.KeepOrder { return invalidTask, nil } @@ -2006,8 +2007,9 @@ func (ds *DataSource) convertToTableScan(prop *property.PhysicalProperty, candid } } mppTask := &mppTask{ - p: ts, - partTp: property.AnyType, + p: ts, + partTp: property.AnyType, + tblColHists: ds.TblColHists, } ts.PartitionInfo = PartitionInfo{ PruningConds: pushDownNot(ds.ctx, ds.allConds), @@ -2020,7 +2022,7 @@ func (ds *DataSource) convertToTableScan(prop *property.PhysicalProperty, candid if !mppTask.invalid() { if prop.TaskTp == property.MppTaskType && len(mppTask.rootTaskConds) > 0 { // If got filters cannot be pushed down to tiflash, we have to make sure it will be executed in TiDB, - // So have to return a rootTask, but prop requires return mppTask, cannot meet this requirement. + // So have to return a rootTask, but prop requires mppTask, cannot meet this requirement. task = invalidTask } else { task = mppTask diff --git a/planner/core/task.go b/planner/core/task.go index 006d1c2fdae72..c488451e4d90d 100644 --- a/planner/core/task.go +++ b/planner/core/task.go @@ -2065,8 +2065,20 @@ func (p *PhysicalWindow) attach2Task(tasks ...task) task { type mppTask struct { p PhysicalPlan - partTp property.MPPPartitionType - hashCols []*property.MPPPartitionColumn + partTp property.MPPPartitionType + hashCols []*property.MPPPartitionColumn + + tblColHists *statistics.HistColl + // rootTaskConds record filters of TableScan that cannot be pushed down to TiFlash. + + // For logical plan like: HashAgg -> Selection -> TableScan, if filters in Selection cannot be pushed to TiFlash. + // Planner will generate physical plan like: PhysicalHashAgg -> PhysicalSelection -> TableReader -> PhysicalTableScan(cop tiflash) + // Because planner will make mppTask invalid directly then use copTask directly. + + // But in DisaggregatedTiFlash mode, cop and batchCop protocol is disabled, so we have to consider this situation for mppTask. + // When generating PhysicalTableScan, if prop.TaskTp is RootTaskType, mppTask will be converted to rootTask, + // and filters in rootTaskConds will be added in a Selection which will be executed in TiDB. 
+ // So physical plan be like: PhysicalHashAgg -> PhysicalSelection -> TableReader -> ExchangeSender -> PhysicalTableScan(mpp tiflash) rootTaskConds []expression.Expression } @@ -2152,13 +2164,22 @@ func (t *mppTask) convertToRootTaskImpl(ctx sessionctx.Context) *rootTask { // Some Filter cannot be pushed down to TiFlash, need to add Selection in rootTask, // so this Selection will be executed in TiDB. _, isTableScan := t.p.(*PhysicalTableScan) + _, isSelection := t.p.(*PhysicalSelection) + if isSelection { + _, isTableScan = t.p.Children()[0].(*PhysicalTableScan) + } if !isTableScan { // Need to make sure oriTaskPlan is TableScan, because rootTaskConds is part of TableScan.FilterCondition. - // It's only for TableScan. This is ensured we convert mppTask to rootTask just after TableScan is built, + // It's only for TableScan. This is ensured by converting mppTask to rootTask just after TableScan is built, // so no other operators are added into this mppTask. - panic("expect task.p is PhysicalTableScan when got task.rootTaskConds") + logutil.BgLogger().Error("expect Selection or TableScan for mppTask.p", zap.String("mppTask.p", t.p.TP())) + return invalidTask + } + selectivity, _, err := t.tblColHists.Selectivity(ctx, t.rootTaskConds, nil) + if err != nil { + logutil.BgLogger().Debug("calculate selectivity failed, use selection factor", zap.Error(err)) + selectivity = SelectionFactor } - selectivity := SelectionFactor sel := PhysicalSelection{Conditions: t.rootTaskConds}.Init(ctx, rt.p.statsInfo().Scale(selectivity), rt.p.SelectBlockOffset()) sel.fromDataSource = true sel.SetChildren(rt.p) diff --git a/store/gcworker/gc_worker.go b/store/gcworker/gc_worker.go index 054ec83ee7e8a..87a990f7f096c 100644 --- a/store/gcworker/gc_worker.go +++ b/store/gcworker/gc_worker.go @@ -912,6 +912,10 @@ func needsGCOperationForStore(store *metapb.Store) (bool, error) { // skip physical resolve locks for it. return false, nil + case placement.EngineLabelTiFlashCompute: + logutil.BgLogger().Debug("[gc worker] will ignore gc tiflash_compute node") + return false, nil + case placement.EngineLabelTiKV, "": // If no engine label is set, it should be a TiKV node. 
return true, nil From 3d94b2353b94ed59117e383c98a7806e07a2f1cf Mon Sep 17 00:00:00 2001 From: guo-shaoge Date: Sun, 11 Dec 2022 21:33:18 +0800 Subject: [PATCH 04/10] refine case and failpoint Signed-off-by: guo-shaoge --- executor/tiflashtest/tiflash_test.go | 20 ++++++++++++-------- planner/core/planbuilder.go | 6 ++++++ 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/executor/tiflashtest/tiflash_test.go b/executor/tiflashtest/tiflash_test.go index 6f11550aac22b..023fdcfc4bcec 100644 --- a/executor/tiflashtest/tiflash_test.go +++ b/executor/tiflashtest/tiflash_test.go @@ -1316,18 +1316,22 @@ func TestDisaggregatedTiFlashQuery(t *testing.T) { tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists tbl_1") - tk.MustExec("create table tbl_1 ( col_1 bigint not null default -1443635317331776148 ,col_2 text ( 176 ) collate utf8mb4_bin not null ,col_3 decimal ( 8 , 3 ) ,col_4 varchar ( 128 ) collate utf8mb4_bin not null ,col_5 varchar ( 377 ) collate utf8mb4_bin ,col_6 double ,col_7 varchar ( 459 ) collate utf8mb4_bin ,col_8 tinyint default -88 ) charset utf8mb4 collate utf8mb4_bin ;") + tk.MustExec(`create table tbl_1 ( col_1 bigint not null default -1443635317331776148, + col_2 text ( 176 ) collate utf8mb4_bin not null, + col_3 decimal ( 8, 3 ), + col_4 varchar ( 128 ) collate utf8mb4_bin not null, + col_5 varchar ( 377 ) collate utf8mb4_bin, + col_6 double, + col_7 varchar ( 459 ) collate utf8mb4_bin, + col_8 tinyint default -88 ) charset utf8mb4 collate utf8mb4_bin ;`) tk.MustExec("alter table tbl_1 set tiflash replica 1") tb := external.GetTableByName(t, tk, "test", "tbl_1") err := domain.GetDomain(tk.Session()).DDL().UpdateTableReplicaInfo(tk.Session(), tb.Meta().ID, true) require.NoError(t, err) tk.MustExec("set @@session.tidb_isolation_read_engines=\"tiflash\"") - tk.MustQuery("explain select max( tbl_1.col_1 ) as r0 , sum( tbl_1.col_1 ) as r1 , sum( tbl_1.col_8 ) as r2 from tbl_1 where tbl_1.col_8 != 68 or tbl_1.col_3 between null and 939 order by r0,r1,r2 ;") - - tk.MustExec("drop table if exists tbl_7, tbl_8") - tk.MustExec("create table tbl_7 ( col_32 int unsigned default 1404315863 ,col_33 boolean not null ,col_34 smallint default 15119 ,col_35 tinyint unsigned not null ,col_36 mediumint not null default -7380012 ,col_37 time not null ,col_38 char ( 167 ) collate utf8mb4_bin ,col_39 datetime default '1999-02-07' , unique key idx_7 ( col_36 ,col_32 ,col_39 ) ) charset utf8mb4 collate utf8mb4_bin partition by hash ( col_36 ) partitions 2;") - tk.MustExec("create table tbl_8 ( col_40 varchar ( 458 ) collate utf8mb4_bin not null ,col_41 float ,col_42 varchar ( 111 ) collate utf8mb4_bin not null default 'iFN1*3sU' ,col_43 mediumint not null default 3553140 ,col_44 time not null ,col_45 bigint not null , unique key idx_8 ( col_45 ) ,key idx_9 ( col_45 ,col_43 ) ,unique key idx_10 ( col_45 ,col_41 ,col_43 ) ,unique key idx_11 ( col_45 ) ) charset utf8mb4 collate utf8mb4_bin partition by hash ( col_45 ) partitions 4;") - tk.MustExec("set @@tidb_partition_prune_mode = 'static'") - tk.MustQuery("explain select /*+ hash_join( tbl_8 , tbl_7 */ lower( tbl_8.col_42 ) as r0 from tbl_8 , tbl_7 where tbl_8.col_42 < 'e' order by r0 limit 60 ;") + needCheckTiFlashComputeNode := "false" + failpoint.Enable("github.com/pingcap/tidb/planner/core/testDisaggregatedTiFlashQuery", fmt.Sprintf("return(%s)", needCheckTiFlashComputeNode)) + defer failpoint.Disable("github.com/pingcap/tidb/planner/core/testDisaggregatedTiFlashQuery") + tk.MustExec("explain 
select max( tbl_1.col_1 ) as r0 , sum( tbl_1.col_1 ) as r1 , sum( tbl_1.col_8 ) as r2 from tbl_1 where tbl_1.col_8 != 68 or tbl_1.col_3 between null and 939 order by r0,r1,r2;") } diff --git a/planner/core/planbuilder.go b/planner/core/planbuilder.go index d8677ea7dd072..e589480e972f3 100644 --- a/planner/core/planbuilder.go +++ b/planner/core/planbuilder.go @@ -26,6 +26,7 @@ import ( "unsafe" "github.com/pingcap/errors" + "github.com/pingcap/failpoint" "github.com/pingcap/tidb/bindinfo" "github.com/pingcap/tidb/config" "github.com/pingcap/tidb/domain" @@ -1408,6 +1409,11 @@ func filterPathByIsolationRead(ctx sessionctx.Context, paths []*util.AccessPath, // 1. path.StoreType doesn't exists in isolationReadEngines or // 2. TiFlash is disaggregated and the number of tiflash_compute node is zero. shouldPruneTiFlashCompute := noTiFlashComputeNode && exists && paths[i].StoreType == kv.TiFlash + failpoint.Inject("testDisaggregatedTiFlashQuery", func(val failpoint.Value) { + // Ignore check if tiflash_compute node number. + // After we support disaggregated tiflash in test framework, can delete this failpoint. + shouldPruneTiFlashCompute = val.(bool) + }) if shouldPruneTiFlashCompute { outputComputeNodeErrMsg = true } From e266256b8cc6ad09586dd0a1e835dde8f56e787d Mon Sep 17 00:00:00 2001 From: guo-shaoge Date: Sun, 11 Dec 2022 21:37:20 +0800 Subject: [PATCH 05/10] delete unnecessary var Signed-off-by: guo-shaoge --- planner/core/find_best_task.go | 3 +-- planner/core/task.go | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/planner/core/find_best_task.go b/planner/core/find_best_task.go index b056d4921e2aa..c2056479740de 100644 --- a/planner/core/find_best_task.go +++ b/planner/core/find_best_task.go @@ -1988,8 +1988,7 @@ func (ds *DataSource) convertToTableScan(prop *property.PhysicalProperty, candid } // In disaggregated tiflash mode, only MPP is allowed, cop and batchCop is deprecated. // So if prop.TaskTp is RootTaskType, have to use mppTask then convert to rootTask. - isDisaggregatedTiFlash := config.GetGlobalConfig().DisaggregatedTiFlash - isDisaggregatedTiFlashPath := isDisaggregatedTiFlash && ts.StoreType == kv.TiFlash + isDisaggregatedTiFlashPath := config.GetGlobalConfig().DisaggregatedTiFlash && ts.StoreType == kv.TiFlash canMppConvertToRootForDisaggregatedTiFlash := isDisaggregatedTiFlashPath && prop.TaskTp == property.RootTaskType && ds.SCtx().GetSessionVars().IsMPPAllowed() if prop.TaskTp == property.MppTaskType || canMppConvertToRootForDisaggregatedTiFlash { if ts.KeepOrder { diff --git a/planner/core/task.go b/planner/core/task.go index c488451e4d90d..1a70bdbb87f94 100644 --- a/planner/core/task.go +++ b/planner/core/task.go @@ -2068,7 +2068,6 @@ type mppTask struct { partTp property.MPPPartitionType hashCols []*property.MPPPartitionColumn - tblColHists *statistics.HistColl // rootTaskConds record filters of TableScan that cannot be pushed down to TiFlash. // For logical plan like: HashAgg -> Selection -> TableScan, if filters in Selection cannot be pushed to TiFlash. @@ -2080,6 +2079,7 @@ type mppTask struct { // and filters in rootTaskConds will be added in a Selection which will be executed in TiDB. 
+	// So physical plan be like: PhysicalHashAgg -> PhysicalSelection -> TableReader -> ExchangeSender -> PhysicalTableScan(mpp tiflash)
 	rootTaskConds []expression.Expression
+	tblColHists   *statistics.HistColl
 }

From cd3fcaf7fd091cf64c631b970729975ab086ec55 Mon Sep 17 00:00:00 2001
From: guo-shaoge
Date: Sun, 11 Dec 2022 21:47:41 +0800
Subject: [PATCH 06/10] fix lint

Signed-off-by: guo-shaoge
---
 ddl/placement/common.go | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/ddl/placement/common.go b/ddl/placement/common.go
index a80a091c2c3a1..7c77ead97e30e 100644
--- a/ddl/placement/common.go
+++ b/ddl/placement/common.go
@@ -55,5 +55,7 @@ const (
 	// set the engine label with a value of EngineLabelTiKV.
 	EngineLabelTiKV = "tikv"
 
+	// EngineLabelTiFlashCompute is for disaggregated tiflash mode,
+	// it's the label of tiflash_compute nodes.
 	EngineLabelTiFlashCompute = "tiflash_compute"
 )

From a1f7042693e2a1514e91eda6188cb49657fe1751 Mon Sep 17 00:00:00 2001
From: guo-shaoge
Date: Mon, 12 Dec 2022 10:48:50 +0800
Subject: [PATCH 07/10] fix unit-test case (window)

Signed-off-by: guo-shaoge
---
 planner/core/find_best_task.go | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/planner/core/find_best_task.go b/planner/core/find_best_task.go
index c2056479740de..9a7b2c7ad2bff 100644
--- a/planner/core/find_best_task.go
+++ b/planner/core/find_best_task.go
@@ -2023,7 +2023,8 @@ func (ds *DataSource) convertToTableScan(prop *property.PhysicalProperty, candid
 			// If got filters cannot be pushed down to tiflash, we have to make sure it will be executed in TiDB,
 			// So have to return a rootTask, but prop requires mppTask, cannot meet this requirement.
 			task = invalidTask
-		} else {
+		} else if prop.TaskTp == property.RootTaskType {
+			// when got here, canMppConvertToRootForDisaggregatedTiFlash is true.
task = mppTask task = task.convertToRootTask(ds.ctx) - ds.addSelection4PlanCache(task.(*rootTask), ds.stats.ScaleByExpectCnt(prop.ExpectedCnt), prop) + if task != nil && !task.invalid() { + ds.addSelection4PlanCache(task.(*rootTask), ds.stats.ScaleByExpectCnt(prop.ExpectedCnt), prop) + } } } return task, nil From b0a8cd93e909d8ed80bb66707893b4cef88eac2f Mon Sep 17 00:00:00 2001 From: guo-shaoge Date: Thu, 29 Dec 2022 18:22:18 +0800 Subject: [PATCH 10/10] fix tautological condition: non-nil != nil (nilness) Signed-off-by: guo-shaoge --- planner/core/find_best_task.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/planner/core/find_best_task.go b/planner/core/find_best_task.go index 17672deaacdeb..80517b6f51da1 100644 --- a/planner/core/find_best_task.go +++ b/planner/core/find_best_task.go @@ -2027,7 +2027,7 @@ func (ds *DataSource) convertToTableScan(prop *property.PhysicalProperty, candid // when got here, canMppConvertToRootForDisaggregatedTiFlash is true. task = mppTask task = task.convertToRootTask(ds.ctx) - if task != nil && !task.invalid() { + if !task.invalid() { ds.addSelection4PlanCache(task.(*rootTask), ds.stats.ScaleByExpectCnt(prop.ExpectedCnt), prop) } }
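
A bird's-eye sketch of the branching this series adds to convertToTableScan, written as a small stand-alone Go program. Every name below is an illustrative stand-in rather than the actual planner API; it only models how the patches decide between producing an MPP task, converting the MPP task to a root task with a TiDB-side Selection for rootTaskConds, and giving up with an invalid task.

package main

import "fmt"

type taskType int

const (
	rootTaskType taskType = iota
	mppTaskType
	copTaskType
)

// chooseTask mirrors the control flow the series converges on (patches 01, 02 and 07).
func chooseTask(prop taskType, disaggregatedTiFlash, mppAllowed bool, numRootTaskConds int) string {
	canMppConvertToRoot := disaggregatedTiFlash && prop == rootTaskType && mppAllowed
	if prop == mppTaskType || canMppConvertToRoot {
		if prop == mppTaskType && numRootTaskConds > 0 {
			// The caller demands an MPP plan, but some filters can only run in TiDB.
			return "invalidTask"
		}
		if prop == rootTaskType {
			// Convert the MPP task to a root task; rootTaskConds become a
			// TiDB-side Selection above the reader.
			return "mppTask -> rootTask (+ Selection for rootTaskConds)"
		}
		return "mppTask"
	}
	if disaggregatedTiFlash {
		// cop / batchCop paths are deprecated for disaggregated TiFlash.
		return "invalidTask"
	}
	return "copTask"
}

func main() {
	fmt.Println(chooseTask(rootTaskType, true, true, 1)) // mppTask -> rootTask (+ Selection for rootTaskConds)
	fmt.Println(chooseTask(mppTaskType, true, true, 1))  // invalidTask
	fmt.Println(chooseTask(copTaskType, true, true, 0))  // invalidTask
	fmt.Println(chooseTask(copTaskType, false, true, 0)) // copTask
}

In disaggregated mode the cop branch disappears entirely, so a root-typed request that cannot use MPP (for example, when the session disables MPP) yields an invalid task instead of falling back to cop.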