diff --git a/cmd/backup.go b/cmd/backup.go
index 0958e92db..74eca92d3 100644
--- a/cmd/backup.go
+++ b/cmd/backup.go
@@ -74,6 +74,7 @@ func newFullBackupCommand() *cobra.Command {
             return runBackupCommand(command, "Full backup")
         },
     }
+    task.DefineFilterFlags(command)
     return command
 }
 
diff --git a/cmd/restore.go b/cmd/restore.go
index d4f2e8ec6..dc907df58 100644
--- a/cmd/restore.go
+++ b/cmd/restore.go
@@ -82,6 +82,7 @@ func newFullRestoreCommand() *cobra.Command {
             return runRestoreCommand(cmd, "Full restore")
         },
     }
+    task.DefineFilterFlags(command)
     return command
 }
 
diff --git a/go.mod b/go.mod
index 7a0791ae5..6cc439e0c 100644
--- a/go.mod
+++ b/go.mod
@@ -19,7 +19,7 @@ require (
     github.com/pingcap/parser v0.0.0-20200518090819-ec1e13b948b1
     github.com/pingcap/pd/v4 v4.0.0-rc.2.0.20200520083007-2c251bd8f181
     github.com/pingcap/tidb v1.1.0-beta.0.20200521154755-134e691d6f5f
-    github.com/pingcap/tidb-tools v4.0.0-rc.1.0.20200514040632-f76b3e428e19+incompatible
+    github.com/pingcap/tidb-tools v4.0.0-rc.2.0.20200521050818-6dd445d83fe0+incompatible
     github.com/pingcap/tipb v0.0.0-20200417094153-7316d94df1ee
     github.com/prometheus/client_golang v1.5.1
     github.com/prometheus/common v0.9.1
diff --git a/go.sum b/go.sum
index abe4575b9..b25de07fc 100644
--- a/go.sum
+++ b/go.sum
@@ -512,6 +512,8 @@ github.com/pingcap/tidb-tools v4.0.0-rc.1.0.20200421113014-507d2bb3a15e+incompat
 github.com/pingcap/tidb-tools v4.0.0-rc.1.0.20200421113014-507d2bb3a15e+incompatible/go.mod h1:XGdcy9+yqlDSEMTpOXnwf3hiTeqrV6MN/u1se9N8yIM=
 github.com/pingcap/tidb-tools v4.0.0-rc.1.0.20200514040632-f76b3e428e19+incompatible h1:/JKsYjsa5Ug8v5CN4zIbJGIqsvgBUkGwaP/rEScVvWM=
 github.com/pingcap/tidb-tools v4.0.0-rc.1.0.20200514040632-f76b3e428e19+incompatible/go.mod h1:XGdcy9+yqlDSEMTpOXnwf3hiTeqrV6MN/u1se9N8yIM=
+github.com/pingcap/tidb-tools v4.0.0-rc.2.0.20200521050818-6dd445d83fe0+incompatible h1:e+j+rsJYX+J7eTkgjnGBH2/T3NS6GNSPD6nHA5bPdCI=
+github.com/pingcap/tidb-tools v4.0.0-rc.2.0.20200521050818-6dd445d83fe0+incompatible/go.mod h1:XGdcy9+yqlDSEMTpOXnwf3hiTeqrV6MN/u1se9N8yIM=
 github.com/pingcap/tipb v0.0.0-20190428032612-535e1abaa330/go.mod h1:RtkHW8WbcNxj8lsbzjaILci01CtYnYbIkQhjyZWrWVI=
 github.com/pingcap/tipb v0.0.0-20200417094153-7316d94df1ee h1:XJQ6/LGzOSc/jo33AD8t7jtc4GohxcyODsYnb+kZXJM=
 github.com/pingcap/tipb v0.0.0-20200417094153-7316d94df1ee/go.mod h1:RtkHW8WbcNxj8lsbzjaILci01CtYnYbIkQhjyZWrWVI=
diff --git a/pkg/backup/client.go b/pkg/backup/client.go
index c0651d7a7..a72879216 100644
--- a/pkg/backup/client.go
+++ b/pkg/backup/client.go
@@ -18,7 +18,7 @@ import (
     "github.com/pingcap/log"
     "github.com/pingcap/parser/model"
     pd "github.com/pingcap/pd/v4/client"
-    "github.com/pingcap/tidb-tools/pkg/filter"
+    "github.com/pingcap/tidb-tools/pkg/table-filter"
     "github.com/pingcap/tidb/distsql"
     "github.com/pingcap/tidb/domain"
     "github.com/pingcap/tidb/kv"
@@ -210,7 +210,7 @@ func appendRanges(tbl *model.TableInfo, tblID int64) ([]kv.KeyRange, error) {
 func BuildBackupRangeAndSchema(
     dom *domain.Domain,
     storage kv.Storage,
-    tableFilter *filter.Filter,
+    tableFilter filter.Filter,
     backupTS uint64,
 ) ([]rtree.Range, *Schemas, error) {
     info, err := dom.GetSnapshotInfoSchema(backupTS)
@@ -232,7 +232,7 @@ func BuildBackupRangeAndSchema(
         randAlloc := autoid.NewAllocator(storage, dbInfo.ID, false, autoid.AutoRandomType)
 
         for _, tableInfo := range dbInfo.Tables {
-            if !tableFilter.Match(&filter.Table{Schema: dbInfo.Name.L, Name: tableInfo.Name.L}) {
+            if !tableFilter.MatchTable(dbInfo.Name.O, tableInfo.Name.O) {
                 // Skip tables other than the given table.
                 continue
             }
diff --git a/pkg/backup/schema_test.go b/pkg/backup/schema_test.go
index 06584a6b5..c4fe485fc 100644
--- a/pkg/backup/schema_test.go
+++ b/pkg/backup/schema_test.go
@@ -8,7 +8,7 @@ import (
     "sync/atomic"
 
     . "github.com/pingcap/check"
-    "github.com/pingcap/tidb-tools/pkg/filter"
+    "github.com/pingcap/tidb-tools/pkg/table-filter"
     "github.com/pingcap/tidb/util/testkit"
     "github.com/pingcap/tidb/util/testleak"
 
@@ -57,9 +57,7 @@ func (s *testBackupSchemaSuite) TestBuildBackupRangeAndSchema(c *C) {
     tk := testkit.NewTestKit(c, s.mock.Storage)
 
     // Table t1 is not exist.
-    testFilter, err := filter.New(false, &filter.Rules{
-        DoTables: []*filter.Table{{Schema: "test", Name: "t1"}},
-    })
+    testFilter, err := filter.Parse([]string{"test.t1"})
     c.Assert(err, IsNil)
     _, backupSchemas, err := backup.BuildBackupRangeAndSchema(
         s.mock.Domain, s.mock.Storage, testFilter, math.MaxUint64)
@@ -67,9 +65,7 @@
     c.Assert(backupSchemas, IsNil)
 
     // Database is not exist.
-    fooFilter, err := filter.New(false, &filter.Rules{
-        DoTables: []*filter.Table{{Schema: "foo", Name: "t1"}},
-    })
+    fooFilter, err := filter.Parse([]string{"foo.t1"})
     c.Assert(err, IsNil)
     _, backupSchemas, err = backup.BuildBackupRangeAndSchema(
         s.mock.Domain, s.mock.Storage, fooFilter, math.MaxUint64)
@@ -77,7 +73,7 @@
     c.Assert(backupSchemas, IsNil)
 
     // Empty database.
-    noFilter, err := filter.New(false, &filter.Rules{})
+    noFilter, err := filter.Parse([]string{"*.*"})
     c.Assert(err, IsNil)
     _, backupSchemas, err = backup.BuildBackupRangeAndSchema(
         s.mock.Domain, s.mock.Storage, noFilter, math.MaxUint64)
diff --git a/pkg/task/backup.go b/pkg/task/backup.go
index 9d055bc5c..9765c5a3b 100644
--- a/pkg/task/backup.go
+++ b/pkg/task/backup.go
@@ -14,7 +14,6 @@ import (
     "github.com/pingcap/log"
     "github.com/pingcap/parser/model"
     "github.com/pingcap/parser/mysql"
-    "github.com/pingcap/tidb-tools/pkg/filter"
     "github.com/pingcap/tidb/sessionctx/stmtctx"
     "github.com/pingcap/tidb/sessionctx/variable"
     "github.com/pingcap/tidb/types"
@@ -109,10 +108,6 @@ func RunBackup(c context.Context, g glue.Glue, cmdName string, cfg *BackupConfig
     if err != nil {
         return err
     }
-    tableFilter, err := filter.New(cfg.CaseSensitive, &cfg.Filter)
-    if err != nil {
-        return err
-    }
     mgr, err := newMgr(ctx, g, cfg.PD, cfg.TLS, conn.SkipTiFlash)
     if err != nil {
         return err
     }
@@ -135,7 +130,7 @@
     g.Record("BackupTS", backupTS)
 
     ranges, backupSchemas, err := backup.BuildBackupRangeAndSchema(
-        mgr.GetDomain(), mgr.GetTiKV(), tableFilter, backupTS)
+        mgr.GetDomain(), mgr.GetTiKV(), cfg.TableFilter, backupTS)
     if err != nil {
         return err
     }
diff --git a/pkg/task/common.go b/pkg/task/common.go
index c7962a94a..18de6c785 100644
--- a/pkg/task/common.go
+++ b/pkg/task/common.go
@@ -15,7 +15,7 @@ import (
     "github.com/pingcap/kvproto/pkg/backup"
     "github.com/pingcap/log"
     pd "github.com/pingcap/pd/v4/client"
-    "github.com/pingcap/tidb-tools/pkg/filter"
+    "github.com/pingcap/tidb-tools/pkg/table-filter"
     "github.com/pingcap/tidb/store/tikv"
     "github.com/spf13/cobra"
     "github.com/spf13/pflag"
@@ -49,6 +49,8 @@
     flagRateLimitUnit = "ratelimit-unit"
     flagConcurrency   = "concurrency"
     flagChecksum      = "checksum"
+    flagFilter        = "filter"
+    flagCaseSensitive = "case-sensitive"
 )
 
 // TLSConfig is the common configuration for TLS connection.
@@ -91,8 +93,18 @@ type Config struct {
     // LogProgress is true means the progress bar is printed to the log instead of stdout.
     LogProgress bool `json:"log-progress" toml:"log-progress"`
 
-    CaseSensitive bool         `json:"case-sensitive" toml:"case-sensitive"`
-    Filter        filter.Rules `json:"black-white-list" toml:"black-white-list"`
+    // CaseSensitive should not be used.
+    //
+    // Deprecated: This field is kept only to satisfy the cyclic dependency with TiDB. This field
+    // should be removed after TiDB upgrades the BR dependency.
+    CaseSensitive bool
+    // Filter should not be used, use TableFilter instead.
+    //
+    // Deprecated: This field is kept only to satisfy the cyclic dependency with TiDB. This field
+    // should be removed after TiDB upgrades the BR dependency.
+    Filter filter.MySQLReplicationRules
+
+    TableFilter filter.Filter `json:"-" toml:"-"`
 }
 
 // DefineCommonFlags defines the flags common to all BRIE commands.
@@ -119,19 +131,26 @@ func DefineCommonFlags(flags *pflag.FlagSet) {
     storage.DefineFlags(flags)
 }
 
-// DefineDatabaseFlags defines the required --db flag.
+// DefineDatabaseFlags defines the required --db flag for `db` subcommand.
 func DefineDatabaseFlags(command *cobra.Command) {
     command.Flags().String(flagDatabase, "", "database name")
     _ = command.MarkFlagRequired(flagDatabase)
 }
 
-// DefineTableFlags defines the required --db and --table flags.
+// DefineTableFlags defines the required --db and --table flags for `table` subcommand.
 func DefineTableFlags(command *cobra.Command) {
     DefineDatabaseFlags(command)
     command.Flags().StringP(flagTable, "t", "", "table name")
     _ = command.MarkFlagRequired(flagTable)
 }
 
+// DefineFilterFlags defines the --filter and --case-sensitive flags for `full` subcommand.
+func DefineFilterFlags(command *cobra.Command) {
+    flags := command.Flags()
+    flags.StringArrayP(flagFilter, "f", []string{"*.*"}, "select tables to process")
+    flags.Bool(flagCaseSensitive, false, "whether the table names used in --filter should be case-sensitive")
+}
+
 // ParseFromFlags parses the TLS config from the flag set.
 func (tls *TLSConfig) ParseFromFlags(flags *pflag.FlagSet) error {
     var err error
@@ -188,20 +207,39 @@ func (cfg *Config) ParseFromFlags(flags *pflag.FlagSet) error {
     }
     cfg.RateLimit = rateLimit * rateLimitUnit
 
-    if dbFlag := flags.Lookup(flagDatabase); dbFlag != nil {
-        db := escapeFilterName(dbFlag.Value.String())
+    var caseSensitive bool
+    if filterFlag := flags.Lookup(flagFilter); filterFlag != nil {
+        f, err := filter.Parse(filterFlag.Value.(pflag.SliceValue).GetSlice())
+        if err != nil {
+            return err
+        }
+        cfg.TableFilter = f
+        caseSensitive, err = flags.GetBool(flagCaseSensitive)
+        if err != nil {
+            return errors.Trace(err)
+        }
+    } else if dbFlag := flags.Lookup(flagDatabase); dbFlag != nil {
+        db := dbFlag.Value.String()
         if len(db) == 0 {
             return errors.New("empty database name is not allowed")
         }
         if tblFlag := flags.Lookup(flagTable); tblFlag != nil {
-            tbl := escapeFilterName(tblFlag.Value.String())
+            tbl := tblFlag.Value.String()
             if len(tbl) == 0 {
                 return errors.New("empty table name is not allowed")
             }
-            cfg.Filter.DoTables = []*filter.Table{{Schema: db, Name: tbl}}
+            cfg.TableFilter = filter.NewTablesFilter(filter.Table{
+                Schema: db,
+                Name:   tbl,
+            })
         } else {
-            cfg.Filter.DoDBs = []string{db}
+            cfg.TableFilter = filter.NewSchemasFilter(db)
         }
+    } else {
+        cfg.TableFilter, _ = filter.Parse([]string{"*.*"})
+    }
+    if !caseSensitive {
+        cfg.TableFilter = filter.CaseInsensitive(cfg.TableFilter)
     }
 
     if err := cfg.BackendOptions.ParseFromFlags(flags); err != nil {
diff --git a/pkg/task/restore.go b/pkg/task/restore.go
index b026cbe5b..bf0a6f63a 100644
--- a/pkg/task/restore.go
+++ b/pkg/task/restore.go
@@ -10,7 +10,6 @@ import (
     "github.com/pingcap/kvproto/pkg/backup"
     "github.com/pingcap/log"
     "github.com/pingcap/parser/model"
-    "github.com/pingcap/tidb-tools/pkg/filter"
     "github.com/pingcap/tidb/config"
     "github.com/spf13/pflag"
     "go.uber.org/zap"
@@ -321,15 +320,10 @@ func filterRestoreFiles(
     client *restore.Client,
     cfg *RestoreConfig,
 ) (files []*backup.File, tables []*utils.Table, dbs []*utils.Database, err error) {
-    tableFilter, err := filter.New(cfg.CaseSensitive, &cfg.Filter)
-    if err != nil {
-        return nil, nil, nil, err
-    }
-
     for _, db := range client.GetDatabases() {
         createdDatabase := false
         for _, table := range db.Tables {
-            if !tableFilter.Match(&filter.Table{Schema: db.Info.Name.O, Name: table.Info.Name.O}) {
+            if !cfg.TableFilter.MatchTable(db.Info.Name.O, table.Info.Name.O) {
                 continue
             }
 
diff --git a/tests/br_table_filter/run.sh b/tests/br_table_filter/run.sh
new file mode 100755
index 000000000..f04b7b186
--- /dev/null
+++ b/tests/br_table_filter/run.sh
@@ -0,0 +1,120 @@
+#!/bin/sh
+#
+# Copyright 2020 PingCAP, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eux
+DB="$TEST_NAME"
+
+run_sql "create schema $DB;"
+
+run_sql "create table $DB.one(c int);"
+run_sql "create table $DB.two(c int);"
+run_sql "create table $DB.three(c int);"
+run_sql "create table $DB.four(c int);"
+run_sql "create table $DB.FIVE(c int);"
+run_sql "create table $DB.TEN(c int);"
+run_sql 'create table '"$DB"'.`the,special,table`(c int);'
+
+run_sql "insert into $DB.one values (1);"
+run_sql "insert into $DB.two values (2);"
+run_sql "insert into $DB.three values (3);"
+run_sql "insert into $DB.four values (4);"
+run_sql "insert into $DB.FIVE values (5);"
+run_sql "insert into $DB.TEN values (10);"
+run_sql 'insert into '"$DB"'.`the,special,table` values (375);'
+
+echo 'Simple check'
+
+run_br backup full -f "$DB.*" -s "local://$TEST_DIR/$DB/full" --pd $PD_ADDR
+run_sql "drop schema $DB;"
+run_br restore full -s "local://$TEST_DIR/$DB/full" --pd $PD_ADDR
+
+run_sql "select c from $DB.one;"
+run_sql "select c from $DB.two;"
+run_sql "select c from $DB.three;"
+run_sql "select c from $DB.four;"
+run_sql "select c from $DB.FIVE;"
+run_sql "select c from $DB.TEN;"
+run_sql 'select c from '"$DB"'.`the,special,table`;'
+
+echo 'Filtered backup check'
+
+run_br backup full -f "$DB.t*" -s "local://$TEST_DIR/$DB/t" --pd $PD_ADDR
+run_sql "drop schema $DB;"
+run_br restore full -s "local://$TEST_DIR/$DB/t" --pd $PD_ADDR
+
+! run_sql "select c from $DB.one;"
+run_sql "select c from $DB.two;"
+run_sql "select c from $DB.three;"
+! run_sql "select c from $DB.four;"
+! run_sql "select c from $DB.FIVE;"
+run_sql "select c from $DB.TEN;"
+run_sql 'select c from '"$DB"'.`the,special,table`;'
+
+echo 'Filtered restore check'
+
+run_sql "drop schema $DB;"
+run_br restore full -f "*.f*" -s "local://$TEST_DIR/$DB/full" --pd $PD_ADDR
+
+! run_sql "select c from $DB.one;"
+! run_sql "select c from $DB.two;"
+! run_sql "select c from $DB.three;"
+run_sql "select c from $DB.four;"
+run_sql "select c from $DB.FIVE;"
+! run_sql "select c from $DB.TEN;"
+! run_sql 'select c from '"$DB"'.`the,special,table`;'
+
+echo 'Multiple filters check'
+
+run_sql "drop schema $DB;"
+run_br restore full -f '*.*' -f '!*.five' -f '!*.`the,special,table`' -s "local://$TEST_DIR/$DB/full" --pd $PD_ADDR
+
+run_sql "select c from $DB.one;"
+run_sql "select c from $DB.two;"
+run_sql "select c from $DB.three;"
+run_sql "select c from $DB.four;"
+! run_sql "select c from $DB.FIVE;"
+run_sql "select c from $DB.TEN;"
+! run_sql 'select c from '"$DB"'.`the,special,table`;'
+
+echo 'Case sensitive restore check'
+
+run_sql "drop schema $DB;"
+run_br restore full --case-sensitive -f '*.t*' -s "local://$TEST_DIR/$DB/full" --pd $PD_ADDR
+
+! run_sql "select c from $DB.one;"
+run_sql "select c from $DB.two;"
+run_sql "select c from $DB.three;"
+! run_sql "select c from $DB.four;"
+! run_sql "select c from $DB.FIVE;"
+! run_sql "select c from $DB.TEN;"
+run_sql 'select c from '"$DB"'.`the,special,table`;'
+
+echo 'Case sensitive backup check'
+
+run_sql "drop schema $DB;"
+run_br restore full --case-sensitive -s "local://$TEST_DIR/$DB/full" --pd $PD_ADDR
+run_br backup full --case-sensitive -f "$DB.[oF]*" -s "local://$TEST_DIR/$DB/of" --pd $PD_ADDR
+run_sql "drop schema $DB;"
+run_br restore full --case-sensitive -s "local://$TEST_DIR/$DB/of" --pd $PD_ADDR
+
+run_sql "select c from $DB.one;"
+! run_sql "select c from $DB.two;"
+! run_sql "select c from $DB.three;"
+! run_sql "select c from $DB.four;"
+run_sql "select c from $DB.FIVE;"
+! run_sql "select c from $DB.TEN;"
+! run_sql 'select c from '"$DB"'.`the,special,table`;'
+
+run_sql "drop schema $DB;"