Merge remote-tracking branch 'upstream/master' into HEAD
tangenta committed May 10, 2022
2 parents 97174f5 + 1d2a0b9 commit 405d2c0
Showing 495 changed files with 21,447 additions and 19,904 deletions.
2 changes: 1 addition & 1 deletion .github/licenserc.yml
@@ -13,13 +13,13 @@ header:
- '**/*.key'
- '**/*.md'
- '**/*.json'
- '**/*.toml'
- '**/*.pem'
- '**/*.crt'
- '**/*.test'
- '**/*.result'
- '**/*.example'
- '.codecov.yml'
- 'errors.toml'
- 'Jenkinsfile'
- '.editorconfig'
- 'hooks/pre-commit'
@@ -17,21 +17,7 @@ on:
- '!br/docker/**'
# disable pull request only keep the merge action since it is very costly to run those tests
# pull_request:
# branches:
# - master
# - 'release-[0-9].[0-9]*'
# paths:
# - '.github/workflows/br_compatible_test.yml'
# - 'br/**'
# - '!**.html'
# - '!**.md'
# - '!CNAME'
# - '!LICENSE'
# - '!br/docs/**'
# - '!br/tests/**'
# - '!br/docker/**'

# See: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency.
concurrency:
group: ${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
@@ -32,7 +32,6 @@ on:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

# See: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency.
concurrency:
group: ${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
@@ -27,7 +27,6 @@ on:
- 'util/codec/**'
- 'parser/model/**'

# See: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency.
concurrency:
group: ${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
35 changes: 35 additions & 0 deletions .github/workflows/integration-test-with-real-tikv.yml
@@ -0,0 +1,35 @@
name: Real TiKV Tests

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

concurrency:
  group: ${{ github.ref }}-${{ github.workflow }}
  cancel-in-progress: true

jobs:
  pessimistic-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-go@v2
        with:
          go-version: 1.18
      - name: Setup TiKV cluster
        run: |
          # Disable pipelined pessimistic lock temporarily until tikv#11649 is resolved
          echo -e "[pessimistic-txn]\npipelined = false\n" > tikv.toml
          echo -e "[raftdb]\nmax-open-files = 10240\n" >> tikv.toml
          echo -e "[rocksdb]\nmax-open-files = 10240\n" >> tikv.toml
          curl --proto '=https' --tlsv1.2 -sSf https://tiup-mirrors.pingcap.com/install.sh | sh
          $HOME/.tiup/bin/tiup playground nightly --mode tikv-slim --kv 3 --pd 3 --without-monitor --kv.config tikv.toml &
          curl --retry-connrefused --retry 5 --retry-delay 5 http://127.0.0.1:2379
      - name: Run Tests
        run: |
          export log_level=error
          make failpoint-enable
          go test ./tests/pessimistictest -v -with-real-tikv -timeout 20m
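Note on the last step: -with-real-tikv is not a built-in go test flag; it is a custom flag registered by the test package itself, which then dials the playground cluster started in the previous step (PD is reachable on 127.0.0.1:2379). A minimal, hypothetical sketch of how such a flag can be declared and checked — package and identifier names are illustrative only, not taken from the tidb repository:

package pessimistictest

import (
    "flag"
    "testing"
)

// withRealTiKV is a hypothetical flag; go test parses it because the test binary registers it.
var withRealTiKV = flag.Bool("with-real-tikv", false, "run tests against a real TiKV cluster instead of a mock store")

func TestPessimisticLock(t *testing.T) {
    if !*withRealTiKV {
        t.Skip("skipping: requires a real TiKV cluster (pass -with-real-tikv)")
    }
    // ... connect to the cluster started by tiup playground and exercise pessimistic locking ...
}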
1,267 changes: 0 additions & 1,267 deletions CHANGELOG.md

This file was deleted.

2 changes: 1 addition & 1 deletion Dockerfile
@@ -13,7 +13,7 @@
# limitations under the License.

# Builder image
FROM golang:1.16-alpine as builder
FROM golang:1.18.1-alpine as builder

RUN apk add --no-cache \
wget \
2 changes: 1 addition & 1 deletion bindinfo/bind_cache.go
@@ -18,10 +18,10 @@ import (
"errors"
"sync"

"github.com/cznic/mathutil"
"github.com/pingcap/tidb/sessionctx/variable"
"github.com/pingcap/tidb/util/hack"
"github.com/pingcap/tidb/util/kvcache"
"github.com/pingcap/tidb/util/mathutil"
"github.com/pingcap/tidb/util/memory"
)

8 changes: 3 additions & 5 deletions bindinfo/handle.go
@@ -44,6 +44,7 @@ import (
tablefilter "github.com/pingcap/tidb/util/table-filter"
"github.com/pingcap/tidb/util/timeutil"
"go.uber.org/zap"
"golang.org/x/exp/maps"
)

// BindHandle is used to handle all global sql bind operations.
@@ -724,9 +725,7 @@ func (h *BindHandle) removeBindRecord(hash string, meta *BindRecord) {

func copyBindRecordUpdateMap(oldMap map[string]*bindRecordUpdate) map[string]*bindRecordUpdate {
newMap := make(map[string]*bindRecordUpdate, len(oldMap))
for k, v := range oldMap {
newMap[k] = v
}
maps.Copy(newMap, oldMap)
return newMap
}
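The hunk above replaces a hand-written copy loop with maps.Copy from golang.org/x/exp/maps. A small self-contained sketch of the same pattern, using illustrative types rather than the bindRecordUpdate type from this file:

package main

import (
    "fmt"

    "golang.org/x/exp/maps"
)

func main() {
    oldMap := map[string]int{"a": 1, "b": 2}
    // Pre-size the destination, then copy every key/value pair in one call.
    newMap := make(map[string]int, len(oldMap))
    maps.Copy(newMap, oldMap)
    fmt.Println(newMap)
}

In Go 1.21 and later the same helper is available from the standard-library maps package, but the exp module is what this commit imports.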

@@ -961,8 +960,7 @@ func GenerateBindSQL(ctx context.Context, stmtNode ast.StmtNode, planHint string
withIdx := strings.Index(bindSQL, "WITH")
restoreCtx := format.NewRestoreCtx(format.RestoreStringSingleQuotes|format.RestoreSpacesAroundBinaryOperation|format.RestoreStringWithoutCharset|format.RestoreNameBackQuotes, &withSb)
restoreCtx.DefaultDB = defaultDB
err := n.With.Restore(restoreCtx)
if err != nil {
if err := n.With.Restore(restoreCtx); err != nil {
logutil.BgLogger().Debug("[sql-bind] restore SQL failed", zap.Error(err))
return ""
}
2 changes: 1 addition & 1 deletion br/cmd/br/cmd.go
@@ -113,7 +113,7 @@ func Init(cmd *cobra.Command) (err error) {
tidbLogCfg.File.Filename = timestampLogFileName()
} else {
// Don't print slow log in br
config.GetGlobalConfig().Log.EnableSlowLog.Store(false)
config.GetGlobalConfig().Instance.EnableSlowLog.Store(false)
}
e = logutil.InitLogger(&tidbLogCfg)
if e != nil {
7 changes: 7 additions & 0 deletions br/cmd/br/debug.go
@@ -89,6 +89,9 @@ func newCheckSumCommand() *cobra.Command {
if err != nil {
return errors.Trace(err)
}
if schema.Table == nil {
continue
}
tblInfo := &model.TableInfo{}
err = json.Unmarshal(schema.Table, tblInfo)
if err != nil {
@@ -216,6 +219,10 @@ func newBackupMetaValidateCommand() *cobra.Command {
tableIDMap := make(map[int64]int64)
// Simulate to create table
for _, table := range tables {
if table.Info == nil {
// empty database.
continue
}
indexIDAllocator := mockid.NewIDAllocator()
newTable := new(model.TableInfo)
tableID, _ := tableIDAllocator.Alloc()
Expand Down
50 changes: 1 addition & 49 deletions br/cmd/tidb-lightning-ctl/main.go
@@ -24,7 +24,6 @@ import (
"github.com/pingcap/errors"
"github.com/pingcap/tidb/br/pkg/lightning"
"github.com/pingcap/tidb/br/pkg/lightning/backend"
"github.com/pingcap/tidb/br/pkg/lightning/backend/importer"
"github.com/pingcap/tidb/br/pkg/lightning/backend/local"
"github.com/pingcap/tidb/br/pkg/lightning/checkpoints"
"github.com/pingcap/tidb/br/pkg/lightning/common"
@@ -46,7 +45,7 @@ var exit = os.Exit
func run() error {
var (
compact, flagFetchMode *bool
mode, flagImportEngine, flagCleanupEngine *string
mode *string
cpRemove, cpErrIgnore, cpErrDestroy, cpDump *string
localStoringTables *bool

@@ -65,9 +64,6 @@ func run() error {
mode = fs.String("switch-mode", "", "switch tikv into import mode or normal mode, values can be ['import', 'normal']")
flagFetchMode = fs.Bool("fetch-mode", false, "obtain the current mode of every tikv in the cluster")

flagImportEngine = fs.String("import-engine", "", "manually import a closed engine (value can be '`db`.`table`:123' or a UUID")
flagCleanupEngine = fs.String("cleanup-engine", "", "manually delete a closed engine")

cpRemove = fs.String("checkpoint-remove", "", "remove the checkpoint associated with the given table (value can be 'all' or '`db`.`table`')")
cpErrIgnore = fs.String("checkpoint-error-ignore", "", "ignore errors encoutered previously on the given table (value can be 'all' or '`db`.`table`'); may corrupt this table if used incorrectly")
cpErrDestroy = fs.String("checkpoint-error-destroy", "", "deletes imported data with table which has an error before (value can be 'all' or '`db`.`table`')")
@@ -102,15 +98,9 @@ func run() error {
if *flagFetchMode {
return errors.Trace(fetchMode(ctx, cfg, tls))
}
if len(*flagImportEngine) != 0 {
return errors.Trace(importEngine(ctx, cfg, tls, *flagImportEngine))
}
if len(*mode) != 0 {
return errors.Trace(lightning.SwitchMode(ctx, cfg, tls, *mode))
}
if len(*flagCleanupEngine) != 0 {
return errors.Trace(lightning.CleanupEngine(ctx, cfg, tls, *flagCleanupEngine))
}

if len(*cpRemove) != 0 {
return errors.Trace(lightning.CheckpointRemove(ctx, cfg, *cpRemove))
@@ -199,26 +189,6 @@ func checkpointErrorDestroy(ctx context.Context, cfg *config.Config, tls *common
}
}

if cfg.TikvImporter.Backend == "importer" {
importer, err := importer.NewImporter(ctx, tls, cfg.TikvImporter.Addr, cfg.TiDB.PdAddr)
if err != nil {
return errors.Trace(err)
}
defer importer.Close()

for _, table := range targetTables {
for engineID := table.MinEngineID; engineID <= table.MaxEngineID; engineID++ {
fmt.Fprintln(os.Stderr, "Closing and cleaning up engine:", table.TableName, engineID)
closedEngine, err := importer.UnsafeCloseEngine(ctx, nil, table.TableName, engineID)
if err != nil {
fmt.Fprintln(os.Stderr, "* Encountered error while closing engine:", err)
lastErr = err
} else if err := closedEngine.Cleanup(ctx); err != nil {
lastErr = err
}
}
}
}
// For importer backend, engine was stored in importer's memory, we can retrieve it from alive importer process.
// But in local backend, if we want to use common API `UnsafeCloseEngine` and `Cleanup`,
// we need either lightning process alive or engine map persistent.
Expand Down Expand Up @@ -331,21 +301,3 @@ func getLocalStoringTables(ctx context.Context, cfg *config.Config) (err2 error)

return nil
}

func importEngine(ctx context.Context, cfg *config.Config, tls *common.TLS, engine string) error {
importer, err := importer.NewImporter(ctx, tls, cfg.TikvImporter.Addr, cfg.TiDB.PdAddr)
if err != nil {
return errors.Trace(err)
}

ce, err := lightning.UnsafeCloseEngine(ctx, importer, engine)
if err != nil {
return errors.Trace(err)
}

regionSplitSize := int64(cfg.TikvImporter.RegionSplitSize)
if regionSplitSize == 0 {
regionSplitSize = int64(config.SplitRegionSize)
}
return errors.Trace(ce.Import(ctx, regionSplitSize))
}
31 changes: 23 additions & 8 deletions br/pkg/backup/client.go
@@ -329,8 +329,8 @@ func BuildBackupRangeAndSchema(
}

if len(tables) == 0 {
log.Warn("It's not necessary for backing up empty database",
zap.Stringer("db", dbInfo.Name))
log.Info("backup empty database", zap.Stringer("db", dbInfo.Name))
backupSchemas.addSchema(dbInfo, nil)
continue
}

@@ -934,9 +934,17 @@ func doSendBackup(
})
bCli, err := client.Backup(ctx, &req)
failpoint.Inject("reset-retryable-error", func(val failpoint.Value) {
if val.(bool) {
logutil.CL(ctx).Debug("failpoint reset-retryable-error injected.")
err = status.Error(codes.Unavailable, "Unavailable error")
switch val.(string) {
case "Unavaiable":
{
logutil.CL(ctx).Debug("failpoint reset-retryable-error unavailable injected.")
err = status.Error(codes.Unavailable, "Unavailable error")
}
case "Internal":
{
logutil.CL(ctx).Debug("failpoint reset-retryable-error internal injected.")
err = status.Error(codes.Internal, "Internal error")
}
}
})
failpoint.Inject("reset-not-retryable-error", func(val failpoint.Value) {
@@ -1030,16 +1038,23 @@ const (

// isRetryableError represents whether we should retry reset grpc connection.
func isRetryableError(err error) bool {

if status.Code(err) == codes.Unavailable {
return true
// some errors can be retried
// https://github.com/pingcap/tidb/issues/34350
switch status.Code(err) {
case codes.Unavailable, codes.DeadlineExceeded,
codes.ResourceExhausted, codes.Aborted, codes.Internal:
{
log.Warn("backup met some errors, these errors can be retry 5 times", zap.Error(err))
return true
}
}

// At least, there are two possible cancel() call,
// one from backup range, another from gRPC, here we retry when gRPC cancel with connection closing
if status.Code(err) == codes.Canceled {
if s, ok := status.FromError(err); ok {
if strings.Contains(s.Message(), gRPC_Cancel) {
log.Warn("backup met grpc cancel error, this errors can be retry 5 times", zap.Error(err))
return true
}
}
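The rewritten isRetryableError now treats Unavailable, DeadlineExceeded, ResourceExhausted, Aborted, and Internal as retryable, and the new log lines mention a budget of five attempts. A minimal sketch of the kind of retry loop such a predicate typically feeds — illustrative only; the actual retry logic lives elsewhere in br/pkg/backup:

package main

import (
    "fmt"

    "google.golang.org/grpc/codes"
    "google.golang.org/grpc/status"
)

// isRetryable mirrors the predicate above in simplified form.
func isRetryable(err error) bool {
    switch status.Code(err) {
    case codes.Unavailable, codes.DeadlineExceeded, codes.ResourceExhausted, codes.Aborted, codes.Internal:
        return true
    }
    return false
}

// callWithRetry runs do up to attempts times, stopping early on success or a non-retryable error.
func callWithRetry(attempts int, do func() error) error {
    var lastErr error
    for i := 0; i < attempts; i++ {
        if lastErr = do(); lastErr == nil {
            return nil
        }
        if !isRetryable(lastErr) {
            return lastErr
        }
        fmt.Printf("attempt %d failed, retrying: %v\n", i+1, lastErr)
    }
    return lastErr
}

func main() {
    err := callWithRetry(5, func() error {
        return status.Error(codes.Unavailable, "simulated transient failure")
    })
    fmt.Println("final error:", err)
}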