Fix: dump all generated column table (pingcap#166)
* Fix dump all generated column table
Abingcbc authored Oct 22, 2020
1 parent 9d3d5fd commit f447dd3
Showing 7 changed files with 133 additions and 20 deletions.
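For context on the fix: the tables involved are ones where every column is a generated column, like the test table created below (`a int as (1), b int as (2)`). MySQL does not accept explicit values for generated columns, so dumpling excludes them from its selected-field list, and for such a table that list collapses to an empty string; before this change the empty list flowed straight into GetColumnTypes and buildSelectQuery and produced a SELECT with no column list. A minimal Go sketch of the dump shape the patched writer produces, using a hypothetical buildInsert helper rather than dumpling's API:

package main

import (
	"fmt"
	"strings"
)

// buildInsert renders one INSERT statement. selectedField is the parenthesized
// column list, or "" when every column of the table is generated.
func buildInsert(table, selectedField string, rows int) string {
	var sb strings.Builder
	if selectedField != "" && selectedField != "*" {
		fmt.Fprintf(&sb, "INSERT INTO `%s` %s VALUES\n", table, selectedField)
	} else {
		fmt.Fprintf(&sb, "INSERT INTO `%s` VALUES\n", table)
	}
	tuples := make([]string, rows)
	for i := range tuples {
		tuples[i] = "()" // generated columns cannot be given explicit values
	}
	return sb.String() + strings.Join(tuples, ",\n") + ";\n"
}

func main() {
	// For the test table t(a int as (1), b int as (2)) with 3 rows:
	fmt.Print(buildInsert("t", "", 3))
	// INSERT INTO `t` VALUES
	// (),
	// (),
	// ();
}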
42 changes: 42 additions & 0 deletions dumpling/tests/all_generate_column/conf/diff_config.toml
@@ -0,0 +1,42 @@
# diff Configuration.

log-level = "info"

chunk-size = 1000

check-thread-count = 4

sample-percent = 100

use-rowid = false

use-checksum = true

fix-sql-file = "fix.sql"

# tables that need to be checked.
[[check-tables]]
schema = "all_generate_column"
tables = ["~t.*"]

[[table-config]]
schema = "all_generate_column"
table = "t"

[[table-config.source-tables]]
instance-id = "source-1"
schema = "all_generate_column"
table = "t"

[[source-db]]
host = "127.0.0.1"
port = 3306
user = "root"
password = ""
instance-id = "source-1"

[target-db]
host = "127.0.0.1"
port = 4000
user = "root"
password = ""
20 changes: 20 additions & 0 deletions dumpling/tests/all_generate_column/conf/lightning.toml
@@ -0,0 +1,20 @@
### tidb-lightning config

[lightning]
server-mode = false
level = "error"
check-requirements = false

[tikv-importer]
backend="tidb"
on-duplicate = "error"

[mydumper]
data-source-dir = "/tmp/dumpling_test_result/sql_res.all_generate_column"

[tidb]
host = "127.0.0.1"
port = 4000
user = "root"
password = ""
status-port = 10080
37 changes: 37 additions & 0 deletions dumpling/tests/all_generate_column/run.sh
@@ -0,0 +1,37 @@
#!/bin/sh

set -eu
cur=$(cd `dirname $0`; pwd)

DB_NAME="all_generate_column"
TABLE_NAME="t"

# drop database on tidb
export DUMPLING_TEST_PORT=4000
run_sql "drop database if exists $DB_NAME;"

# drop database on mysql
export DUMPLING_TEST_PORT=3306
run_sql "drop database if exists $DB_NAME;"

# build data on mysql
run_sql "create database $DB_NAME;"

# build a table where every column is a generated column
run_sql "create table $DB_NAME.$TABLE_NAME(a int as (1), b int as (2)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;"

# insert 100 records
run_sql "insert into $DB_NAME.$TABLE_NAME values $(seq -s, 100 | sed 's/,*$//g' | sed "s/[0-9]*/()/g");"

# dumping
export DUMPLING_TEST_DATABASE=$DB_NAME
run_dumpling

cat "$cur/conf/lightning.toml"
# use lightning import data to tidb
run_lightning $cur/conf/lightning.toml

# check mysql and tidb data
check_sync_diff $cur/conf/diff_config.toml


4 changes: 2 additions & 2 deletions dumpling/v4/export/ir_impl.go
@@ -138,8 +138,8 @@ func (td *tableData) Rows() SQLRowIter {
 }
 
 func (td *tableData) SelectedField() string {
-	if td.selectedField == "*" {
-		return ""
+	if td.selectedField == "*" || td.selectedField == "" {
+		return td.selectedField
 	}
 	return fmt.Sprintf("(%s)", td.selectedField)
 }
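The contract of SelectedField after this change: "*" and "" pass through unchanged so callers can tell "dump every column" apart from "every column is generated", while a real column list is wrapped in parentheses for use in an INSERT prefix. A standalone sketch of that behavior (its own function, not dumpling's tableData):

package main

import "fmt"

func selectedFieldClause(selectedField string) string {
	// "*" (dump every column, no explicit list) and "" (every column is
	// generated) are passed through so callers can tell the cases apart;
	// a real column list is parenthesized for the INSERT prefix.
	if selectedField == "*" || selectedField == "" {
		return selectedField
	}
	return fmt.Sprintf("(%s)", selectedField)
}

func main() {
	fmt.Printf("%q\n", selectedFieldClause("*"))       // "*"
	fmt.Printf("%q\n", selectedFieldClause(""))        // ""
	fmt.Printf("%q\n", selectedFieldClause("`a`,`b`")) // "(`a`,`b`)"
}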
15 changes: 13 additions & 2 deletions dumpling/v4/export/sql.go
@@ -194,7 +194,13 @@ func SelectAllFromTable(conf *Config, db *sql.Conn, database, table string) (Tab
 		return nil, err
 	}
 
-	colTypes, err := GetColumnTypes(db, selectedField, database, table)
+	var colTypes []*sql.ColumnType
+	// If all columns are generated
+	if selectedField == "" {
+		colTypes, err = GetColumnTypes(db, "*", database, table)
+	} else {
+		colTypes, err = GetColumnTypes(db, selectedField, database, table)
+	}
 	if err != nil {
 		return nil, err
 	}
@@ -204,7 +210,12 @@ func SelectAllFromTable(conf *Config, db *sql.Conn, database, table string) (Tab
 		return nil, err
 	}
 
-	query := buildSelectQuery(database, table, selectedField, buildWhereCondition(conf, ""), orderByClause)
+	queryField := selectedField
+	// If all columns are generated
+	if queryField == "" {
+		queryField = "''"
+	}
+	query := buildSelectQuery(database, table, queryField, buildWhereCondition(conf, ""), orderByClause)
 
 	return &tableData{
 		database: database,
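Both fallbacks above exist because SelectAllFromTable still needs syntactically valid SQL when there is nothing insertable to select: column types are probed with "*", and the data query selects the constant '' so that, presumably, the result set still yields one row per table row for the writer to count. A minimal sketch of the resulting query text, with buildDataQuery standing in for dumpling's buildSelectQuery (which also takes a WHERE condition and ORDER BY clause):

package main

import "fmt"

func buildDataQuery(database, table, selectedField string) string {
	queryField := selectedField
	if queryField == "" { // all columns are generated; SELECT still needs a field
		queryField = "''"
	}
	return fmt.Sprintf("SELECT %s FROM `%s`.`%s`", queryField, database, table)
}

func main() {
	fmt.Println(buildDataQuery("all_generate_column", "t", ""))
	// SELECT '' FROM `all_generate_column`.`t`
	fmt.Println(buildDataQuery("test", "t", "`a`,`b`"))
	// SELECT `a`,`b` FROM `test`.`t`
}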
3 changes: 0 additions & 3 deletions dumpling/v4/export/test_util.go
@@ -115,9 +115,6 @@ func (m *mockTableIR) ColumnNames() []string {
 }
 
 func (m *mockTableIR) SelectedField() string {
-	if m.selectedField == "*" {
-		return ""
-	}
 	return m.selectedField
 }
 
32 changes: 19 additions & 13 deletions dumpling/v4/export/writer_util.go
@@ -159,8 +159,9 @@ func WriteInsert(pCtx context.Context, tblIR TableDataIR, w storage.Writer, file
 	)
 
 	selectedField := tblIR.SelectedField()
+
 	// if has generated column
-	if selectedField != "" {
+	if selectedField != "" && selectedField != "*" {
 		insertStatementPrefix = fmt.Sprintf("INSERT INTO %s %s VALUES\n",
 			wrapBackTicks(escapeString(tblIR.TableName())), selectedField)
 	} else {
@@ -175,13 +176,17 @@
 	wp.AddFileSize(insertStatementPrefixLen)
 
 	for fileRowIter.HasNext() {
-		if err = fileRowIter.Decode(row); err != nil {
-			log.Error("scanning from sql.Row failed", zap.Error(err))
-			return err
-		}
 
 		lastBfSize := bf.Len()
-		row.WriteToBuffer(bf, escapeBackSlash)
+		if selectedField != "" {
+			if err = fileRowIter.Decode(row); err != nil {
+				log.Error("scanning from sql.Row failed", zap.Error(err))
+				return err
+			}
+			row.WriteToBuffer(bf, escapeBackSlash)
+		} else {
+			bf.WriteString("()")
+		}
 		counter += 1
 		wp.AddFileSize(uint64(bf.Len()-lastBfSize) + 2) // 2 is for ",\n" and ";\n"
 
@@ -260,7 +265,7 @@ func WriteInsertInCsv(pCtx context.Context, tblIR TableDataIR, w storage.Writer,
 		err error
 	)
 
-	if !noHeader && len(tblIR.ColumnNames()) != 0 {
+	if !noHeader && len(tblIR.ColumnNames()) != 0 && tblIR.SelectedField() != "" {
 		for i, col := range tblIR.ColumnNames() {
 			bf.Write(opt.delimiter)
 			escape([]byte(col), bf, getEscapeQuotation(escapeBackSlash, opt.delimiter))
@@ -274,13 +279,14 @@
 	wp.currentFileSize += uint64(bf.Len())
 
 	for fileRowIter.HasNext() {
-		if err = fileRowIter.Decode(row); err != nil {
-			log.Error("scanning from sql.Row failed", zap.Error(err))
-			return err
-		}
-
 		lastBfSize := bf.Len()
-		row.WriteToBufferInCsv(bf, escapeBackSlash, opt)
+		if tblIR.SelectedField() != "" {
+			if err = fileRowIter.Decode(row); err != nil {
+				log.Error("scanning from sql.Row failed", zap.Error(err))
+				return err
+			}
+			row.WriteToBufferInCsv(bf, escapeBackSlash, opt)
+		}
 		counter += 1
 		wp.currentFileSize += uint64(bf.Len()-lastBfSize) + 1 // 1 is for "\n"
 
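For the CSV path, the net effect of the two conditions added above is that a table whose columns are all generated produces neither a header row nor row values; each row only contributes the trailing newline the surrounding code already writes. A small model of that behavior with hypothetical inputs (not dumpling's types):

package main

import (
	"fmt"
	"strings"
)

func renderCSV(selectedField string, header string, rows []string) string {
	var bf strings.Builder
	if selectedField != "" { // header is suppressed for all-generated tables
		bf.WriteString(header + "\n")
	}
	for _, r := range rows {
		if selectedField != "" { // row values are neither decoded nor written
			bf.WriteString(r)
		}
		bf.WriteString("\n") // the per-row newline is still emitted
	}
	return bf.String()
}

func main() {
	fmt.Printf("%q\n", renderCSV("`a`,`b`", `"a","b"`, []string{`"1","2"`})) // "\"a\",\"b\"\n\"1\",\"2\"\n"
	fmt.Printf("%q\n", renderCSV("", `"a","b"`, []string{`"1","2"`}))        // "\n"
}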
