Commit 9687b70

Fix alter primary key for write (#1644) (#1645)

marsishandsome authored Oct 14, 2020
1 parent 6200122 commit 9687b70
Showing 10 changed files with 113 additions and 3 deletions.
14 changes: 13 additions & 1 deletion .ci/integration_test.groovy
@@ -12,6 +12,7 @@ def call(ghprbActualCommit, ghprbCommentBody, ghprbPullId, ghprbPullTitle, ghprb
def PARALLEL_NUMBER = 18
def TEST_REGION_SIZE = "normal"
def TEST_TIFLASH = "false"
def TEST_ALTER_PRIMARY_KEY = "true"

// parse tidb branch
def m1 = ghprbCommentBody =~ /tidb\s*=\s*([^\s\\]+)(\s|\\|$)/
@@ -59,12 +60,18 @@ def call(ghprbActualCommit, ghprbCommentBody, ghprbPullId, ghprbPullTitle, ghprb
TEST_REGION_SIZE = "${m7[0][1]}"
}

// parse test mode
// parse test tiflash
def m8 = ghprbCommentBody =~ /test-flash\s*=\s*([^\s\\]+)(\s|\\|$)/
if (m8) {
TEST_TIFLASH = "${m8[0][1]}"
}

// parse test alter primary key
def m9 = ghprbCommentBody =~ /test-alter-primary-key\s*=\s*([^\s\\]+)(\s|\\|$)/
if (m9) {
TEST_ALTER_PRIMARY_KEY = "${m9[0][1]}"
}
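With this in place, including, for example, test-alter-primary-key=false in a PR comment (alongside whatever trigger phrase the CI bot expects — that part is outside this diff) switches the build to a TiDB config with the feature disabled, which the sed step further down applies.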

groovy.lang.Closure readfile = { filename ->
def file = readFile filename
return file.split("\n") as List
@@ -127,6 +134,11 @@ def call(ghprbActualCommit, ghprbCommentBody, ghprbPullId, ghprbPullTitle, ghprb
"""
stash includes: "tiflash/**", name: "tiflash_binary"
}
// alter-primary-key
if (TEST_ALTER_PRIMARY_KEY == "false") {
sh "sed -i 's/alter-primary-key = true/alter-primary-key = false/' config/tidb.toml"
sh "sed -i 's/alter-primary-key = true/alter-primary-key = false/' config/tidb-4.0.toml"
}

stash includes: "bin/**", name: "binaries"

7 changes: 6 additions & 1 deletion config/tidb-4.0.toml
@@ -8,4 +8,9 @@ split-table = true
delay-clean-table-lock = 60000

# enable-table-lock is used to control the table lock feature. Default is false, indicating the table lock feature is disabled.
enable-table-lock = true

# alter-primary-key is used to control the alter primary key feature. Default is false, indicating the alter primary key feature is disabled.
# If it is true, we can add the primary key by "alter table", but we may not be able to drop the primary key.
# In order to support the "drop primary key" operation, this flag must be true and the table must not have the pkIsHandle flag.
alter-primary-key = true
5 changes: 5 additions & 0 deletions config/tidb.toml
@@ -40,6 +40,11 @@ enable-chunk = true
# enable-table-lock is used to control the table lock feature. Default is false, indicating the table lock feature is disabled.
enable-table-lock = true

# alter-primary-key is used to control the alter primary key feature. Default is false, indicating the alter primary key feature is disabled.
# If it is true, we can add the primary key by "alter table", but we may not be able to drop the primary key.
# In order to support the "drop primary key" operation, this flag must be true and the table must not have the pkIsHandle flag.
alter-primary-key = true

[log]
# Log level: info, debug, warn, error, fatal.
level = "info"
@@ -153,7 +153,7 @@ class TiBatchWriteTable(
if (colsMapInTiDB.contains(colsInDf(colOffset))) {
if (colsMapInTiDB(colsInDf(colOffset)).isAutoIncrement) {
val index = row._2 + 1
rowIDAllocator.getShardRowId(index)
rowIDAllocator.getAutoIncId(index)
} else {
data._1
}
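This one-line swap is the heart of the fix: when alter-primary-key is enabled, an AUTO_INCREMENT primary key is no longer the row handle (pkIsHandle is false), so the value written into the visible column must be the plain allocated id — shard bits belong only to the hidden _tidb_rowid. A minimal sketch of the two id flavors, assuming the shard layout TiDB documents for SHARD_ROW_ID_BITS (the shardRowId helper below is illustrative, not the project's exact code):

// Plain auto-increment id: sequential within the allocated [start, start + step) range.
static long autoIncId(long start, long index) {
  return start + index; // what RowIDAllocator.getAutoIncId computes
}

// Shard-encoded row id: a shard value is OR-ed into the bits just below the sign
// bit, scattering consecutive rows across TiKV regions. Illustrative sketch only.
static long shardRowId(long start, long index, long shard, int shardBits) {
  return (shard << (64 - 1 - shardBits)) | (start + index);
}

Writing shard-encoded values into the AUTO_INCREMENT column — the pre-fix behavior — yields huge, non-sequential ids that can overflow the declared column type; the new AutoIncrementSuite test below pins down the corrected split.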
@@ -21,6 +21,37 @@ import org.apache.spark.sql.types._

class AutoIncrementSuite extends BaseBatchWriteTest("test_datasource_auto_increment") {

test("alter primary key + auto increment + shard row bits") {
if (!isEnableAlterPrimaryKey) {
cancel("enable alter-primary-key by changing tidb.toml")
}

val schema = StructType(List(StructField("j", LongType)))

jdbcUpdate(
s"create table $dbtable(i int NOT NULL AUTO_INCREMENT, j int NOT NULL, primary key (i)) SHARD_ROW_ID_BITS=4")

val tiTableInfo = ti.tiSession.getCatalog.getTable(dbPrefix + database, table)
assert(!tiTableInfo.isPkHandle)

(1L until 10L).foreach { i =>
jdbcUpdate(s"insert into $dbtable (j) values(${i * 2 - 1})")

tidbWrite(List(Row(i * 2)), schema)
}

println(listToString(queryTiDBViaJDBC(s"select _tidb_rowid, i, j from $dbtable")))

spark.sql(s"select * from $table").show

val maxI = queryTiDBViaJDBC(s"select max(i) from $dbtable").head.head.toString.toLong
assert(maxI < 10000000)

val maxTiDBRowID =
queryTiDBViaJDBC(s"select max(_tidb_rowid) from $dbtable").head.head.toString.toLong
assert(maxTiDBRowID > 10000000)
}
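The two magic-number assertions encode the bit layout. With SHARD_ROW_ID_BITS=4, TiDB reserves the four bits just below the sign bit of the 64-bit _tidb_rowid for a shard value, so any row that lands on a non-zero shard gets a rowid of at least 2^59 — far above 10000000 — while the visible auto-increment column i keeps small sequential values. A quick back-of-the-envelope check (constants mirror the documented shard layout; treat it as a sketch):

// With SHARD_ROW_ID_BITS = 4, the shard occupies bits 59..62 of _tidb_rowid.
int shardBits = 4;
long smallestShardedRowId = 1L << (64 - 1 - shardBits); // 2^59 = 576460752303423488
assert smallestShardedRowId > 10_000_000L; // hence max(_tidb_rowid) > 10000000
// The auto-increment column carries no shard bits, so max(i) stays near the row
// count (plus any allocator step padding) — hence max(i) < 10000000.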

// Duplicate entry '2' for key 'PRIMARY'
// currently a user-provided auto increment value is not supported!
ignore("auto increment: user provide id") {
@@ -64,6 +95,10 @@ class AutoIncrementSuite extends BaseBatchWriteTest("test_datasource_auto_increm
}

test("bigint signed tidb overflow") {
if (isEnableAlterPrimaryKey) {
cancel()
}

val schema = StructType(List(StructField("j", LongType)))

jdbcUpdate(
@@ -91,6 +126,9 @@ class AutoIncrementSuite extends BaseBatchWriteTest("test_datasource_auto_increm
}

test("bigint signed tispark overflow") {
if (isEnableAlterPrimaryKey) {
cancel()
}

val schema = StructType(List(StructField("j", LongType)))

@@ -121,6 +159,9 @@ class AutoIncrementSuite extends BaseBatchWriteTest("test_datasource_auto_increm
}

test("bigint unsigned tidb overflow") {
if (isEnableAlterPrimaryKey) {
cancel()
}

val schema = StructType(List(StructField("j", LongType)))

@@ -148,6 +189,10 @@ class AutoIncrementSuite extends BaseBatchWriteTest("test_datasource_auto_increm
}

test("bigint unsigned tispark overflow") {
if (isEnableAlterPrimaryKey) {
cancel()
}

val schema = StructType(List(StructField("j", LongType)))

jdbcUpdate(
@@ -177,6 +222,10 @@ class AutoIncrementSuite extends BaseBatchWriteTest("test_datasource_auto_increm
}

test("tinyint signed tidb overflow") {
if (isEnableAlterPrimaryKey) {
cancel()
}

val schema = StructType(List(StructField("j", LongType)))

jdbcUpdate(
@@ -204,6 +253,10 @@ class AutoIncrementSuite extends BaseBatchWriteTest("test_datasource_auto_increm
}

test("tinyint signed tispark overflow") {
if (isEnableAlterPrimaryKey) {
cancel()
}

val schema = StructType(List(StructField("j", LongType)))

jdbcUpdate(
@@ -232,6 +285,10 @@ class AutoIncrementSuite extends BaseBatchWriteTest("test_datasource_auto_increm
}

test("tinyint unsigned tidb overflow") {
if (isEnableAlterPrimaryKey) {
cancel()
}

val schema = StructType(List(StructField("j", LongType)))

jdbcUpdate(
@@ -258,6 +315,10 @@ class AutoIncrementSuite extends BaseBatchWriteTest("test_datasource_auto_increm
}

test("tinyint unsigned tispark overflow") {
if (isEnableAlterPrimaryKey) {
cancel()
}

val schema = StructType(List(StructField("j", LongType)))

jdbcUpdate(
@@ -69,6 +69,10 @@ class ColumnMappingSuite
}

test("Test different column order without auto increment column") {
if (isEnableAlterPrimaryKey) {
cancel()
}

jdbcUpdate(
s"create table $dbtable(i int primary key auto_increment, s varchar(128), c varchar(128))")

@@ -115,6 +115,12 @@ class BaseTiSparkTest extends QueryTest with SharedSQLContext {
def setLogLevel(level: String): Unit =
spark.sparkContext.setLogLevel(level)

protected def isEnableAlterPrimaryKey: Boolean = {
val conn = TiDBUtils.createConnectionFactory(jdbcUrl)()
val tiDBJDBCClient = new TiDBJDBCClient(conn)
tiDBJDBCClient.isEnableAlterPrimaryKey
}

protected def isEnableTableLock: Boolean = {
val conn = TiDBUtils.createConnectionFactory(jdbcUrl)()
val tiDBJDBCClient = new TiDBJDBCClient(conn)
@@ -158,6 +158,9 @@ class StatisticsTestSuite extends BasePlanTest {
{
val tableName = "full_data_type_table_idx"
test(query) {
if (isEnableAlterPrimaryKey) {
cancel()
}
val df = spark.sql(query)
checkIsTableScan(df, tableName)
}
@@ -178,6 +181,9 @@ class StatisticsTestSuite extends BasePlanTest {
case (query, idxName) =>
val tableName = "full_data_type_table_idx"
test(query) {
if (isEnableAlterPrimaryKey) {
cancel()
}
val df = spark.sql(query)
checkIsCoveringIndexScan(df, tableName)
checkIndex(df, idxName)
@@ -39,13 +39,20 @@ public class TiDBJDBCClient implements AutoCloseable {
private static final int DELAY_CLEAN_TABLE_LOCK_DEFAULT = 0;
private static final String TIDB_ROW_FORMAT_VERSION_SQL = "select @@tidb_row_format_version";
private static final int TIDB_ROW_FORMAT_VERSION_DEFAULT = 1;
private static final String ALTER_PRIMARY_KEY_KEY = "alter-primary-key";
private static final Boolean ALTER_PRIMARY_KEY_DEFAULT = false;
private final Logger logger = LoggerFactory.getLogger(getClass().getName());
private final Connection connection;

public TiDBJDBCClient(Connection connection) {
this.connection = connection;
}

public boolean isEnableAlterPrimaryKey() throws IOException, SQLException {
Map<String, Object> configMap = readConfMapFromTiDB();
return (Boolean) configMap.getOrDefault(ALTER_PRIMARY_KEY_KEY, ALTER_PRIMARY_KEY_DEFAULT);
}
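readConfMapFromTiDB is reused from the existing table-lock check and is not part of this diff. A plausible sketch of what it does — fetch TiDB's running config as JSON and flatten the top level, where "alter-primary-key" lives as a boolean — assuming the select @@tidb_config session variable (the real client's query and JSON handling may differ):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Map;

// Hypothetical sketch of the helper this method relies on.
private Map<String, Object> readConfMapFromTiDB() throws Exception {
  try (Statement stmt = connection.createStatement();
      ResultSet rs = stmt.executeQuery("select @@tidb_config")) {
    rs.next();
    String json = rs.getString(1);
    @SuppressWarnings("unchecked")
    Map<String, Object> conf = new ObjectMapper().readValue(json, Map.class);
    return conf; // top-level keys such as "alter-primary-key" map to Booleans
  }
}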

public boolean isEnableTableLock() throws IOException, SQLException {
Map<String, Object> configMap = readConfMapFromTiDB();
Object enableTableLock =
@@ -52,6 +52,10 @@ private RowIDAllocator(long maxShardRowIDBits, long dbId, long step, TiConfigura
this.conf = conf;
}

public long getAutoIncId(long index) {
return index + getStart();
}
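getAutoIncId leans on getStart(), the base of the contiguous id range this allocator reserved from TiDB's meta, and index is 1-based (see the javadoc on the neighboring method). A hypothetical usage sketch — how the batch-write path maps rows to ids, with the allocator obtained however TiBatchWriteTable constructs it:

// rowIDAllocator reserved [start, start + step); indices are 1-based.
long idForRow1 = rowIDAllocator.getAutoIncId(1); // start + 1
long idForRow2 = rowIDAllocator.getAutoIncId(2); // start + 2 — strictly sequential, no shard bits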

/**
* @param index should be >= 1
* @return
