
Commit beee317

test(ticdc): modify generated column test (#11737)
ref #11704
1 parent 1e1f271 commit beee317

4 files changed: +16 -22 lines

tests/integration_tests/generate_column/conf/changefeed.toml (new file)
@@ -0,0 +1 @@
+force-replicate = true
tests/integration_tests/generate_column/data/stored.sql (new file)
@@ -0,0 +1,8 @@
+use `generate_column`;
+-- see https://github.com/pingcap/tiflow/issues/11704
+create table t2 (a int, b int as (a + 1) stored primary key);
+insert into t2(a) values (1),(2), (3),(4),(5),(6),(7);
+update t2 set a = 10 where a = 1;
+update t2 set a = 11 where b = 3;
+delete from t2 where b=4;
+delete from t2 where a=4;
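In the new data file, b is a stored generated column defined as a + 1, so the predicates on b above resolve against that computed value. A hypothetical verification query (not part of this commit), assuming a MySQL-compatible upstream:

-- Hypothetical check, not part of the commit.
-- "update ... where b = 3" hits the row inserted with a = 2, and
-- "delete ... where b = 4" removes the row inserted with a = 3.
use `generate_column`;
select a, b from t2 order by a;
-- Expected result after the DML above: (5,6), (6,7), (7,8), (10,11), (11,12).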

tests/integration_tests/generate_column/data/prepare.sql → tests/integration_tests/generate_column/data/virtual.sql (renamed, -7 lines)
@@ -2,13 +2,6 @@ drop database if exists `generate_column`;
 create database `generate_column`;
 use `generate_column`;
 
-create table t (a int, b int as (a + 1) stored primary key);
-insert into t(a) values (1),(2), (3),(4),(5),(6),(7);
-update t set a = 10 where a = 1;
-update t set a = 11 where b = 3;
-delete from t where b=4;
-delete from t where a=4;
-
 create table t1 (a int, b int as (a + 1) virtual not null, c int not null, unique index idx1(b), unique index idx2(c));
 insert into t1 (a, c) values (1, 2),(2, 3), (3, 4),(4, 5),(5, 6),(6, 7),(7, 8);
 update t1 set a = 10 where a = 1;
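After the rename, virtual.sql keeps only the virtual-generated-column table t1, while the stored-generated-column statements that previously lived here move to data/stored.sql as table t2 (see the hunk above). As a reminder of the distinction the two files now split on, a minimal illustrative sketch (hypothetical, not part of the commit):

-- Hypothetical sketch, not part of the commit.
-- A VIRTUAL generated column is computed on read and is not materialized in the row,
-- while a STORED generated column is written with the row and can back a primary key.
create table demo_virtual (a int, b int as (a + 1) virtual, unique index idx_b(b));
create table demo_stored (a int, b int as (a + 1) stored primary key);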

tests/integration_tests/generate_column/run.sh (+7 -15 lines)
@@ -9,16 +9,6 @@ CDC_BINARY=cdc.test
 SINK_TYPE=$1
 
 function run() {
-	# storage is not supported yet.
-	if [ "$SINK_TYPE" == "storage" ]; then
-		return
-	fi
-
-	# TODO(dongmen): enable pulsar in the future.
-	if [ "$SINK_TYPE" == "pulsar" ]; then
-		exit 0
-	fi
-
 	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
 
 	start_tidb_cluster --workdir $WORK_DIR
@@ -43,15 +33,17 @@ function run() {
 	run_cdc_cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI"
 	case $SINK_TYPE in
 	kafka) run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
-	storage) run_storage_consumer $WORK_DIR $SINK_URI "" "" ;;
-	pulsar) run_pulsar_consumer --upstream-uri $SINK_URI ;;
+	storage) run_storage_consumer $WORK_DIR $SINK_URI $CUR/conf/changefeed.toml "" ;;
+	pulsar) run_pulsar_consumer --upstream-uri $SINK_URI --config $CUR/conf/changefeed.toml ;;
 	esac
-	run_sql_file $CUR/data/prepare.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}
+	run_sql_file $CUR/data/virtual.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}
 	# sync_diff can't check non-exist table, so we check expected tables are created in downstream first
-	check_table_exists generate_column.t ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
 	check_table_exists generate_column.t1 ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
+	if [[ "$SINK_TYPE" != "storage" && "$SINK_TYPE" != "pulsar" ]]; then
+		run_sql_file $CUR/data/stored.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}
+		check_table_exists generate_column.t2 ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
+	fi
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml
-
 	cleanup_process $CDC_BINARY
 }
 
