Skip to content

Commit 75449d9

Browse files
committed
tests: add retries and longer waits for iceberg IT
1 parent 5c51ee0 commit 75449d9

File tree

2 files changed

+22
-22
lines changed
  • tests/integration_tests

2 files changed

+22
-22
lines changed

tests/integration_tests/iceberg_append_basic/run.sh

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ function prepare() {
3232
# record tso before we create tables to skip the system table DDLs
3333
start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})
3434

35-
run_sql "CREATE TABLE test.iceberg_append_basic(id INT PRIMARY KEY, val INT);"
35+
do_retry 5 2 run_sql "CREATE TABLE test.iceberg_append_basic(id INT PRIMARY KEY, val INT);"
3636

3737
run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY
3838

@@ -57,19 +57,19 @@ function wait_file_exists() {
5757
}
5858

5959
function iceberg_check_append_basic() {
60-
run_sql "INSERT INTO test.iceberg_append_basic(id, val) VALUES (1, 1);"
61-
run_sql "INSERT INTO test.iceberg_append_basic(id, val) VALUES (2, 2);"
62-
run_sql "UPDATE test.iceberg_append_basic SET val = 22 WHERE id = 2;"
63-
run_sql "DELETE FROM test.iceberg_append_basic WHERE id = 1;"
60+
do_retry 5 2 run_sql "INSERT INTO test.iceberg_append_basic(id, val) VALUES (1, 1);"
61+
do_retry 5 2 run_sql "INSERT INTO test.iceberg_append_basic(id, val) VALUES (2, 2);"
62+
do_retry 5 2 run_sql "UPDATE test.iceberg_append_basic SET val = 22 WHERE id = 2;"
63+
do_retry 5 2 run_sql "DELETE FROM test.iceberg_append_basic WHERE id = 1;"
6464

6565
WAREHOUSE_DIR="$WORK_DIR/iceberg_warehouse"
6666
TABLE_ROOT="$WAREHOUSE_DIR/ns/test/iceberg_append_basic"
6767
METADATA_DIR="$TABLE_ROOT/metadata"
6868
DATA_DIR="$TABLE_ROOT/data"
6969

7070
# Wait for iceberg commit output files.
71-
wait_file_exists "$METADATA_DIR/v*.metadata.json" 120
72-
wait_file_exists "$DATA_DIR/snap-*.parquet" 120
71+
wait_file_exists "$METADATA_DIR/v*.metadata.json" 180
72+
wait_file_exists "$DATA_DIR/snap-*.parquet" 180
7373

7474
# Hint: Spark readback is disabled by default.
7575
# Enable it via:
@@ -102,10 +102,10 @@ function iceberg_check_append_basic() {
102102
GLOBAL_CHECKPOINT_DIR="$WAREHOUSE_DIR/ns/__ticdc/__tidb_global_checkpoints/data"
103103
CHECKPOINT_METADATA_DIR="$WAREHOUSE_DIR/ns/__ticdc/__tidb_checkpoints/metadata"
104104
GLOBAL_CHECKPOINT_METADATA_DIR="$WAREHOUSE_DIR/ns/__ticdc/__tidb_global_checkpoints/metadata"
105-
wait_file_exists "$CHECKPOINT_DIR/snap-*.parquet" 120
106-
wait_file_exists "$GLOBAL_CHECKPOINT_DIR/snap-*.parquet" 120
107-
wait_file_exists "$CHECKPOINT_METADATA_DIR/v*.metadata.json" 120
108-
wait_file_exists "$GLOBAL_CHECKPOINT_METADATA_DIR/v*.metadata.json" 120
105+
wait_file_exists "$CHECKPOINT_DIR/snap-*.parquet" 180
106+
wait_file_exists "$GLOBAL_CHECKPOINT_DIR/snap-*.parquet" 180
107+
wait_file_exists "$CHECKPOINT_METADATA_DIR/v*.metadata.json" 180
108+
wait_file_exists "$GLOBAL_CHECKPOINT_METADATA_DIR/v*.metadata.json" 180
109109

110110
# Optional: Spark readback verification (requires Spark + Iceberg Spark runtime).
111111
if [ "${ICEBERG_SPARK_READBACK:-0}" = "1" ]; then

tests/integration_tests/iceberg_upsert_basic/run.sh

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ function prepare() {
3232
# record tso before we create tables to skip the system table DDLs
3333
start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})
3434

35-
run_sql "CREATE TABLE test.iceberg_upsert_basic(id INT PRIMARY KEY, val INT);"
35+
do_retry 5 2 run_sql "CREATE TABLE test.iceberg_upsert_basic(id INT PRIMARY KEY, val INT);"
3636

3737
run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY
3838

@@ -57,17 +57,17 @@ function wait_file_exists() {
5757
}
5858

5959
function iceberg_check_upsert_basic() {
60-
run_sql "INSERT INTO test.iceberg_upsert_basic(id, val) VALUES (1, 1);"
61-
run_sql "INSERT INTO test.iceberg_upsert_basic(id, val) VALUES (2, 2);"
60+
do_retry 5 2 run_sql "INSERT INTO test.iceberg_upsert_basic(id, val) VALUES (1, 1);"
61+
do_retry 5 2 run_sql "INSERT INTO test.iceberg_upsert_basic(id, val) VALUES (2, 2);"
6262

6363
WAREHOUSE_DIR="$WORK_DIR/iceberg_warehouse"
6464
TABLE_ROOT="$WAREHOUSE_DIR/ns/test/iceberg_upsert_basic"
6565
METADATA_DIR="$TABLE_ROOT/metadata"
6666
DATA_DIR="$TABLE_ROOT/data"
6767

6868
# Wait for iceberg commit output files.
69-
wait_file_exists "$METADATA_DIR/v*.metadata.json" 120
70-
wait_file_exists "$DATA_DIR/snap-*.parquet" 120
69+
wait_file_exists "$METADATA_DIR/v*.metadata.json" 180
70+
wait_file_exists "$DATA_DIR/snap-*.parquet" 180
7171

7272
# Hint: Spark readback is disabled by default.
7373
# Enable it via:
@@ -83,16 +83,16 @@ function iceberg_check_upsert_basic() {
8383
# so equality delete files are required (otherwise they may be optimized away within the same batch).
8484
first_meta=$(ls -1 "$METADATA_DIR"/v*.metadata.json | sort -V | tail -n 1)
8585

86-
run_sql "UPDATE test.iceberg_upsert_basic SET val = 22 WHERE id = 2;"
87-
run_sql "DELETE FROM test.iceberg_upsert_basic WHERE id = 1;"
86+
do_retry 5 2 run_sql "UPDATE test.iceberg_upsert_basic SET val = 22 WHERE id = 2;"
87+
do_retry 5 2 run_sql "DELETE FROM test.iceberg_upsert_basic WHERE id = 1;"
8888

8989
# Upsert mode should produce equality delete files for UPDATE/DELETE events.
90-
wait_file_exists "$DATA_DIR/delete-*.parquet" 120
90+
wait_file_exists "$DATA_DIR/delete-*.parquet" 180
9191

9292
# Wait for a new metadata file after the UPDATE/DELETE commit.
9393
i=0
9494
latest_meta="$first_meta"
95-
while [ $i -lt 120 ]; do
95+
while [ $i -lt 180 ]; do
9696
latest_meta=$(ls -1 "$METADATA_DIR"/v*.metadata.json | sort -V | tail -n 1)
9797
if [ "$latest_meta" != "$first_meta" ]; then
9898
break
@@ -116,8 +116,8 @@ function iceberg_check_upsert_basic() {
116116
# Verify checkpoint table is created.
117117
CHECKPOINT_DIR="$WAREHOUSE_DIR/ns/__ticdc/__tidb_checkpoints/data"
118118
CHECKPOINT_METADATA_DIR="$WAREHOUSE_DIR/ns/__ticdc/__tidb_checkpoints/metadata"
119-
wait_file_exists "$CHECKPOINT_DIR/snap-*.parquet" 120
120-
wait_file_exists "$CHECKPOINT_METADATA_DIR/v*.metadata.json" 120
119+
wait_file_exists "$CHECKPOINT_DIR/snap-*.parquet" 180
120+
wait_file_exists "$CHECKPOINT_METADATA_DIR/v*.metadata.json" 180
121121

122122
# Optional: Spark readback verification (requires Spark + Iceberg Spark runtime).
123123
if [ "${ICEBERG_SPARK_READBACK:-0}" = "1" ]; then

0 commit comments

Comments (0)