Skip to content

Commit 5803215

Browse files
committed
fix
1 parent 76fbf37 commit 5803215

File tree

1 file changed

+5
-0
lines changed

1 file changed

+5
-0
lines changed

paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkWriteITCase.java

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -88,24 +88,29 @@ public void testWriteWithDefaultValue() {
                 "CREATE TABLE T (a INT, b INT DEFAULT 2, c STRING DEFAULT 'my_value') TBLPROPERTIES"
                         + " ('file.format'='avro')");

+        // test show create table
         List<Row> show = spark.sql("SHOW CREATE TABLE T").collectAsList();
         assertThat(show.toString())
                 .contains("a INT,\n" + "  b INT DEFAULT 2,\n" + "  c STRING DEFAULT 'my_value'");

+        // test partial write
         spark.sql("INSERT INTO T (a) VALUES (1), (2)").collectAsList();
         List<Row> rows = spark.sql("SELECT * FROM T").collectAsList();
         assertThat(rows.toString()).isEqualTo("[[1,2,my_value], [2,2,my_value]]");

+        // test write with DEFAULT
         spark.sql("INSERT INTO T VALUES (3, DEFAULT, DEFAULT)").collectAsList();
         rows = spark.sql("SELECT * FROM T").collectAsList();
         assertThat(rows.toString()).isEqualTo("[[1,2,my_value], [2,2,my_value], [3,2,my_value]]");

+        // test alter with DEFAULT not support
         assertThatThrownBy(() -> spark.sql("ALTER TABLE T ADD COLUMN d INT DEFAULT 5"))
                 .hasMessageContaining(
                         "Unsupported table change: Cannot add column [d] with default value");
         assertThatThrownBy(() -> spark.sql("ALTER TABLE T ALTER COLUMN a SET DEFAULT 3"))
                 .hasMessageContaining("Change is not supported");

+        // test alter type to default column
         spark.sql("ALTER TABLE T ALTER COLUMN b TYPE STRING").collectAsList();
         spark.sql("INSERT INTO T (a) VALUES (4)").collectAsList();
         rows = spark.sql("SELECT * FROM T").collectAsList();

Comments (0)