@@ -25,6 +25,7 @@ class ParquetSuite extends IntegrationSuiteBase {
25
25
// Generates a fresh 10-character, letters-only identifier. Each suite table
// gets its own random name so concurrent test runs against a shared
// Snowflake schema cannot collide.
private def randomTableName(): String =
  Random.alphanumeric.filter(_.isLetter).take(10).mkString

val test_column_map_not_match: String = randomTableName()
val test_nested_dataframe: String = randomTableName()
val test_no_staging_table: String = randomTableName()
val test_table_name: String = randomTableName()
28
29
29
30
// Suite teardown: drops every table this suite created (best effort via
// "drop table if exists"), then delegates to the parent afterAll.
override def afterAll (): Unit = {
30
31
jdbcUpdate(s " drop table if exists $test_all_type" )
// NOTE(review): the diff view is truncated here — the original file's
// lines between the two hunks (additional drop-table statements) are not
// visible in this chunk, so this method body is incomplete as shown.
@@ -41,6 +42,7 @@ class ParquetSuite extends IntegrationSuiteBase {
41
42
jdbcUpdate(s " drop table if exists $test_column_map_not_match" )
42
43
jdbcUpdate(s " drop table if exists $test_nested_dataframe" )
43
44
jdbcUpdate(s " drop table if exists $test_no_staging_table" )
45
// Cleanup for the table used by the new structured-type format test.
+ jdbcUpdate(s " drop table if exists $test_table_name" )
44
46
super .afterAll()
45
47
}
46
48
@@ -707,4 +709,53 @@ class ParquetSuite extends IntegrationSuiteBase {
707
709
val res = sparkSession.sql(s " show tables like '% ${test_all_type}_STAGING%' " ).collect()
708
710
assert(res.length == 0 )
709
711
}
712
+
713
test("use parquet in structured type by default") {
  // Writes the result of `query` to `test_table_name` through the connector,
  // applying any extra writer options in the given order before saving.
  // The COPY statement issued is inspected afterwards via
  // Utils.getLastCopyLoad.
  def writeQuery(query: String, extraOptions: Seq[(String, String)] = Seq.empty): Unit = {
    val base = sparkSession
      .sql(query)
      .write
      .format(SNOWFLAKE_SOURCE_NAME)
      .options(connectorOptionsNoTable)
      .option("dbtable", test_table_name)
    val configured = extraOptions.foldLeft(base) {
      case (writer, (key, value)) => writer.option(key, value)
    }
    configured.mode(SaveMode.Overwrite).save()
  }

  // Plain (non-structured) output still stages as CSV by default.
  writeQuery("select 1")
  assert(Utils.getLastCopyLoad.contains("TYPE=CSV"))

  // Structured types (array) switch the staging format to Parquet.
  writeQuery("select array(1, 2)")
  assert(Utils.getLastCopyLoad.contains("TYPE=PARQUET"))

  // With PARAM_USE_JSON_IN_STRUCTURED_DATA enabled, structured types stage
  // as JSON instead.
  writeQuery(
    "select array(1, 2)",
    Seq(Parameters.PARAM_USE_JSON_IN_STRUCTURED_DATA -> "true"))
  assert(Utils.getLastCopyLoad.contains("TYPE = JSON"))

  // PARAM_USE_PARQUET_IN_WRITE takes precedence over the JSON flag.
  writeQuery(
    "select array(1, 2)",
    Seq(
      Parameters.PARAM_USE_JSON_IN_STRUCTURED_DATA -> "true",
      Parameters.PARAM_USE_PARQUET_IN_WRITE -> "true"))
  assert(Utils.getLastCopyLoad.contains("TYPE=PARQUET"))
}
710
761
}
0 commit comments