@@ -139,7 +139,6 @@
 import static io.trino.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY;
 import static io.trino.plugin.hive.HiveColumnHandle.ColumnType.REGULAR;
 import static io.trino.plugin.hive.HiveColumnHandle.createBaseColumn;
-import static io.trino.plugin.hive.HiveErrorCode.HIVE_WRITER_OPEN_ERROR;
 import static io.trino.plugin.hive.HivePageSourceProvider.ColumnMapping.buildColumnMappings;
 import static io.trino.plugin.hive.HivePartitionKey.HIVE_DEFAULT_DYNAMIC_PARTITION;
 import static io.trino.plugin.hive.HiveStorageFormat.AVRO;
@@ -157,8 +156,6 @@
 import static io.trino.plugin.hive.HiveTestUtils.getHiveSession;
 import static io.trino.plugin.hive.HiveTestUtils.mapType;
 import static io.trino.plugin.hive.acid.AcidTransaction.NO_ACID_TRANSACTION;
-import static io.trino.plugin.hive.ion.IonWriterOptions.ION_ENCODING_PROPERTY;
-import static io.trino.plugin.hive.ion.IonWriterOptions.TEXT_ENCODING;
 import static io.trino.plugin.hive.util.HiveTypeTranslator.toHiveType;
 import static io.trino.plugin.hive.util.SerdeConstants.LIST_COLUMNS;
 import static io.trino.plugin.hive.util.SerdeConstants.LIST_COLUMN_TYPES;
@@ -234,7 +231,6 @@ public final class TestHiveFileFormats
     private static final FileFormatDataSourceStats STATS = new FileFormatDataSourceStats();
     private static final ConnectorSession PARQUET_SESSION = getHiveSession(createParquetHiveConfig(false));
     private static final ConnectorSession PARQUET_SESSION_USE_NAME = getHiveSession(createParquetHiveConfig(true));
-    private static final String ERROR_ENCODING = "error_encoding";

     @DataProvider(name = "rowCount")
     public static Object[][] rowCountProvider()
@@ -377,7 +373,8 @@ public void testIonWithBinaryEncoding(int rowCount, long fileSizePadding)
             throws Exception
     {
         List<TestColumn> testColumns = TEST_COLUMNS.stream()
-                // todo: add support for maps to trino impl
+                // even though maps with text keys work with the native Trino impl,
+                // there is an error when testing against the Hive SerDe
                 .filter(tc -> !(tc.type instanceof MapType))
                 .collect(toList());

@@ -394,54 +391,6 @@ public void testIonWithBinaryEncoding(int rowCount, long fileSizePadding)
                 .isReadableByPageSource(fileSystemFactory -> new IonPageSourceFactory(fileSystemFactory, hiveConfig));
     }

-    @Test(dataProvider = "validRowAndFileSizePadding")
-    public void testIonWithTextEncoding(int rowCount, long fileSizePadding)
-            throws Exception
-    {
-        List<TestColumn> testColumns = TEST_COLUMNS.stream()
-                // todo: add support for maps to trino impl
-                .filter(tc -> !(tc.type instanceof MapType))
-                .collect(toList());
-
-        HiveConfig hiveConfig = new HiveConfig();
-        // enable Ion native trino integration for testing while the implementation is in progress
-        // TODO: In future this flag should change to `true` as default and then the following statement can be removed.
-        hiveConfig.setIonNativeTrinoEnabled(true);
-
-        assertThatFileFormat(ION)
-                .withColumns(testColumns)
-                .withRowsCount(rowCount)
-                .withFileSizePadding(fileSizePadding)
-                .withTableProperties(ImmutableMap.of(ION_ENCODING_PROPERTY, TEXT_ENCODING))
-                .withFileWriterFactory(fileSystemFactory -> new IonFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER))
-                .isReadableByPageSource(fileSystemFactory -> new IonPageSourceFactory(fileSystemFactory, hiveConfig));
-    }
-
-    @Test(dataProvider = "validRowAndFileSizePadding")
-    public void testInvalidIonEncoding(int rowCount, long fileSizePadding)
-            throws Exception
-    {
-        List<TestColumn> testColumns = TEST_COLUMNS.stream()
-                // todo: add support for maps to trino impl
-                .filter(tc -> !(tc.type instanceof MapType))
-                .collect(toList());
-
-        HiveConfig hiveConfig = new HiveConfig();
-        // enable Ion native trino integration for testing while the implementation is in progress
-        // TODO: In future this flag should change to `true` as default and then the following statement can be removed.
-        hiveConfig.setIonNativeTrinoEnabled(true);
-
-        assertTrinoExceptionThrownBy(() -> assertThatFileFormat(ION)
-                .withColumns(testColumns)
-                .withRowsCount(rowCount)
-                .withFileSizePadding(fileSizePadding)
-                .withTableProperties(ImmutableMap.of(ION_ENCODING_PROPERTY, ERROR_ENCODING))
-                .withFileWriterFactory(fileSystemFactory -> new IonFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER))
-                .isReadableByPageSource(fileSystemFactory -> new IonPageSourceFactory(fileSystemFactory, hiveConfig)))
-                .hasErrorCode(HIVE_WRITER_OPEN_ERROR)
-                .hasMessage("Error creating Ion Output");
-    }
-
     @Test(dataProvider = "validRowAndFileSizePadding")
     public void testRcTextPageSource(int rowCount, long fileSizePadding)
             throws Exception
@@ -1275,7 +1224,6 @@ private static class FileFormatAssertion
         private boolean skipGenericWrite;
         private HiveFileWriterFactory fileWriterFactory;
         private long fileSizePadding;
-        private Map<String, String> customTableProperties = ImmutableMap.of();

         private final TrinoFileSystemFactory fileSystemFactory = new MemoryFileSystemFactory();

@@ -1333,12 +1281,6 @@ public FileFormatAssertion withRowsCount(int rowsCount)
             return this;
         }

-        public FileFormatAssertion withTableProperties(Map<String, String> tableProperties)
-        {
-            this.customTableProperties = requireNonNull(tableProperties, "customTableProperties is null");
-            return this;
-        }
-
         public FileFormatAssertion withSession(ConnectorSession session)
         {
             this.session = requireNonNull(session, "session is null");
@@ -1397,7 +1339,7 @@ private void assertRead(HivePageSourceFactory pageSourceFactory)
                 if (fileWriterFactory == null) {
                     continue;
                 }
-                createTestFileTrino(location, storageFormat, compressionCodec, writeColumns, session, rowsCount, fileWriterFactory, customTableProperties);
+                createTestFileTrino(location, storageFormat, compressionCodec, writeColumns, session, rowsCount, fileWriterFactory);
             }
             else {
                 if (skipGenericWrite) {
@@ -1427,8 +1369,7 @@ private static void createTestFileTrino(
            List<TestColumn> testColumns,
            ConnectorSession session,
            int numRows,
-           HiveFileWriterFactory fileWriterFactory,
-           Map<String, String> customTableProperties)
+           HiveFileWriterFactory fileWriterFactory)
     {
         // filter out partition keys, which are not written to the file
         testColumns = testColumns.stream()
@@ -1453,7 +1394,6 @@ private static void createTestFileTrino(
         Map<String, String> tableProperties = ImmutableMap.<String, String>builder()
                 .put(LIST_COLUMNS, testColumns.stream().map(TestColumn::name).collect(Collectors.joining(",")))
                 .put(LIST_COLUMN_TYPES, testColumns.stream().map(TestColumn::type).map(HiveTypeTranslator::toHiveType).map(HiveType::toString).collect(Collectors.joining(",")))
-                .putAll(customTableProperties)
                 .buildOrThrow();

         Optional<FileWriter> fileWriter = fileWriterFactory.createFileWriter(