49 | 49 |
50 | 50 | import java.nio.file.Files; |
51 | 51 | import java.util.Arrays; |
| 52 | +import java.util.Collections; |
52 | 53 | import java.util.HashMap; |
53 | 54 | import java.util.List; |
54 | 55 | import java.util.Map; |
@@ -391,16 +392,12 @@ void testAlterLakeEnabledLogTable() throws Exception { |
391 | 392 | () -> |
392 | 393 | paimonCatalog.getTable( |
393 | 394 | Identifier.create(DATABASE, logTablePath.getTableName()))) |
394 | | - .isInstanceOf(Catalog.TableNotExistException.class) |
395 | | - .hasMessageContaining( |
396 | | - String.format( |
397 | | - "Table %s.%s does not exist.", |
398 | | - DATABASE, logTablePath.getTableName())); |
| 395 | + .isInstanceOf(Catalog.TableNotExistException.class); |
399 | 396 |
400 | 397 | // enable lake |
401 | 398 | TableChange.SetOption enableLake = |
402 | 399 | TableChange.set(ConfigOptions.TABLE_DATALAKE_ENABLED.key(), "true"); |
403 | | - List<TableChange> changes = Arrays.asList(enableLake); |
| 400 | + List<TableChange> changes = Collections.singletonList(enableLake); |
404 | 401 |
405 | 402 | admin.alterTable(logTablePath, changes, false).get(); |
406 | 403 |
@@ -434,150 +431,6 @@ void testAlterLakeEnabledLogTable() throws Exception { |
434 | 431 | BUCKET_NUM); |
435 | 432 | } |
436 | 433 |
437 | | - @Test |
438 | | - void testAlterPkLakeEnabledTable() throws Exception { |
439 | | - Map<String, String> customProperties = new HashMap<>(); |
440 | | - customProperties.put("k1", "v1"); |
441 | | - customProperties.put("paimon.file.format", "parquet"); |
442 | | - |
443 | | - // test pk table |
444 | | - TableDescriptor pkTable = |
445 | | - TableDescriptor.builder() |
446 | | - .schema( |
447 | | - Schema.newBuilder() |
448 | | - .column("pk_c1", DataTypes.INT()) |
449 | | - .column("pk_c2", DataTypes.STRING()) |
450 | | - .primaryKey("pk_c1") |
451 | | - .build()) |
452 | | - .distributedBy(BUCKET_NUM) |
453 | | - .property(ConfigOptions.TABLE_DATALAKE_ENABLED, false) |
454 | | - .customProperties(customProperties) |
455 | | - .build(); |
456 | | - TablePath pkTablePath = TablePath.of(DATABASE, "pk_table_alter"); |
457 | | - admin.createTable(pkTablePath, pkTable, false).get(); |
458 | | - |
459 | | - assertThatThrownBy( |
460 | | - () -> |
461 | | - paimonCatalog.getTable( |
462 | | - Identifier.create(DATABASE, pkTablePath.getTableName()))) |
463 | | - .isInstanceOf(Catalog.TableNotExistException.class) |
464 | | - .hasMessageContaining( |
465 | | - String.format( |
466 | | - "Table %s.%s does not exist.", |
467 | | - DATABASE, pkTablePath.getTableName())); |
468 | | - |
469 | | - // enable lake |
470 | | - TableChange.SetOption enableLake = |
471 | | - TableChange.set(ConfigOptions.TABLE_DATALAKE_ENABLED.key(), "true"); |
472 | | - List<TableChange> changes = Arrays.asList(enableLake); |
473 | | - |
474 | | - admin.alterTable(pkTablePath, changes, false).get(); |
475 | | - |
476 | | - Table enabledPaimonPkTable = |
477 | | - paimonCatalog.getTable(Identifier.create(DATABASE, pkTablePath.getTableName())); |
478 | | - |
479 | | - Map<String, String> updatedProperties = new HashMap<>(); |
480 | | - updatedProperties.put(ConfigOptions.TABLE_DATALAKE_ENABLED.key(), "true"); |
481 | | - TableDescriptor updatedPkTable = pkTable.withProperties(updatedProperties); |
482 | | - // check the gotten log table |
483 | | - verifyPaimonTable( |
484 | | - enabledPaimonPkTable, |
485 | | - updatedPkTable, |
486 | | - RowType.of( |
487 | | - new DataType[] { |
488 | | - org.apache.paimon.types.DataTypes.INT().notNull(), |
489 | | - org.apache.paimon.types.DataTypes.STRING(), |
490 | | - // for __bucket, __offset, __timestamp |
491 | | - org.apache.paimon.types.DataTypes.INT(), |
492 | | - org.apache.paimon.types.DataTypes.BIGINT(), |
493 | | - org.apache.paimon.types.DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE() |
494 | | - }, |
495 | | - new String[] { |
496 | | - "pk_c1", |
497 | | - "pk_c2", |
498 | | - BUCKET_COLUMN_NAME, |
499 | | - OFFSET_COLUMN_NAME, |
500 | | - TIMESTAMP_COLUMN_NAME |
501 | | - }), |
502 | | - "pk_c1", |
503 | | - BUCKET_NUM); |
504 | | - } |
505 | | - |
506 | | - @Test |
507 | | - void testAlterPartitionedLakeEnabledTable() throws Exception { |
508 | | - Map<String, String> customProperties = new HashMap<>(); |
509 | | - customProperties.put("k1", "v1"); |
510 | | - customProperties.put("paimon.file.format", "parquet"); |
511 | | - |
512 | | - // test partitioned table |
513 | | - TableDescriptor partitionedTableDescriptor = |
514 | | - TableDescriptor.builder() |
515 | | - .schema( |
516 | | - Schema.newBuilder() |
517 | | - .column("c1", DataTypes.INT()) |
518 | | - .column("c2", DataTypes.STRING()) |
519 | | - .column("c3", DataTypes.STRING()) |
520 | | - .primaryKey("c1", "c3") |
521 | | - .build()) |
522 | | - .distributedBy(BUCKET_NUM) |
523 | | - .partitionedBy("c3") |
524 | | - .property(ConfigOptions.TABLE_DATALAKE_ENABLED, false) |
525 | | - .customProperties(customProperties) |
526 | | - .build(); |
527 | | - TablePath partitionedTablePath = TablePath.of(DATABASE, "partitioned_table_alter"); |
528 | | - admin.createTable(partitionedTablePath, partitionedTableDescriptor, false).get(); |
529 | | - |
530 | | - assertThatThrownBy( |
531 | | - () -> |
532 | | - paimonCatalog.getTable( |
533 | | - Identifier.create( |
534 | | - DATABASE, partitionedTablePath.getTableName()))) |
535 | | - .isInstanceOf(Catalog.TableNotExistException.class) |
536 | | - .hasMessageContaining( |
537 | | - String.format( |
538 | | - "Table %s.%s does not exist.", |
539 | | - DATABASE, partitionedTablePath.getTableName())); |
540 | | - |
541 | | - // enable lake |
542 | | - TableChange.SetOption enableLake = |
543 | | - TableChange.set(ConfigOptions.TABLE_DATALAKE_ENABLED.key(), "true"); |
544 | | - List<TableChange> changes = Arrays.asList(enableLake); |
545 | | - admin.alterTable(partitionedTablePath, changes, false).get(); |
546 | | - |
547 | | - Table enabledPaimonPartitionedTable = |
548 | | - paimonCatalog.getTable( |
549 | | - Identifier.create(DATABASE, partitionedTablePath.getTableName())); |
550 | | - |
551 | | - Map<String, String> updatedProperties = new HashMap<>(); |
552 | | - updatedProperties.put(ConfigOptions.TABLE_DATALAKE_ENABLED.key(), "true"); |
553 | | - TableDescriptor updatedPartitionedTable = |
554 | | - partitionedTableDescriptor.withProperties(updatedProperties); |
555 | | - |
556 | | - verifyPaimonTable( |
557 | | - enabledPaimonPartitionedTable, |
558 | | - updatedPartitionedTable, |
559 | | - RowType.of( |
560 | | - new DataType[] { |
561 | | - org.apache.paimon.types.DataTypes.INT().notNull(), |
562 | | - org.apache.paimon.types.DataTypes.STRING(), |
563 | | - org.apache.paimon.types.DataTypes.STRING().notNull(), |
564 | | - // for __bucket, __offset, __timestamp |
565 | | - org.apache.paimon.types.DataTypes.INT(), |
566 | | - org.apache.paimon.types.DataTypes.BIGINT(), |
567 | | - org.apache.paimon.types.DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE() |
568 | | - }, |
569 | | - new String[] { |
570 | | - "c1", |
571 | | - "c2", |
572 | | - "c3", |
573 | | - BUCKET_COLUMN_NAME, |
574 | | - OFFSET_COLUMN_NAME, |
575 | | - TIMESTAMP_COLUMN_NAME |
576 | | - }), |
577 | | - "c1", |
578 | | - BUCKET_NUM); |
579 | | - } |
580 | | - |
581 | 434 | @Test |
582 | 435 | void testThrowExceptionWhenConflictWithSystemColumn() { |
583 | 436 | for (String systemColumn : |