@@ -86,9 +86,9 @@
 
 public class SparkBigQueryConfig
     implements BigQueryConfig,
-        BigQueryClient.CreateTableOptions,
-        BigQueryClient.LoadDataOptions,
-        Serializable {
+        BigQueryClient.CreateTableOptions,
+        BigQueryClient.LoadDataOptions,
+        Serializable {
 
   public static final int MAX_TRACE_ID_LENGTH = 256;
 
@@ -171,8 +171,6 @@ public static WriteMethod from(@Nullable String writeMethod) {
   public static final String BIG_NUMERIC_DEFAULT_PRECISION = "bigNumericDefaultPrecision";
   public static final String BIG_NUMERIC_DEFAULT_SCALE = "bigNumericDefaultScale";
 
-  private static final String DATAPROC_SYSTEM_BUCKET_CONFIGURATION = "fs.gs.system.bucket";
-
   TableId tableId;
   // as the config needs to be Serializable, internally it uses
   // com.google.common.base.Optional<String> but externally it uses the regular java.util.Optional
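
The comment above captures a pattern used throughout this class: java.util.Optional is not Serializable, while Guava's com.google.common.base.Optional is, so fields hold the Guava type and accessors convert at the boundary with toJavaUtil() (available since Guava 21). A minimal sketch of the bridge, with illustrative class and field names:

    import com.google.common.base.Optional;

    // Guava's Optional implements Serializable; java.util.Optional does not,
    // so a serializable config stores the Guava type internally.
    class SerializableConfigSketch implements java.io.Serializable {
      private Optional<String> credentialsKey = Optional.absent();

      // The public accessor exposes java.util.Optional via Guava's bridge.
      public java.util.Optional<String> getCredentialsKey() {
        return credentialsKey.toJavaUtil();
      }
    }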
@@ -400,10 +398,7 @@ public static SparkBigQueryConfig from(
         .orNull();
     config.defaultParallelism = defaultParallelism;
     config.temporaryGcsBucket =
-        stripPrefix(getAnyOption(globalOptions, options, "temporaryGcsBucket"))
-            .or(
-                com.google.common.base.Optional.fromNullable(
-                    hadoopConfiguration.get(DATAPROC_SYSTEM_BUCKET_CONFIGURATION)));
+        stripPrefix(getAnyOption(globalOptions, options, "temporaryGcsBucket"));
     config.persistentGcsBucket =
         stripPrefix(getAnyOption(globalOptions, options, "persistentGcsBucket"));
     config.persistentGcsPath = getOption(options, "persistentGcsPath");
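
Together with the removal of DATAPROC_SYSTEM_BUCKET_CONFIGURATION above, this drops the silent fallback to the Hadoop property fs.gs.system.bucket (the Dataproc system bucket): temporaryGcsBucket is now taken only from the connector options. Jobs that relied on the fallback must name a staging bucket explicitly; a usage sketch, where df is an existing Dataset<Row> and the bucket and table names are placeholders:

    // After this change, indirect writes need an explicit staging bucket;
    // the connector no longer falls back to fs.gs.system.bucket.
    df.write()
        .format("bigquery")
        .option("temporaryGcsBucket", "my-staging-bucket") // placeholder name
        .save("my_dataset.my_table"); // placeholder name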
@@ -610,6 +605,7 @@ public static SparkBigQueryConfig from(
 
     config.partitionOverwriteModeValue =
         getAnyOption(globalOptions, options, partitionOverwriteModeProperty)
+            .transform(String::toUpperCase)
             .transform(PartitionOverwriteMode::valueOf)
             .or(PartitionOverwriteMode.STATIC);
 
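
Uppercasing the raw value before the Enum.valueOf call makes the partition-overwrite-mode option case-insensitive: "dynamic", "Dynamic", and "DYNAMIC" now all parse, where previously anything but the exact uppercase constant name would throw IllegalArgumentException. A self-contained sketch of the same chain, using Guava's Optional (as this class does) and a stand-in enum:

    import com.google.common.base.Optional;

    class PartitionOverwriteModeParsingSketch {
      // Stand-in for the connector's PartitionOverwriteMode values.
      enum PartitionOverwriteMode { STATIC, DYNAMIC }

      static PartitionOverwriteMode parse(String rawValue) {
        return Optional.fromNullable(rawValue)
            .transform(String::toUpperCase)             // normalize case first
            .transform(PartitionOverwriteMode::valueOf) // then resolve the constant
            .or(PartitionOverwriteMode.STATIC);         // default when unset
      }

      public static void main(String[] args) {
        System.out.println(parse("dynamic")); // DYNAMIC
        System.out.println(parse(null));      // STATIC
      }
    }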
@@ -723,19 +719,19 @@ static ImmutableMap<String, String> normalizeConf(Map<String, String> conf) {
   public Credentials createCredentials() {
 
     return new BigQueryCredentialsSupplier(
-            accessTokenProviderFQCN.toJavaUtil(),
-            accessTokenProviderConfig.toJavaUtil(),
-            accessToken.toJavaUtil(),
-            credentialsKey.toJavaUtil(),
-            credentialsFile.toJavaUtil(),
-            loggedInUserName,
-            loggedInUserGroups,
-            impersonationServiceAccountsForUsers.toJavaUtil(),
-            impersonationServiceAccountsForGroups.toJavaUtil(),
-            impersonationServiceAccount.toJavaUtil(),
-            sparkBigQueryProxyAndHttpConfig.getProxyUri(),
-            sparkBigQueryProxyAndHttpConfig.getProxyUsername(),
-            sparkBigQueryProxyAndHttpConfig.getProxyPassword())
+            accessTokenProviderFQCN.toJavaUtil(),
+            accessTokenProviderConfig.toJavaUtil(),
+            accessToken.toJavaUtil(),
+            credentialsKey.toJavaUtil(),
+            credentialsFile.toJavaUtil(),
+            loggedInUserName,
+            loggedInUserGroups,
+            impersonationServiceAccountsForUsers.toJavaUtil(),
+            impersonationServiceAccountsForGroups.toJavaUtil(),
+            impersonationServiceAccount.toJavaUtil(),
+            sparkBigQueryProxyAndHttpConfig.getProxyUri(),
+            sparkBigQueryProxyAndHttpConfig.getProxyUsername(),
+            sparkBigQueryProxyAndHttpConfig.getProxyPassword())
         .getCredentials();
   }
 