
Commit 6dfce0d

reverting
1 parent 6a9640f commit 6dfce0d

1 file changed: +18 -22 lines changed

spark-bigquery-connector-common/src/main/java/com/google/cloud/spark/bigquery/SparkBigQueryConfig.java

Lines changed: 18 additions & 22 deletions
@@ -86,9 +86,9 @@
 
 public class SparkBigQueryConfig
     implements BigQueryConfig,
-        BigQueryClient.CreateTableOptions,
-        BigQueryClient.LoadDataOptions,
-        Serializable {
+        BigQueryClient.CreateTableOptions,
+        BigQueryClient.LoadDataOptions,
+        Serializable {
 
   public static final int MAX_TRACE_ID_LENGTH = 256;
 
@@ -171,8 +171,6 @@ public static WriteMethod from(@Nullable String writeMethod) {
   public static final String BIG_NUMERIC_DEFAULT_PRECISION = "bigNumericDefaultPrecision";
   public static final String BIG_NUMERIC_DEFAULT_SCALE = "bigNumericDefaultScale";
 
-  private static final String DATAPROC_SYSTEM_BUCKET_CONFIGURATION = "fs.gs.system.bucket";
-
   TableId tableId;
   // as the config needs to be Serializable, internally it uses
   // com.google.common.base.Optional<String> but externally it uses the regular java.util.Optional
@@ -400,10 +398,7 @@ public static SparkBigQueryConfig from(
             .orNull();
     config.defaultParallelism = defaultParallelism;
     config.temporaryGcsBucket =
-        stripPrefix(getAnyOption(globalOptions, options, "temporaryGcsBucket"))
-            .or(
-                com.google.common.base.Optional.fromNullable(
-                    hadoopConfiguration.get(DATAPROC_SYSTEM_BUCKET_CONFIGURATION)));
+        stripPrefix(getAnyOption(globalOptions, options, "temporaryGcsBucket"));
     config.persistentGcsBucket =
         stripPrefix(getAnyOption(globalOptions, options, "persistentGcsBucket"));
     config.persistentGcsPath = getOption(options, "persistentGcsPath");
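With the fallback to the Hadoop property fs.gs.system.bucket reverted, temporaryGcsBucket is no longer filled in from the Dataproc system bucket, so writes that relied on that default need the bucket passed explicitly. A minimal sketch of such a write, assuming hypothetical project, dataset, table, and bucket names:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class ExplicitTemporaryBucketWrite {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder().appName("bq-indirect-write").getOrCreate();

    Dataset<Row> df = spark.range(10).toDF(); // placeholder data

    df.write()
        .format("bigquery")
        .option("table", "my_project.my_dataset.numbers") // hypothetical target table
        .option("temporaryGcsBucket", "my-staging-bucket") // set explicitly; no fs.gs.system.bucket fallback
        .mode("append")
        .save();
  }
}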
@@ -610,6 +605,7 @@ public static SparkBigQueryConfig from(
 
     config.partitionOverwriteModeValue =
         getAnyOption(globalOptions, options, partitionOverwriteModeProperty)
+            .transform(String::toUpperCase)
             .transform(PartitionOverwriteMode::valueOf)
             .or(PartitionOverwriteMode.STATIC);
 
@@ -723,19 +719,19 @@ static ImmutableMap<String, String> normalizeConf(Map<String, String> conf) {
   public Credentials createCredentials() {
 
     return new BigQueryCredentialsSupplier(
-            accessTokenProviderFQCN.toJavaUtil(),
-            accessTokenProviderConfig.toJavaUtil(),
-            accessToken.toJavaUtil(),
-            credentialsKey.toJavaUtil(),
-            credentialsFile.toJavaUtil(),
-            loggedInUserName,
-            loggedInUserGroups,
-            impersonationServiceAccountsForUsers.toJavaUtil(),
-            impersonationServiceAccountsForGroups.toJavaUtil(),
-            impersonationServiceAccount.toJavaUtil(),
-            sparkBigQueryProxyAndHttpConfig.getProxyUri(),
-            sparkBigQueryProxyAndHttpConfig.getProxyUsername(),
-            sparkBigQueryProxyAndHttpConfig.getProxyPassword())
+            accessTokenProviderFQCN.toJavaUtil(),
+            accessTokenProviderConfig.toJavaUtil(),
+            accessToken.toJavaUtil(),
+            credentialsKey.toJavaUtil(),
+            credentialsFile.toJavaUtil(),
+            loggedInUserName,
+            loggedInUserGroups,
+            impersonationServiceAccountsForUsers.toJavaUtil(),
+            impersonationServiceAccountsForGroups.toJavaUtil(),
+            impersonationServiceAccount.toJavaUtil(),
+            sparkBigQueryProxyAndHttpConfig.getProxyUri(),
+            sparkBigQueryProxyAndHttpConfig.getProxyUsername(),
+            sparkBigQueryProxyAndHttpConfig.getProxyPassword())
         .getCredentials();
   }
 
Comments (0)