@@ -268,6 +268,24 @@ public static SparkBigQueryConfig from(
       SparkSession spark,
       Optional<StructType> schema,
       boolean tableIsMandatory) {
+    return from(
+        options,
+        customDefaults,
+        dataSourceVersion,
+        spark,
+        schema,
+        tableIsMandatory,
+        Optional.empty());
+  }
+
+  public static SparkBigQueryConfig from(
+      Map<String, String> options,
+      ImmutableMap<String, String> customDefaults,
+      DataSourceVersion dataSourceVersion,
+      SparkSession spark,
+      Optional<StructType> schema,
+      boolean tableIsMandatory,
+      Optional<TableId> overrideTableId) {
     Map<String, String> optionsMap = new HashMap<>(options);
     dataSourceVersion.updateOptionsMap(optionsMap);
     return SparkBigQueryConfig.from(
@@ -279,7 +297,8 @@ public static SparkBigQueryConfig from(
         spark.sqlContext().conf(),
         spark.version(),
         schema,
-        tableIsMandatory);
+        tableIsMandatory,
+        overrideTableId);
   }
 
   @VisibleForTesting
@@ -293,6 +312,31 @@ public static SparkBigQueryConfig from(
       String sparkVersion,
       Optional<StructType> schema,
       boolean tableIsMandatory) {
+    return from(
+        optionsInput,
+        originalGlobalOptions,
+        hadoopConfiguration,
+        customDefaults,
+        defaultParallelism,
+        sqlConf,
+        sparkVersion,
+        schema,
+        tableIsMandatory,
+        Optional.empty());
+  }
+
+  @VisibleForTesting
+  public static SparkBigQueryConfig from(
+      Map<String, String> optionsInput,
+      ImmutableMap<String, String> originalGlobalOptions,
+      Configuration hadoopConfiguration,
+      ImmutableMap<String, String> customDefaults,
+      int defaultParallelism,
+      SQLConf sqlConf,
+      String sparkVersion,
+      Optional<StructType> schema,
+      boolean tableIsMandatory,
+      Optional<TableId> overrideTableId) {
     SparkBigQueryConfig config = new SparkBigQueryConfig();
 
     ImmutableMap<String, String> options = toLowerCaseKeysMap(optionsInput);
@@ -314,9 +358,6 @@ public static SparkBigQueryConfig from(
         com.google.common.base.Optional.fromNullable(
                 hadoopConfiguration.get(GCS_CONFIG_PROJECT_ID_PROPERTY))
             .toJavaUtil();
-    Optional<String> tableParam =
-        getOptionFromMultipleParams(options, ImmutableList.of("table", "path"), DEFAULT_FALLBACK)
-            .toJavaUtil();
     Optional<String> datasetParam = getOption(options, "dataset").or(fallbackDataset).toJavaUtil();
     Optional<String> projectParam =
         firstPresent(getOption(options, "project").toJavaUtil(), fallbackProject);
@@ -327,28 +368,36 @@ public static SparkBigQueryConfig from(
     config.partitionRangeEnd = getOption(options, "partitionRangeEnd").transform(Long::parseLong);
     config.partitionRangeInterval =
         getOption(options, "partitionRangeInterval").transform(Long::parseLong);
-    Optional<String> datePartitionParam = getOption(options, DATE_PARTITION_PARAM).toJavaUtil();
-    datePartitionParam.ifPresent(
-        date -> validateDateFormat(date, config.getPartitionTypeOrDefault(), DATE_PARTITION_PARAM));
-    // checking for query
-    if (tableParam.isPresent()) {
-      String tableParamStr = tableParam.get().trim();
-      if (isQuery(tableParamStr)) {
-        // it is a query in practice
-        config.query = com.google.common.base.Optional.of(tableParamStr);
-        config.tableId = parseTableId("QUERY", datasetParam, projectParam, datePartitionParam);
-      } else {
-        config.tableId =
-            parseTableId(tableParamStr, datasetParam, projectParam, datePartitionParam);
-      }
+    if (overrideTableId.isPresent()) {
+      config.tableId = overrideTableId.get();
     } else {
-      // no table has been provided, it is either a query or an error
-      config.query = getOption(options, "query").transform(String::trim);
-      if (config.query.isPresent()) {
-        config.tableId = parseTableId("QUERY", datasetParam, projectParam, datePartitionParam);
-      } else if (tableIsMandatory) {
-        // No table nor query were set. We cannot go further.
-        throw new IllegalArgumentException("No table has been specified");
+      // checking for query
+      Optional<String> tableParam =
+          getOptionFromMultipleParams(options, ImmutableList.of("table", "path"), DEFAULT_FALLBACK)
+              .toJavaUtil();
+      Optional<String> datePartitionParam = getOption(options, DATE_PARTITION_PARAM).toJavaUtil();
+      datePartitionParam.ifPresent(
+          date ->
+              validateDateFormat(date, config.getPartitionTypeOrDefault(), DATE_PARTITION_PARAM));
+      if (tableParam.isPresent()) {
+        String tableParamStr = tableParam.get().trim();
+        if (isQuery(tableParamStr)) {
+          // it is a query in practice
+          config.query = com.google.common.base.Optional.of(tableParamStr);
+          config.tableId = parseTableId("QUERY", datasetParam, projectParam, datePartitionParam);
+        } else {
+          config.tableId =
+              parseTableId(tableParamStr, datasetParam, projectParam, datePartitionParam);
+        }
+      } else {
+        // no table has been provided, it is either a query or an error
+        config.query = getOption(options, "query").transform(String::trim);
+        if (config.query.isPresent()) {
+          config.tableId = parseTableId("QUERY", datasetParam, projectParam, datePartitionParam);
+        } else if (tableIsMandatory) {
+          // No table nor query were set. We cannot go further.
+          throw new IllegalArgumentException("No table has been specified");
+        }
       }
     }
 
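For context, a minimal caller-side sketch of the new overload follows. It is an illustration only, not part of this commit: the option values, the DataSourceVersion.V2 constant, and the OverrideTableIdExample wrapper class are assumptions; only the SparkBigQueryConfig.from(...) signature comes from the diff above.

// Hypothetical usage sketch (illustration only, not part of this commit).
import com.google.cloud.bigquery.TableId;
import com.google.common.collect.ImmutableMap;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructType;

public class OverrideTableIdExample {
  public static SparkBigQueryConfig buildConfig(SparkSession spark) {
    Map<String, String> options = new HashMap<>();
    options.put("project", "my-project"); // placeholder values
    options.put("dataset", "my_dataset");

    // An explicitly resolved TableId; the overrideTableId branch added above
    // makes it take precedence over any "table"/"path"/"query" option.
    TableId tableId = TableId.of("my-project", "my_dataset", "my_table");

    return SparkBigQueryConfig.from(
        options,
        ImmutableMap.of(),            // customDefaults
        DataSourceVersion.V2,         // assumed enum constant
        spark,
        Optional.<StructType>empty(), // no user-supplied schema
        true,                         // tableIsMandatory
        Optional.of(tableId));
  }
}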