@@ -456,7 +456,7 @@ def copy_from_gcs(
             Other arguments to pass to the underlying load_table_from_uri
             call on the BigQuery client.
         """
-        self._validate_copy_inputs(if_exists=if_exists, data_type=data_type)
+        self._validate_copy_inputs(if_exists=if_exists, data_type=data_type, override_data_type_check=True)
 
         job_config = self._process_job_config(
             job_config=job_config,
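
Review note: passing `override_data_type_check=True` here lets the GCS load path skip the new csv/json restriction, since the actual source format for a `load_table_from_uri` job is carried by the `job_config`. A hedged usage sketch of the call site follows; the `GoogleBigQuery` import path and the `copy_from_gcs` parameter names are assumed from the surrounding diff, not verified against the full signature:

```python
from google.cloud import bigquery
from parsons import GoogleBigQuery  # assumed import path for the connector

bq = GoogleBigQuery()

# The source format lives on the LoadJobConfig, so copy_from_gcs can accept
# formats beyond csv/json; the validator's data_type check is bypassed
# internally via override_data_type_check=True.
job_config = bigquery.LoadJobConfig(source_format=bigquery.SourceFormat.PARQUET)
bq.copy_from_gcs(
    gcs_blob_uri="gs://my-bucket/exports/*.parquet",  # hypothetical bucket/path
    table_name="my_dataset.my_table",                 # hypothetical destination
    if_exists="append",
    job_config=job_config,
)
```
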
@@ -1559,12 +1559,14 @@ def _fetch_query_results(self, cursor) -> Table:
         ptable = petl.frompickle(temp_filename)
         return Table(ptable)
 
-    def _validate_copy_inputs(self, if_exists: str):
+    def _validate_copy_inputs(self, if_exists: str, data_type: str, override_data_type_check: bool = False):
         if if_exists not in ["fail", "truncate", "append", "drop"]:
             raise ValueError(
                 f"Unexpected value for if_exists: {if_exists}, must be one of "
                 '"append", "drop", "truncate", or "fail"'
             )
+        if data_type not in ["csv", "json"] and not override_data_type_check:
+            raise ValueError(f"Only supports csv or json files [data_type = {data_type}]")
 
     def _load_table_from_uri(
         self, source_uris, destination, job_config, max_timeout, **load_kwargs
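
Taken together, the helper now validates both arguments. A minimal standalone sketch of the resulting behavior, reproduced outside the class for illustration:

```python
def _validate_copy_inputs(if_exists: str, data_type: str, override_data_type_check: bool = False):
    # Reject unsupported write dispositions.
    if if_exists not in ["fail", "truncate", "append", "drop"]:
        raise ValueError(
            f"Unexpected value for if_exists: {if_exists}, must be one of "
            '"append", "drop", "truncate", or "fail"'
        )
    # Reject file types other than csv/json unless the caller opts out,
    # as copy_from_gcs now does.
    if data_type not in ["csv", "json"] and not override_data_type_check:
        raise ValueError(f"Only supports csv or json files [data_type = {data_type}]")


_validate_copy_inputs(if_exists="append", data_type="csv")  # passes
_validate_copy_inputs(if_exists="drop", data_type="parquet", override_data_type_check=True)  # check bypassed

try:
    _validate_copy_inputs(if_exists="append", data_type="parquet")
except ValueError as exc:
    print(exc)  # Only supports csv or json files [data_type = parquet]
```
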