@@ -456,7 +456,19 @@ def copy_from_gcs(
                 Other arguments to pass to the underlying load_table_from_uri
                 call on the BigQuery client.
             """
-        self._validate_copy_inputs(if_exists=if_exists, data_type=data_type)
+        self._validate_copy_inputs(
+            if_exists=if_exists,
+            data_type=data_type,
+            accepted_data_types=[
+                "csv",
+                "json",
+                "parquet",
+                "datastore_backup",
+                "newline_delimited_json",
+                "avro",
+                "orc",
+            ],
+        )
 
         job_config = self._process_job_config(
             job_config=job_config,
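
For orientation, here is a minimal sketch of how this call site might be exercised after the change; the top-level import and the `gcs_blob_uri`/`table_name` parameter names are assumptions about the surrounding connector API rather than something shown in this diff. The point is only that an unsupported `data_type` now fails fast, before any load job is submitted.

```python
# Hedged usage sketch; parameter names are assumed, not confirmed by the diff.
from parsons import GoogleBigQuery

bq = GoogleBigQuery()
bq.copy_from_gcs(
    gcs_blob_uri="gs://example-bucket/export.parquet",  # assumed example URI
    table_name="analytics.events",                      # assumed example table
    if_exists="append",   # must be "fail", "truncate", "append", or "drop"
    data_type="parquet",  # must be one of the accepted_data_types listed above
)
```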
@@ -1565,25 +1577,13 @@ def _fetch_query_results(self, cursor) -> Table:
         ptable = petl.frompickle(temp_filename)
         return Table(ptable)
 
-    def _validate_copy_inputs(
-        self,
-        if_exists: str,
-        data_type: str,
-        accepted_data_types: list[str] = [
-            "csv",
-            "json",
-            "parquet",
-            "datastore_backup",
-            "newline_delimited_json",
-            "avro",
-            "orc",
-        ],
-    ):
+    def _validate_copy_inputs(self, if_exists: str, data_type: str, accepted_data_types: list[str]):
         if if_exists not in ["fail", "truncate", "append", "drop"]:
             raise ValueError(
                 f"Unexpected value for if_exists: {if_exists}, must be one of "
                 '"append", "drop", "truncate", or "fail"'
             )
+
         if data_type not in accepted_data_types:
             raise ValueError(f"Only supports {accepted_data_types} files [data_type = {data_type}]")
 
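
Besides making the accepted formats explicit at each call site, the new signature drops a list used as a default argument. Whatever the author's motivation, that pattern is worth avoiding in Python because a default value is evaluated once, at function definition time, and is then shared by every call. A minimal standalone sketch (illustrative names only, not code from this repository):

```python
# Illustrative only; not code from this repository.
def risky(accepted=["csv", "json"]):
    accepted.append("parquet")  # mutates the single list shared by every call
    return accepted

print(risky())  # ['csv', 'json', 'parquet']
print(risky())  # ['csv', 'json', 'parquet', 'parquet'] - the mutation persists

# Requiring the caller to pass the list, as the new signature does,
# leaves no shared state to mutate between calls.
def safer(data_type: str, accepted: list[str]) -> bool:
    return data_type in accepted
```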