2828from ibis .common .dispatch import lazy_singledispatch
2929from ibis .expr .operations .udf import InputType
3030from ibis .formats .pyarrow import PyArrowSchema , PyArrowType
31- from ibis .util import deprecated , gen_name , normalize_filename , normalize_filenames
31+ from ibis .util import deprecated , normalize_filename , normalize_filenames
3232
3333try :
3434 from datafusion import ExecutionContext as SessionContext
@@ -160,7 +160,7 @@ def _safe_raw_sql(self, sql: sge.Statement) -> Any:
160160 yield self .raw_sql (sql ).collect ()
161161
162162 def _get_schema_using_query (self , query : str ) -> sch .Schema :
163- name = gen_name ("datafusion_metadata_view" )
163+ name = util . gen_name ("datafusion_metadata_view" )
164164 table = sg .table (name , quoted = self .compiler .quoted )
165165 src = sge .Create (
166166 this = table ,
@@ -437,11 +437,11 @@ def read_csv(
437437 The just-registered table
438438
439439 """
440- path = normalize_filenames (source_list )
441- table_name = table_name or gen_name ( "read_csv " )
440+ paths = normalize_filenames (source_list )
441+ table_name = table_name or util . gen_name_from_path ( paths [ 0 ], "csv " )
442442 # Our other backends support overwriting views / tables when re-registering
443443 self .con .deregister_table (table_name )
444- self .con .register_csv (table_name , path , ** kwargs )
444+ self .con .register_csv (table_name , paths , ** kwargs )
445445 return self .table (table_name )
446446
447447 def read_parquet (
@@ -466,7 +466,7 @@ def read_parquet(
466466
467467 """
468468 path = normalize_filename (path )
469- table_name = table_name or gen_name ( "read_parquet " )
469+ table_name = table_name or util . gen_name_from_path ( path , "parquet " )
470470 # Our other backends support overwriting views / tables when reregistering
471471 self .con .deregister_table (table_name )
472472 self .con .register_parquet (table_name , path , ** kwargs )
@@ -496,7 +496,7 @@ def read_delta(
496496 """
497497 source_table = normalize_filename (source_table )
498498
499- table_name = table_name or gen_name ( "read_delta " )
499+ table_name = table_name or util . gen_name_from_path ( source_table , "delta " )
500500
501501 # Our other backends support overwriting views / tables when reregistering
502502 self .con .deregister_table (table_name )
@@ -730,55 +730,55 @@ def _read_in_memory(
730730
@_read_in_memory.register(dict)
def _pydict(source, table_name, _conn, overwrite: bool = False):
    """Register a plain Python dict of columns with DataFusion as *table_name*."""
    scratch = util.gen_name("pydict")
    with _create_and_drop_memtable(_conn, table_name, scratch, overwrite):
        _conn.con.from_pydict(source, name=scratch)
736736
737737
@_read_in_memory.register("polars.DataFrame")
def _polars(source, table_name, _conn, overwrite: bool = False):
    """Register an eager polars DataFrame with the DataFusion session."""
    scratch = util.gen_name("polars")
    with _create_and_drop_memtable(_conn, table_name, scratch, overwrite):
        _conn.con.from_polars(source, name=scratch)
743743
744744
@_read_in_memory.register("polars.LazyFrame")
def _polars_lazy(source, table_name, _conn, overwrite: bool = False):
    """Materialize a polars LazyFrame and register it with DataFusion.

    Renamed from ``_polars``: the original redefined (and shadowed) the
    ``polars.DataFrame`` handler of the same name (flake8/ruff F811).
    Dispatch was unaffected — ``register`` captures the function object at
    decoration time — but the module was left with only one ``_polars``
    binding, which made tracebacks and introspection misleading.
    """
    tmp_name = util.gen_name("polars")
    with _create_and_drop_memtable(_conn, table_name, tmp_name, overwrite):
        # .collect() executes the lazy query plan; from_polars needs a
        # materialized DataFrame to ingest.
        _conn.con.from_polars(source.collect(), name=tmp_name)
750750
751751
@_read_in_memory.register("pyarrow.Table")
def _pyarrow_table(source, table_name, _conn, overwrite: bool = False):
    """Register an in-memory pyarrow Table with the DataFusion session."""
    scratch = util.gen_name("pyarrow")
    with _create_and_drop_memtable(_conn, table_name, scratch, overwrite):
        _conn.con.from_arrow(source, name=scratch)
757757
758758
@_read_in_memory.register("pyarrow.RecordBatchReader")
def _pyarrow_rbr(source, table_name, _conn, overwrite: bool = False):
    """Drain a pyarrow RecordBatchReader and register the result with DataFusion."""
    scratch = util.gen_name("pyarrow")
    with _create_and_drop_memtable(_conn, table_name, scratch, overwrite):
        # read_all() consumes the reader into a single in-memory Table.
        materialized = source.read_all()
        _conn.con.from_arrow(materialized, name=scratch)
764764
765765
@_read_in_memory.register("pyarrow.RecordBatch")
def _pyarrow_rb(source, table_name, _conn, overwrite: bool = False):
    """Register a single pyarrow RecordBatch with the DataFusion session."""
    scratch = util.gen_name("pyarrow")
    with _create_and_drop_memtable(_conn, table_name, scratch, overwrite):
        # register_record_batches expects a list of batch groups, hence [[...]].
        _conn.con.register_record_batches(scratch, [[source]])
771771
772772
@_read_in_memory.register("pyarrow.dataset.Dataset")
def _pyarrow_ds(source, table_name, _conn, overwrite: bool = False):
    """Register a pyarrow Dataset with the DataFusion session.

    Renamed from ``_pyarrow_rb``: the original redefined (and shadowed) the
    ``pyarrow.RecordBatch`` handler of the same name (flake8/ruff F811).
    Dispatch still worked because ``register`` captures the function object,
    but the duplicate binding hid the other handler from introspection.
    """
    tmp_name = util.gen_name("pyarrow")
    with _create_and_drop_memtable(_conn, table_name, tmp_name, overwrite):
        _conn.con.register_dataset(tmp_name, source)
778778
779779
@_read_in_memory.register("pandas.DataFrame")
def _pandas(source: pd.DataFrame, table_name, _conn, overwrite: bool = False):
    """Register an in-memory pandas DataFrame with the DataFusion session."""
    scratch = util.gen_name("pandas")
    with _create_and_drop_memtable(_conn, table_name, scratch, overwrite):
        _conn.con.from_pandas(source, name=scratch)