2828from ibis .common .dispatch import lazy_singledispatch
2929from ibis .expr .operations .udf import InputType
3030from ibis .formats .pyarrow import PyArrowSchema , PyArrowType
31- from ibis .util import deprecated , gen_name , normalize_filename , normalize_filenames
3231
3332try :
3433 from datafusion import ExecutionContext as SessionContext
@@ -160,7 +159,7 @@ def _safe_raw_sql(self, sql: sge.Statement) -> Any:
160159 yield self .raw_sql (sql ).collect ()
161160
162161 def _get_schema_using_query (self , query : str ) -> sch .Schema :
163- name = gen_name ("datafusion_metadata_view" )
162+ name = util . gen_name ("datafusion_metadata_view" )
164163 table = sg .table (name , quoted = self .compiler .quoted )
165164 src = sge .Create (
166165 this = table ,
@@ -345,7 +344,7 @@ def get_schema(
345344 table = database .table (table_name )
346345 return sch .schema (table .schema )
347346
348- @deprecated (
347+ @util . deprecated (
349348 as_of = "9.1" ,
350349 instead = "use the explicit `read_*` method for the filetype you are trying to read, e.g., read_parquet, read_csv, etc." ,
351350 )
@@ -437,11 +436,11 @@ def read_csv(
437436 The just-registered table
438437
439438 """
440- path = normalize_filenames (source_list )
441- table_name = table_name or gen_name ( "read_csv " )
439+ paths = util . normalize_filenames (source_list )
440+ table_name = table_name or util . gen_name_from_path ( paths [ 0 ], "csv " )
442441 # Our other backends support overwriting views / tables when re-registering
443442 self .con .deregister_table (table_name )
444- self .con .register_csv (table_name , path , ** kwargs )
443+ self .con .register_csv (table_name , paths , ** kwargs )
445444 return self .table (table_name )
446445
447446 def read_parquet (
@@ -465,8 +464,8 @@ def read_parquet(
465464 The just-registered table
466465
467466 """
468- path = normalize_filename (path )
469- table_name = table_name or gen_name ( "read_parquet " )
467+ path = util . normalize_filename (path )
468+ table_name = table_name or util . gen_name_from_path ( path , "parquet " )
470469 # Our other backends support overwriting views / tables when reregistering
471470 self .con .deregister_table (table_name )
472471 self .con .register_parquet (table_name , path , ** kwargs )
@@ -494,9 +493,9 @@ def read_delta(
494493 The just-registered table
495494
496495 """
497- source_table = normalize_filename (source_table )
496+ source_table = util . normalize_filename (source_table )
498497
499- table_name = table_name or gen_name ( "read_delta " )
498+ table_name = table_name or util . gen_name_from_path ( source_table , "delta " )
500499
501500 # Our other backends support overwriting views / tables when reregistering
502501 self .con .deregister_table (table_name )
@@ -730,55 +729,55 @@ def _read_in_memory(
730729
@_read_in_memory.register(dict)
def _pydict(source, table_name, _conn, overwrite: bool = False):
    """Register an in-memory Python ``dict`` as a DataFusion table.

    The data is staged under a throwaway name; the context manager swaps it
    into ``table_name`` (honoring ``overwrite``) and drops the staging table.
    """
    staging = util.gen_name("pydict")
    with _create_and_drop_memtable(_conn, table_name, staging, overwrite):
        _conn.con.from_pydict(source, name=staging)
736735
737736
@_read_in_memory.register("polars.DataFrame")
def _polars(source, table_name, _conn, overwrite: bool = False):
    """Register an eager polars ``DataFrame`` as a DataFusion table.

    Stages the frame under a temporary name, then swaps it into
    ``table_name`` via the create-and-drop context manager.
    """
    staging = util.gen_name("polars")
    with _create_and_drop_memtable(_conn, table_name, staging, overwrite):
        _conn.con.from_polars(source, name=staging)
743742
744743
@_read_in_memory.register("polars.LazyFrame")
def _polars_lazyframe(source, table_name, _conn, overwrite: bool = False):
    """Register a polars ``LazyFrame`` as a DataFusion table.

    The frame is collected eagerly because DataFusion's ``from_polars``
    accepts only a materialized DataFrame.
    """
    # Renamed from `_polars`: the previous definition duplicated the
    # eager-DataFrame handler's name, shadowing its module-level binding
    # (ruff F811). Dispatch is unaffected — `register` keys on the argument
    # type, not the function name.
    tmp_name = util.gen_name("polars")
    with _create_and_drop_memtable(_conn, table_name, tmp_name, overwrite):
        _conn.con.from_polars(source.collect(), name=tmp_name)
750749
751750
@_read_in_memory.register("pyarrow.Table")
def _pyarrow_table(source, table_name, _conn, overwrite: bool = False):
    """Register a pyarrow ``Table`` as a DataFusion table.

    Stages the data under a temporary name and swaps it into ``table_name``
    inside the create-and-drop context manager.
    """
    staging = util.gen_name("pyarrow")
    with _create_and_drop_memtable(_conn, table_name, staging, overwrite):
        _conn.con.from_arrow(source, name=staging)
757756
758757
@_read_in_memory.register("pyarrow.RecordBatchReader")
def _pyarrow_rbr(source, table_name, _conn, overwrite: bool = False):
    """Register a pyarrow ``RecordBatchReader`` as a DataFusion table.

    The reader is drained with ``read_all()`` into a single table before
    registration, so the stream is fully consumed here.
    """
    staging = util.gen_name("pyarrow")
    with _create_and_drop_memtable(_conn, table_name, staging, overwrite):
        _conn.con.from_arrow(source.read_all(), name=staging)
764763
765764
@_read_in_memory.register("pyarrow.RecordBatch")
def _pyarrow_rb(source, table_name, _conn, overwrite: bool = False):
    """Register a single pyarrow ``RecordBatch`` as a DataFusion table.

    ``register_record_batches`` takes a list of batch lists (one per
    partition), so the lone batch is wrapped as ``[[source]]``.
    """
    staging = util.gen_name("pyarrow")
    with _create_and_drop_memtable(_conn, table_name, staging, overwrite):
        _conn.con.register_record_batches(staging, [[source]])
771770
772771
@_read_in_memory.register("pyarrow.dataset.Dataset")
def _pyarrow_dataset(source, table_name, _conn, overwrite: bool = False):
    """Register a pyarrow ``dataset.Dataset`` as a DataFusion table."""
    # Renamed from `_pyarrow_rb`: the previous definition duplicated the
    # RecordBatch handler's name, shadowing its module-level binding
    # (ruff F811). Dispatch is unaffected — `register` keys on the argument
    # type, not the function name.
    tmp_name = util.gen_name("pyarrow")
    with _create_and_drop_memtable(_conn, table_name, tmp_name, overwrite):
        _conn.con.register_dataset(tmp_name, source)
778777
779778
@_read_in_memory.register("pandas.DataFrame")
def _pandas(source: pd.DataFrame, table_name, _conn, overwrite: bool = False):
    """Register a pandas ``DataFrame`` as a DataFusion table.

    Stages the frame under a temporary name and swaps it into
    ``table_name`` inside the create-and-drop context manager.
    """
    staging = util.gen_name("pandas")
    with _create_and_drop_memtable(_conn, table_name, staging, overwrite):
        _conn.con.from_pandas(source, name=staging)
0 commit comments