@@ -293,9 +293,42 @@ def test_timestamp_extract_week_of_year(backend, alltypes, df):
 @pytest.mark.parametrize(
     ("ibis_unit", "pandas_unit"),
     [
-        param("Y", "Y", id="year"),
-        param("Q", "Q", id="quarter"),
-        param("M", "M", id="month"),
+        param(
+            "Y",
+            "Y",
+            id="year",
+            marks=[
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"],
+                    reason="no support for timezoneless timestamps",
+                    raises=UserWarning,
+                ),
+            ],
+        ),
+        param(
+            "Q",
+            "Q",
+            id="quarter",
+            marks=[
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"],
+                    reason="no support for timezoneless timestamps",
+                    raises=UserWarning,
+                ),
+            ],
+        ),
+        param(
+            "M",
+            "M",
+            id="month",
+            marks=[
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"],
+                    reason="no support for timezoneless timestamps",
+                    raises=UserWarning,
+                ),
+            ],
+        ),
         param(
             "W",
             "W",
@@ -306,9 +339,24 @@ def test_timestamp_extract_week_of_year(backend, alltypes, df):
                     raises=AssertionError,
                     reason="implemented, but doesn't match other backends",
                 ),
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"],
+                    reason="no support for timezoneless timestamps",
+                    raises=UserWarning,
+                ),
+            ],
+        ),
+        param(
+            "D",
+            "D",
+            marks=[
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"],
+                    reason="no support for timezoneless timestamps",
+                    raises=UserWarning,
+                ),
             ],
         ),
-        param("D", "D"),
         param(
             "h",
             "h",
@@ -432,6 +480,11 @@ def test_timestamp_truncate(backend, alltypes, df, ibis_unit, pandas_unit):
         ),
     ],
 )
+@pytest.mark.xfail_version(
+    pyspark=["pyspark<3.4"],
+    reason="no support for timezoneless timestamps",
+    raises=UserWarning,
+)
 @pytest.mark.notimpl(["druid"], raises=com.OperationNotDefinedError)
 def test_date_truncate(backend, alltypes, df, unit):
     expr = alltypes.timestamp_col.date().truncate(unit).name("tmp")
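Note that `xfail_version`, used throughout this patch, is a custom marker from the ibis test suite, not a pytest built-in. As a rough illustration only, a marker with this call shape (`pytest.mark.xfail_version(pyspark=["pyspark<3.4"], ...)`) could be wired up in a `conftest.py` along these lines; this is a hypothetical sketch assuming the `packaging` library, and the real ibis hook may differ:

```python
# conftest.py -- hypothetical sketch of a version-gated xfail marker.
import importlib.metadata

import pytest
from packaging.requirements import Requirement


def pytest_configure(config):
    # Register the marker so pytest does not warn about it.
    config.addinivalue_line(
        "markers",
        "xfail_version: xfail when an installed dependency matches a version spec",
    )


def pytest_collection_modifyitems(config, items):
    for item in items:
        for marker in item.iter_markers(name="xfail_version"):
            kwargs = dict(marker.kwargs)
            reason = kwargs.pop("reason", "version-gated xfail")
            raises = kwargs.pop("raises", None)
            # Remaining kwargs map a backend name to version specs,
            # e.g. pyspark=["pyspark<3.4"].
            for specs in kwargs.values():
                for spec in specs:
                    req = Requirement(spec)
                    try:
                        installed = importlib.metadata.version(req.name)
                    except importlib.metadata.PackageNotFoundError:
                        continue  # dependency absent: mark does not apply
                    if req.specifier.contains(installed, prereleases=True):
                        item.add_marker(
                            pytest.mark.xfail(reason=reason, raises=raises)
                        )
```

Under a hook like this, running the suite against pyspark 3.3 would collect the marked cases as expected failures (here, expecting the `UserWarning` about timezoneless timestamps) instead of reporting errors.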
@@ -501,10 +554,31 @@ def test_date_truncate(backend, alltypes, df, unit):
                     reason="Bind error: Invalid unit: week",
                 ),
                 sqlite_without_ymd_intervals,
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"], raises=AssertionError
+                ),
+            ],
+        ),
+        param(
+            "D",
+            pd.offsets.DateOffset,
+            marks=[
+                sqlite_without_ymd_intervals,
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"], raises=AssertionError
+                ),
+            ],
+        ),
+        param(
+            "h",
+            pd.Timedelta,
+            marks=[
+                sqlite_without_hms_intervals,
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"], raises=AssertionError
+                ),
             ],
         ),
-        param("D", pd.offsets.DateOffset, marks=sqlite_without_ymd_intervals),
-        param("h", pd.Timedelta, marks=sqlite_without_hms_intervals),
         param("m", pd.Timedelta, marks=sqlite_without_hms_intervals),
         param("s", pd.Timedelta, marks=sqlite_without_hms_intervals),
         param(
@@ -770,6 +844,10 @@ def convert_to_offset(x):
                     raises=PySparkConnectGrpcException,
                     reason="arrow conversion breaks",
                 ),
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"],
+                    reason="no support for timezoneless timestamps",
+                ),
                 pytest.mark.notyet(
                     ["databricks"],
                     raises=AssertionError,
@@ -830,6 +908,10 @@ def convert_to_offset(x):
                     raises=PySparkConnectGrpcException,
                     reason="arrow conversion breaks",
                 ),
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"],
+                    reason="no support for timezoneless timestamps",
+                ),
                 pytest.mark.notyet(
                     ["databricks"],
                     raises=AssertionError,
@@ -876,6 +958,9 @@ def test_temporal_binop(backend, con, alltypes, df, expr_fn, expected_fn):
                     reason="TableException: DAY_INTERVAL_TYPES precision is not supported: 5",
                 ),
                 sqlite_without_ymd_intervals,
+                pytest.mark.xfail_version(
+                    pyspark=["pyspark<3.4"], raises=AssertionError
+                ),
             ],
         ),
         param("5W", plus, id="weeks-plus", marks=sqlite_without_ymd_intervals),
@@ -940,7 +1025,8 @@ def test_timestamp_comparison_filter(backend, con, alltypes, df, func_name):
         comparison_fn(alltypes.timestamp_col.cast("timestamp('UTC')"), ts)
     )
 
-    col = df.timestamp_col.dt.tz_localize("UTC")
+    if getattr((col := df.timestamp_col).dtype, "tz", None) is None:
+        col = df.timestamp_col.dt.tz_localize("UTC")
     expected = df[comparison_fn(col, ts)]
     result = con.execute(expr)
 
@@ -973,7 +1059,8 @@ def test_timestamp_comparison_filter_numpy(backend, con, alltypes, df, func_name
 
     ts = pd.Timestamp(ts.item(), tz="UTC")
 
-    col = df.timestamp_col.dt.tz_localize("UTC")
+    if getattr((col := df.timestamp_col).dtype, "tz", None) is None:
+        col = df.timestamp_col.dt.tz_localize("UTC")
     expected = df[comparison_fn(col, ts)]
     result = con.execute(expr)
 
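The guard in the last two hunks binds the column with a walrus expression and only localizes it when the dtype is still timezone-naive, since `Series.dt.tz_localize` raises on a column that is already tz-aware. A self-contained illustration of the pattern (the sample data is made up):

```python
import pandas as pd

naive = pd.Series(pd.to_datetime(["2023-01-01 12:00:00", "2023-06-01 00:30:00"]))
aware = naive.dt.tz_localize("UTC")

for col in (naive, aware):
    # getattr with a None default also covers dtypes with no `tz` attribute
    # at all (plain datetime64[ns]); only naive columns get localized.
    if getattr(col.dtype, "tz", None) is None:
        col = col.dt.tz_localize("UTC")
    assert str(col.dtype.tz) == "UTC"
```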