1616from xarray .testing import assert_identical
1717
1818
19- def test_store_dask () -> None :
19+ def test_store_dask (any_spec_version : int | None ) -> None :
2020 shape = (100 , 100 )
2121 dask_chunks = (20 , 20 )
2222 dask_array = dask .array .random .random (shape , chunks = dask_chunks )
2323
2424 zarr_chunks = (10 , 10 )
2525 with tempfile .TemporaryDirectory () as tmpdir :
26- repo = Repository .create (local_filesystem_storage (tmpdir ))
26+ repo = Repository .create (
27+ local_filesystem_storage (tmpdir ),
28+ spec_version = any_spec_version ,
29+ )
2730 session = repo .writable_session ("main" )
2831 group = zarr .group (store = session .store , overwrite = True )
2932
@@ -52,27 +55,35 @@ def test_store_dask() -> None:
5255 store_dask (sources = [dask_array ], targets = [zarray ])
5356
5457
55- def test_distributed () -> None :
58+ def test_distributed (any_spec_version : int | None ) -> None :
5659 with distributed .Client (): # type: ignore [no-untyped-call]
5760 ds = create_test_data ().chunk (dim1 = 3 , dim2 = 4 )
58- with roundtrip (ds , commit = True ) as actual :
61+ with roundtrip (ds , commit = True , spec_version = any_spec_version ) as actual :
5962 assert_identical (actual , ds )
6063
6164
@pytest.mark.parametrize("scheduler", ["threads", "processes"])
def test_dask_schedulers(scheduler, any_spec_version: int | None) -> None:
    """Round-trip a chunked dataset under both the threaded and the
    process-based dask schedulers.

    Parameters
    ----------
    scheduler : str
        Dask scheduler name injected by ``pytest.mark.parametrize``.
    any_spec_version : int | None
        Repository spec version forwarded to ``roundtrip``.
    """
    with dask.config.set(scheduler=scheduler):
        ds = create_test_data().chunk(dim1=3, dim2=4)
        # Commit only under the "processes" scheduler; the threaded run
        # leaves the session uncommitted (matches the original condition
        # commit = (scheduler == "processes")).
        with roundtrip(
            ds,
            commit=scheduler == "processes",
            spec_version=any_spec_version,
        ) as actual:
            assert_identical(actual, ds)
6875
6976
7077@pytest .mark .parametrize ("scheduler" , ["threads" , "processes" ])
71- def test_xarray_to_icechunk_nested_pickling (scheduler ) -> None :
78+ def test_xarray_to_icechunk_nested_pickling (
79+ scheduler , any_spec_version : int | None
80+ ) -> None :
7281 with dask .config .set (scheduler = scheduler ):
7382 ds = create_test_data (dim_sizes = (2 , 3 , 4 )).chunk (- 1 )
7483 with tempfile .TemporaryDirectory () as tmpdir :
75- repo = Repository .create (local_filesystem_storage (tmpdir ))
84+ repo = Repository .create (
85+ local_filesystem_storage (tmpdir ), spec_version = any_spec_version
86+ )
7687 session = repo .writable_session ("main" )
7788
7889 to_icechunk (ds , session = session , mode = "w" )
@@ -93,11 +104,14 @@ def test_xarray_to_icechunk_nested_pickling(scheduler) -> None:
93104
94105
95106@pytest .mark .parametrize ("scheduler" , ["threads" , "processes" ])
96- def test_fork_session_deep_copies (scheduler ) -> None :
107+ def test_fork_session_deep_copies (scheduler , any_spec_version : int | None ) -> None :
97108 with dask .config .set (scheduler = scheduler ):
98109 ds = create_test_data (dim_sizes = (2 , 3 , 4 )).drop_encoding ().chunk (dim3 = 1 )
99110 with tempfile .TemporaryDirectory () as tmpdir :
100- repo = Repository .create (local_filesystem_storage (tmpdir ))
111+ repo = Repository .create (
112+ local_filesystem_storage (tmpdir ),
113+ spec_version = any_spec_version ,
114+ )
101115
102116 session = repo .writable_session ("main" )
103117 ds .to_zarr (session .store , mode = "w" , compute = False )
0 commit comments