from arcticdb_ext.exceptions import InternalException, SortingException, UserInputException
from arcticdb_ext.storage import NoDataFoundException
from arcticdb_ext.version_store import SortedValue
-from arcticdb.exceptions import ArcticDbNotYetImplemented, LibraryNotFound, MismatchingLibraryOptions, StreamDescriptorMismatch
+from arcticdb.exceptions import ArcticDbNotYetImplemented, LibraryNotFound, MismatchingLibraryOptions, StreamDescriptorMismatch, SchemaException
from arcticdb.adapters.mongo_library_adapter import MongoLibraryAdapter
from arcticdb.arctic import Arctic
from arcticdb.options import LibraryOptions, EnterpriseLibraryOptions
@@ -309,8 +309,8 @@ def test_staged_segment_has_empty_df(self, arctic_library, finalize_method):
        lib.write("sym", df1, staged=True)
        lib.write("sym", df2, staged=True)
        lib.write("sym", df3, staged=True)
-        lib.finalize_staged_data("sym", mode=finalize_method)
-        assert_frame_equal(lib.read("sym").data, pd.concat([df1, df2, df3]))
+        with pytest.raises(SchemaException):
+            lib.finalize_staged_data("sym", mode=finalize_method)

    def test_df_without_rows(self, arctic_library, finalize_method):
        lib = arctic_library
@@ -334,9 +334,8 @@ def test_append_throws_with_missmatched_column_set(self, arctic_library):

        appended_df = pd.DataFrame({"col_1": [1]}, index=pd.DatetimeIndex([pd.Timestamp(2024, 1, 2)]))
        lib.write("sym", appended_df, staged=True)
-        with pytest.raises(StreamDescriptorMismatch) as exception_info:
+        with pytest.raises(SchemaException) as exception_info:
            lib.finalize_staged_data("sym", mode=StagedDataFinalizeMethod.APPEND)
-        assert "APPEND" in str(exception_info.value)
        assert "col_1" in str(exception_info.value)

    def test_append_throws_column_subset(self, arctic_library):
@@ -352,9 +351,8 @@ def test_append_throws_column_subset(self, arctic_library):
        lib.write("sym", df1)
        df2 = pd.DataFrame({"b": [1]}, index=pd.DatetimeIndex([pd.Timestamp("2024-01-02")]))
        lib.write("sym", df2, staged=True)
-        with pytest.raises(StreamDescriptorMismatch) as exception_info:
+        with pytest.raises(SchemaException) as exception_info:
            lib.finalize_staged_data("sym", StagedDataFinalizeMethod.APPEND)
-        assert "APPEND" in str(exception_info.value)
        assert "a" in str(exception_info.value)
        assert "b" in str(exception_info.value)

@@ -366,9 +364,8 @@ def test_append_throws_on_incompatible_dtype(self, arctic_library):

        appended_df = pd.DataFrame({"col_0": ["asd"]}, index=pd.DatetimeIndex([pd.Timestamp(2024, 1, 2)]))
        lib.write("sym", appended_df, staged=True)
-        with pytest.raises(StreamDescriptorMismatch) as exception_info:
+        with pytest.raises(SchemaException) as exception_info:
            lib.finalize_staged_data("sym", mode=StagedDataFinalizeMethod.APPEND)
-        assert "APPEND" in str(exception_info.value)
        assert "col_0" in str(exception_info.value)
        assert "INT64" in str(exception_info.value)

@@ -385,9 +382,8 @@ def test_types_cant_be_promoted(self, arctic_library):
        lib.write("sym", pd.DataFrame({"col": np.array([1], dtype="int64")}, index=pd.DatetimeIndex([pd.Timestamp(2024, 1, 1)])))

        lib.write("sym", pd.DataFrame({"col": np.array([1], dtype="int32")}, index=pd.DatetimeIndex([pd.Timestamp(2024, 1, 2)])), staged=True)
-        with pytest.raises(StreamDescriptorMismatch) as exception_info:
+        with pytest.raises(SchemaException) as exception_info:
            lib.finalize_staged_data("sym", mode=StagedDataFinalizeMethod.APPEND)
-        assert "APPEND" in str(exception_info.value)
        assert "INT32" in str(exception_info.value)
        assert "INT64" in str(exception_info.value)

@@ -397,9 +393,8 @@ def test_appending_reordered_column_set_throws(self, arctic_library):
        lib.write("sym", pd.DataFrame({"col_0": [1], "col_1": ["test"], "col_2": [1.2]}, index=pd.DatetimeIndex([pd.Timestamp(2024, 1, 1)])))

        lib.write("sym", pd.DataFrame({"col_1": ["asd"], "col_2": [2.5], "col_0": [2]}, index=pd.DatetimeIndex([pd.Timestamp(2024, 1, 2)])), staged=True)
-        with pytest.raises(StreamDescriptorMismatch) as exception_info:
+        with pytest.raises(SchemaException) as exception_info:
            lib.finalize_staged_data("sym", mode=StagedDataFinalizeMethod.APPEND)
-        assert "APPEND" in str(exception_info.value)
        assert "col_0" in str(exception_info.value)
        assert "col_1" in str(exception_info.value)
        assert "col_2" in str(exception_info.value)
@@ -425,8 +420,8 @@ def test_appended_df_interleaves_with_storage(self, arctic_library):
        lib.finalize_staged_data("sym", mode=StagedDataFinalizeMethod.APPEND)
        assert "append" in str(exception_info.value)

-    def test_appended_df_start_same_as_df_end(self, lmdb_library):
-        lib = lmdb_library
+    def test_appended_df_start_same_as_df_end(self, arctic_library):
+        lib = arctic_library
        df = pd.DataFrame(
            {"col": [1, 2, 3]},
            index=pd.DatetimeIndex([np.datetime64('2023-01-01'), np.datetime64('2023-01-02'), np.datetime64('2023-01-03')], dtype="datetime64[ns]")
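
Taken together, these hunks switch the staged-data tests from expecting StreamDescriptorMismatch to the public SchemaException (newly added to the arcticdb.exceptions import) whenever finalize_staged_data is given segments whose schema does not match. Below is a minimal sketch of that expected behaviour, mirroring test_append_throws_with_missmatched_column_set; the LMDB URI, library name, symbol name, and the import path used for StagedDataFinalizeMethod are illustrative assumptions, not taken from the diff.

# Sketch, not the test suite: finalizing a staged segment whose columns do not
# match the stored schema is expected to raise SchemaException.
import pandas as pd

from arcticdb import Arctic
from arcticdb.exceptions import SchemaException
from arcticdb.version_store.library import StagedDataFinalizeMethod  # assumed import path

ac = Arctic("lmdb://arcticdb_schema_demo")            # assumed local LMDB store
lib = ac.get_library("demo", create_if_missing=True)  # assumed library name

# Stored data has column "col_0"; the staged segment has "col_1" instead.
lib.write("sym", pd.DataFrame({"col_0": [1]}, index=pd.DatetimeIndex([pd.Timestamp(2024, 1, 1)])))
lib.write("sym", pd.DataFrame({"col_1": [2]}, index=pd.DatetimeIndex([pd.Timestamp(2024, 1, 2)])), staged=True)

try:
    lib.finalize_staged_data("sym", mode=StagedDataFinalizeMethod.APPEND)
except SchemaException as e:
    print("finalize rejected the mismatched staged segment:", e)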