@@ -431,7 +431,7 @@ async def run_migrations_with_monitoring(
431431 dry_run : bool = False ,
432432 limit : Optional [int ] = None ,
433433 verbose : bool = False ,
434- progress_callback : Optional [Callable ] = None ,
434+ progress_callback : Optional [Callable ] = None ,  # type: ignore
435435 ) -> Dict [str , Any ]:
436436 """
437437 Run pending migrations with enhanced performance monitoring.
@@ -551,10 +551,10 @@ async def run_migrations_with_monitoring(
551551 stats = result ["performance_stats" ]
552552 if stats :
553553 print (f"Total time: { stats .get ('total_time_seconds' , 0 ):.2f} s" )
554- if "items_per_second" in stats :
555- print (f"Performance: { stats ['items_per_second' ]:.1f} items/second" )
556- if "peak_memory_mb" in stats :
557- print (f"Peak memory: { stats ['peak_memory_mb' ]:.1f} MB" )
554+ if "items_per_second" in stats : # type: ignore
555+ print (f"Performance: { stats ['items_per_second' ]:.1f} items/second" ) # type: ignore
556+ if "peak_memory_mb" in stats : # type: ignore
557+ print (f"Peak memory: { stats ['peak_memory_mb' ]:.1f} MB" ) # type: ignore
558558
559559 return result
560560
@@ -615,7 +615,7 @@ async def verify_data_integrity(self, verbose: bool = False) -> Dict[str, Any]:
615615 else :
616616 scan_iter = self .redis .scan_iter (match = key_pattern , _type = "HASH" )
617617
618- async for _ in scan_iter : # type: ignore[misc]
618+ async for _ in scan_iter : # type: ignore[misc,union-attr ]
619619 checked_keys += 1
620620
621621 except Exception as e :
@@ -644,7 +644,7 @@ async def _verify_model_data(
644644 else :
645645 scan_iter = self .redis .scan_iter (match = key_pattern , _type = "HASH" )
646646
647- async for key in scan_iter : # type: ignore[misc]
647+ async for key in scan_iter : # type: ignore[misc,union-attr ]
648648 if isinstance (key , bytes ):
649649 key = key .decode ("utf-8" )
650650
@@ -770,8 +770,8 @@ async def get_migration_statistics(self) -> Dict[str, Any]:
770770 datetime_fields .append (field_name )
771771
772772 if datetime_fields :
773- stats ["models_with_datetime_fields" ] += 1
774- stats ["total_datetime_fields" ] += len (datetime_fields )
773+ stats ["models_with_datetime_fields" ] += 1 # type: ignore
774+ stats ["total_datetime_fields" ] += len (datetime_fields ) # type: ignore
775775
776776 # Count keys for this model
777777 key_pattern = model_class .make_key ("*" )
@@ -790,12 +790,12 @@ async def get_migration_statistics(self) -> Dict[str, Any]:
790790 match = key_pattern , _type = "HASH"
791791 )
792792
793- async for _ in scan_iter : # type: ignore[misc]
793+ async for _ in scan_iter : # type: ignore[misc,union-attr ]
794794 key_count += 1
795795
796- stats ["estimated_keys_to_migrate" ] += key_count
796+ stats ["estimated_keys_to_migrate" ] += key_count # type: ignore
797797
798- stats ["model_details" ].append (
798+ stats ["model_details" ].append ( # type: ignore
799799 {
800800 "model_name" : model_name ,
801801 "model_type" : "JsonModel" if is_json_model else "HashModel" ,
0 commit comments