@@ -170,7 +170,7 @@ impl ValidationWorker {
                 };
                 let key = OutPointKey::from(&outpoint);
                 let serialized = borsh::to_vec(output)
-                    .map_err(|e| crate::state::Error::BorshSerialize(e))?;
+                    .map_err(crate::state::Error::BorshSerialize)?;
                 Ok::<(OutPointKey, Vec<u8>), crate::state::Error>((
                     key, serialized,
                 ))
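The change above relies on a tuple-variant constructor such as `crate::state::Error::BorshSerialize` being itself a function of the wrapped error, so the closure `|e| ...(e)` is redundant (clippy's `redundant_closure` lint). A minimal standalone sketch of the same pattern, using a stand-in error enum rather than this crate's actual `state::Error`:

```rust
// Stand-in error type; the real crate wraps a borsh serialization error instead.
#[derive(Debug)]
enum Error {
    Parse(std::num::ParseIntError),
}

fn parse_block_height(input: &str) -> Result<u64, Error> {
    // The variant constructor `Error::Parse` is a plain `fn(ParseIntError) -> Error`,
    // so it can be passed to `map_err` directly instead of `|e| Error::Parse(e)`.
    input.parse::<u64>().map_err(Error::Parse)
}

fn main() {
    assert_eq!(parse_block_height("42").unwrap(), 42);
    assert!(parse_block_height("not a number").is_err());
}
```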
@@ -283,7 +283,7 @@ impl ValidationWorker {
             };
 
             // Send result to Stage B coordinator
-            if let Err(_) = self.result_sender.send(validation_result) {
+            if self.result_sender.send(validation_result).is_err() {
                 tracing::warn!(
                     "Worker {} failed to send result - coordinator may have shut down",
                     self.worker_id
@@ -512,7 +512,7 @@ impl WriterCoordinator {
                 // Deserialize the SpentOutput to use with the typed database
                 let spent_output: SpentOutput =
                     borsh::from_slice(serialized_spent_output)
-                        .map_err(|e| Error::BorshDeserialize(e))?;
+                        .map_err(Error::BorshDeserialize)?;
                 self.state.stxos.put(rwtxn, key, &spent_output)?;
             }
 
@@ -521,7 +521,7 @@ impl WriterCoordinator {
                 // Deserialize the Output to use with the typed database
                 let output: crate::types::Output =
                     borsh::from_slice(serialized_output)
-                        .map_err(|e| Error::BorshDeserialize(e))?;
+                        .map_err(Error::BorshDeserialize)?;
                 self.state.utxos.put(rwtxn, key, &output)?;
             }
 
@@ -554,7 +554,7 @@ impl ParallelBlockProcessor {
         env: Arc<Env>,
         num_workers: usize,
     ) -> Result<Self, Error> {
-        let num_workers = num_workers.min(MAX_PARALLEL_BLOCKS).max(1);
+        let num_workers = num_workers.clamp(1, MAX_PARALLEL_BLOCKS);
 
         // Create channels for Stage A (parallel workers)
         let mut work_senders = Vec::with_capacity(num_workers);
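`clamp(1, MAX_PARALLEL_BLOCKS)` expresses the same bound as the old `.min(MAX_PARALLEL_BLOCKS).max(1)` chain, keeping the worker count in `1..=MAX_PARALLEL_BLOCKS`; note that `clamp` panics if the lower bound exceeds the upper, so this assumes `MAX_PARALLEL_BLOCKS >= 1`. A small sketch with an assumed constant value (the real constant is defined elsewhere in this crate):

```rust
// Assumed value for illustration; the crate defines its own MAX_PARALLEL_BLOCKS.
const MAX_PARALLEL_BLOCKS: usize = 8;

fn bounded_workers(requested: usize) -> usize {
    requested.clamp(1, MAX_PARALLEL_BLOCKS)
}

fn main() {
    assert_eq!(bounded_workers(0), 1);  // floored at one worker
    assert_eq!(bounded_workers(4), 4);  // in range: unchanged
    assert_eq!(bounded_workers(64), 8); // capped at MAX_PARALLEL_BLOCKS
}
```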
@@ -713,13 +713,10 @@ impl Drop for ParallelBlockProcessor {
         self.work_senders.clear();
 
         // Wait for coordinator if still running
-        if let Some(coordinator_handle) = self.coordinator_handle.take() {
-            if let Err(e) = coordinator_handle.join() {
-                tracing::error!(
-                    "Coordinator panicked during drop: {:?}",
-                    e
-                );
-            }
+        if let Some(coordinator_handle) = self.coordinator_handle.take()
+            && let Err(e) = coordinator_handle.join()
+        {
+            tracing::error!("Coordinator panicked during drop: {:?}", e);
         }
     }
 }
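The flattened `if let ... && let Err(e) = ...` form is a let-chain, so this assumes a toolchain where let-chains are stable (Rust 2024 edition); on older editions the previous nested `if let` is still required. A standalone sketch of the same shape, with a plain thread handle standing in for the coordinator:

```rust
// Requires edition 2024 (stable let-chains); equivalent to two nested `if let`s.
fn join_if_running(handle: &mut Option<std::thread::JoinHandle<()>>) {
    if let Some(h) = handle.take()
        && let Err(e) = h.join()
    {
        // In the real code this is a tracing::error! call.
        eprintln!("coordinator panicked during drop: {:?}", e);
    }
}

fn main() {
    let mut handle = Some(std::thread::spawn(|| panic!("boom")));
    join_if_running(&mut handle);
    assert!(handle.is_none());
}
```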
@@ -832,17 +829,17 @@ mod tests {
         };
 
         // Test coordination logic
-        if let Some(work) = pending_work.remove(&result.block_id) {
-            if result.result.is_ok() {
-                let pending = PendingBlock {
-                    block_id: work.block_id,
-                    header: work.header,
-                    body: work.body,
-                    prevalidated: result.result.unwrap(),
-                    serialized_data: result.serialized_data,
-                };
-                pending_blocks.insert(work.block_id, pending);
-            }
+        if let Some(work) = pending_work.remove(&result.block_id)
+            && result.result.is_ok()
+        {
+            let pending = PendingBlock {
+                block_id: work.block_id,
+                header: work.header,
+                body: work.body,
+                prevalidated: result.result.unwrap(),
+                serialized_data: result.serialized_data,
+            };
+            pending_blocks.insert(work.block_id, pending);
         }
 
         assert_eq!(pending_blocks.len(), 1);
@@ -900,6 +897,7 @@ mod tests {
 
     /// Integration test demonstrating Phase 1, 2, and 3 optimization compatibility
     #[test]
+    #[allow(clippy::assertions_on_constants)]
     fn test_phase_integration() {
         // This test validates that Phase 3 parallel processing integrates
         // correctly with Phase 1 (sorted operations) and Phase 2 (memory pools)
@@ -916,7 +914,7 @@ mod tests {
         assert_eq!(memory_pool.capacity(), 100);
 
         // Phase 3: Parallel processing maintains data integrity
-        let block_ids = vec![0, 1, 2, 3, 4];
+        let block_ids = [0, 1, 2, 3, 4];
         let processed_in_parallel =
             block_ids.iter().map(|&id| id * 2).collect::<Vec<_>>();
         assert_eq!(processed_in_parallel, vec![0, 2, 4, 6, 8]);
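A fixed-size array is enough here because `block_ids` is only iterated, so the `vec![...]` heap allocation was unnecessary (clippy's `useless_vec` lint); the final `assert_eq!` is unaffected since it compares two `Vec`s. A tiny sketch of the same check:

```rust
fn main() {
    // Array instead of vec![]: no heap allocation is needed just to iterate.
    let block_ids = [0u32, 1, 2, 3, 4];
    let doubled: Vec<u32> = block_ids.iter().map(|&id| id * 2).collect();
    assert_eq!(doubled, vec![0, 2, 4, 6, 8]);
}
```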