@@ -2511,10 +2511,21 @@ impl AsRef<Isolate> for Isolate {
25112511 }
25122512}
25132513
2514- /// Locks an isolate and enters it for the current thread .
2514+ /// Acquires V8's per-isolate mutex (pure mutex, no Enter/Exit).
25152515///
2516- /// This is a RAII wrapper around V8's `v8::Locker`. It ensures that the isolate
2517- /// is properly locked before any V8 operations and unlocked when dropped.
2516+ /// After `Locker::new()`, the isolate is locked but **not** entered — i.e.
2517+ /// `Isolate::GetCurrent()` does not return this isolate. Use [`IsolateScope`]
2518+ /// (via [`Locker::enter()`]) to enter the isolate before performing V8 work.
2519+ ///
2520+ /// # Why the Locker doesn't Enter/Exit
2521+ ///
2522+ /// V8's `Isolate::Enter()`/`Exit()` manage a per-isolate `entry_stack_` that
2523+ /// stores the "previous isolate" at each nesting level. When multiple Lockers
2524+ /// coexist on the same thread (e.g. cooperative scheduling via `spawn_local`),
2525+ /// non-LIFO drop ordering causes `Exit()` to restore a stale pointer, leading
2526+ /// to a NULL dereference on the next `Enter()`. By making the Locker a pure
2527+ /// mutex and delegating Enter/Exit to short-lived [`IsolateScope`] guards,
2528+ /// each Enter/Exit pair is properly nested within a single synchronous block.
25182529///
25192530/// # Thread Safety
25202531///
@@ -2532,7 +2543,9 @@ pub struct Locker<'a> {
25322543 isolate : & ' a mut UnenteredIsolate ,
25332544}
25342545
/// Drop guard that calls `Isolate::Exit()` to undo the temporary Enter
/// performed in `Locker::new()`. On the success path the guard is dropped
/// explicitly (calling Exit); if a panic unwinds first, `Drop` runs the same
/// cleanup, so the thread's "current isolate" never leaks past `new()`.
struct IsolateExitGuard(*mut RealIsolate);
25372550
25382551impl Drop for IsolateExitGuard {
@@ -2544,40 +2557,46 @@ impl Drop for IsolateExitGuard {
25442557impl < ' a > Locker < ' a > {
  /// Creates a new `Locker` for the given isolate.
  ///
  /// Acquires V8's per-isolate mutex. The isolate is **not** entered after
  /// construction — use [`Locker::enter()`] to obtain an [`IsolateScope`]
  /// before performing any V8 work.
  pub fn new(isolate: &'a mut UnenteredIsolate) -> Self {
    let isolate_ptr = isolate.cxx_isolate;

    // Temporarily enter the isolate so the C++ Locker constructor sees
    // IsCurrent() == true, sets top_level_ = false, and therefore skips its
    // own Enter/Exit — turning it into a pure mutex.
    unsafe { v8__Isolate__Enter(isolate_ptr.as_ptr()) };

    // Guard: calls Exit on both the success path (explicit drop below) and
    // the panic path, so the temporary Enter can never leak.
    let exit_guard = IsolateExitGuard(isolate_ptr.as_ptr());

    // Initialize the C++ Locker (acts as a pure mutex since top_level_ is
    // false; see the Enter above).
    let mut raw = unsafe { crate::scope::raw::Locker::uninit() };
    unsafe { raw.init(isolate_ptr) };

    // Undo the temporary Enter — the isolate is now locked but not entered.
    // NOTE: this must happen after raw.init() so the C++ Locker observed the
    // isolate as current during construction.
    drop(exit_guard);

    Self {
      raw: std::mem::ManuallyDrop::new(raw),
      isolate,
    }
  }
25802584
2585+ /// Enter the isolate, returning an [`IsolateScope`] that exits on drop.
2586+ ///
2587+ /// ```ignore
2588+ /// let mut locker = v8::Locker::new(&mut isolate);
2589+ /// let _scope = locker.enter();
2590+ /// let scope = std::pin::pin!(v8::HandleScope::new(&mut *locker));
2591+ /// // ... V8 work ...
2592+ /// // IsolateScope dropped → Isolate::Exit() → safe to yield
2593+ /// ```
2594+ pub fn enter ( & mut self ) -> IsolateScope {
2595+ // SAFETY: Locker holds &mut UnenteredIsolate (valid pointer) and
2596+ // the V8 mutex (acquired in new()).
2597+ unsafe { IsolateScope :: new ( self ) }
2598+ }
2599+
25812600 /// Returns `true` if the given isolate is currently locked by any `Locker`.
25822601 pub fn is_locked ( isolate : & UnenteredIsolate ) -> bool {
25832602 crate :: scope:: raw:: Locker :: is_locked ( isolate. cxx_isolate )
@@ -2586,9 +2605,10 @@ impl<'a> Locker<'a> {
25862605
25872606impl Drop for Locker < ' _ > {
25882607 fn drop ( & mut self ) {
2608+ // Release the V8 mutex only — no Exit() needed since the Locker
2609+ // never entered the isolate.
25892610 unsafe {
25902611 std:: mem:: ManuallyDrop :: drop ( & mut self . raw ) ;
2591- v8__Isolate__Exit ( self . isolate . cxx_isolate . as_ptr ( ) ) ;
25922612 }
25932613 }
25942614}
@@ -2605,3 +2625,60 @@ impl DerefMut for Locker<'_> {
26052625 unsafe { Isolate :: from_raw_ref_mut ( & mut self . isolate . cxx_isolate ) }
26062626 }
26072627}
2628+
2629+ /// RAII scope that enters a V8 isolate on creation and exits on drop.
2630+ ///
2631+ /// Calls `Isolate::Enter()` / `Isolate::Exit()`, managing V8's per-thread
2632+ /// `Isolate::GetCurrent()` thread-local. Rust equivalent of C++
2633+ /// `v8::Isolate::Scope`.
2634+ ///
2635+ /// # Usage
2636+ ///
2637+ /// ```ignore
2638+ /// let mut locker = v8::Locker::new(&mut isolate); // locks mutex only
2639+ /// {
2640+ /// let _scope = locker.enter(); // Enter()
2641+ /// let scope = pin!(v8::HandleScope::new(&mut *locker));
2642+ /// // ... V8 work ...
2643+ /// }
2644+ /// // IsolateScope dropped → Exit() → safe to yield
2645+ /// ```
2646+ ///
2647+ /// # Why it exists
2648+ ///
2649+ /// When multiple tasks share a thread (e.g. `spawn_local`), each holding a
2650+ /// [`Locker`] for a different isolate, `GetCurrent()` can be stale after a
2651+ /// yield. Wrapping each synchronous V8 block in an `IsolateScope` ensures
2652+ /// correctness. V8's entry stack is per-isolate, so entering X does not
2653+ /// interfere with Y's stack.
2654+ ///
2655+ /// # Why `unsafe`
2656+ ///
2657+ /// Stores a raw `*mut Isolate` to avoid exclusively borrowing the [`Locker`]
2658+ /// (which would prevent creating `HandleScope` etc. from the same Locker).
2659+ /// The caller must ensure:
2660+ ///
2661+ /// - A [`Locker`] is held for this isolate for the entire scope lifetime.
2662+ /// - The scope is dropped before any `.await` (stale `GetCurrent()` otherwise).
2663+ pub struct IsolateScope {
2664+ isolate : * mut Isolate ,
2665+ }
2666+
2667+ impl IsolateScope {
2668+ /// Enter the isolate (`Isolate::Enter()`). On drop, calls `Isolate::Exit()`.
2669+ ///
2670+ /// # Safety
2671+ ///
2672+ /// A [`Locker`] must be held for this isolate for the entire lifetime of the
2673+ /// returned scope.
2674+ pub unsafe fn new ( isolate : & mut Isolate ) -> Self {
2675+ unsafe { isolate. enter ( ) } ;
2676+ Self { isolate }
2677+ }
2678+ }
2679+
impl Drop for IsolateScope {
  fn drop(&mut self) {
    // SAFETY: per the contract of `IsolateScope::new()`, the caller keeps a
    // Locker (and thus the isolate) alive for the scope's entire lifetime,
    // so the raw pointer is still valid here.
    unsafe { (*self.isolate).exit() };
  }
}
0 commit comments