Skip to content

Commit 16b5007

Browse files
max-lt and claude committed
Add v8::Locker, v8::Unlocker, and UnenteredIsolate bindings
Add support for multi-threaded isolate pooling architectures:

- UnenteredIsolate: An isolate type that doesn't auto-enter, allowing flexible ownership and no LIFO drop constraint
- Locker: Acquires exclusive access to an isolate for the current thread
- Unlocker: Temporarily releases a lock within a Locker scope

Key implementation details:

- Use isolate.as_real_ptr() for correct pointer casting
- Implement Send for UnenteredIsolate (thread safety via Locker)
- Add NewHandleScope impl for UnenteredIsolate
- Each thread must call enter() before first use to set up V8's thread-local state (LocalHeap)

This enables architectures similar to Cloudflare Workers where isolates can be pooled and shared across threads.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1 parent 4d9f927 commit 16b5007

File tree

6 files changed

+581
-2
lines changed

6 files changed

+581
-2
lines changed

src/binding.cc

Lines changed: 17 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -170,6 +170,23 @@ void v8__Isolate__Enter(v8::Isolate* isolate) { isolate->Enter(); }
170170

171171
void v8__Isolate__Exit(v8::Isolate* isolate) { isolate->Exit(); }
172172

173+
void v8__Locker__CONSTRUCT(uninit_t<v8::Locker>* buf, v8::Isolate* isolate) {
174+
construct_in_place<v8::Locker>(buf, isolate);
175+
}
176+
177+
void v8__Locker__DESTRUCT(v8::Locker* self) { self->~Locker(); }
178+
179+
bool v8__Locker__IsLocked(v8::Isolate* isolate) {
180+
return v8::Locker::IsLocked(isolate);
181+
}
182+
183+
void v8__Unlocker__CONSTRUCT(uninit_t<v8::Unlocker>* buf,
184+
v8::Isolate* isolate) {
185+
construct_in_place<v8::Unlocker>(buf, isolate);
186+
}
187+
188+
void v8__Unlocker__DESTRUCT(v8::Unlocker* self) { self->~Unlocker(); }
189+
173190
v8::Isolate* v8__Isolate__GetCurrent() { return v8::Isolate::GetCurrent(); }
174191

175192
const v8::Data* v8__Isolate__GetCurrentHostDefinedOptions(

src/isolate.rs

Lines changed: 163 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -914,6 +914,26 @@ impl Isolate {
914914
OwnedIsolate::new(Self::new_impl(params))
915915
}
916916

917+
/// Creates a new isolate that does not automatically enter/exit.
918+
///
919+
/// This is designed for use with `v8::Locker` in multi-threaded scenarios.
920+
/// Unlike `Isolate::new()` which returns an `OwnedIsolate` that automatically
921+
/// enters on creation and exits on drop, this returns an `UnenteredIsolate`
922+
/// that requires manual entry management via `v8::Locker`.
923+
///
924+
/// # Example
925+
/// ```ignore
926+
/// let isolate = v8::Isolate::new_unentered(params);
927+
/// let locker = v8::Locker::new(&*isolate);
928+
/// // Use isolate...
929+
/// ```
930+
///
931+
/// V8::initialize() must have run prior to this.
932+
#[allow(clippy::new_ret_no_self)]
933+
pub fn new_unentered(params: CreateParams) -> UnenteredIsolate {
934+
UnenteredIsolate::new(Self::new_impl(params))
935+
}
936+
917937
#[allow(clippy::new_ret_no_self)]
918938
pub fn snapshot_creator(
919939
external_references: Option<Cow<'static, [ExternalReference]>>,
@@ -2118,6 +2138,83 @@ impl AsMut<Isolate> for Isolate {
21182138
}
21192139
}
21202140

2141+
/// An isolate that does not automatically enter/exit, designed for use with v8::Locker.
2142+
///
2143+
/// Unlike `OwnedIsolate` which automatically calls `enter()` on creation and `exit()` on drop,
2144+
/// `UnenteredIsolate` requires manual entry management via `v8::Locker`.
2145+
///
2146+
/// This is useful for multi-threaded scenarios where isolates are shared across threads
2147+
/// using the Locker pattern, as automatic enter/exit conflicts with Locker semantics.
2148+
///
2149+
/// # Example
2150+
/// ```ignore
2151+
/// use v8::{Isolate, UnenteredIsolate, Locker};
2152+
///
2153+
/// // Create isolate without entering
2154+
/// let isolate = Isolate::new_unentered(params);
2155+
///
2156+
/// // Lock and enter on current thread
2157+
/// let locker = Locker::new(&*isolate);
2158+
///
2159+
/// // Use isolate...
2160+
///
2161+
/// // Locker drop unlocks, isolate drop disposes (no exit call)
2162+
/// ```
2163+
#[derive(Debug)]
2164+
pub struct UnenteredIsolate {
2165+
cxx_isolate: NonNull<RealIsolate>,
2166+
}
2167+
2168+
impl UnenteredIsolate {
2169+
pub(crate) fn new(cxx_isolate: *mut RealIsolate) -> Self {
2170+
let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
2171+
Self { cxx_isolate }
2172+
}
2173+
}
2174+
2175+
impl Drop for UnenteredIsolate {
2176+
fn drop(&mut self) {
2177+
unsafe {
2178+
let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
2179+
assert!(
2180+
snapshot_creator.is_none(),
2181+
"If isolate was created using v8::Isolate::snapshot_creator, you should use v8::UnenteredIsolate::create_blob before dropping an isolate."
2182+
);
2183+
2184+
// Unlike OwnedIsolate, we don't call exit() here as the isolate
2185+
// was never entered (or was entered/exited via Locker).
2186+
// We also don't assert on GetCurrent() since Locker manages entry.
2187+
2188+
self.dispose_annex();
2189+
Platform::notify_isolate_shutdown(&get_current_platform(), self);
2190+
self.dispose();
2191+
}
2192+
}
2193+
}
2194+
2195+
impl Deref for UnenteredIsolate {
2196+
type Target = Isolate;
2197+
fn deref(&self) -> &Self::Target {
2198+
unsafe { Isolate::from_raw_ref(&self.cxx_isolate) }
2199+
}
2200+
}
2201+
2202+
impl DerefMut for UnenteredIsolate {
2203+
fn deref_mut(&mut self) -> &mut Self::Target {
2204+
unsafe { Isolate::from_raw_ref_mut(&mut self.cxx_isolate) }
2205+
}
2206+
}
2207+
2208+
impl AsMut<Isolate> for UnenteredIsolate {
2209+
fn as_mut(&mut self) -> &mut Isolate {
2210+
self
2211+
}
2212+
}
2213+
2214+
// SAFETY: UnenteredIsolate can be sent between threads.
2215+
// Thread safety is ensured by using v8::Locker before accessing the isolate.
2216+
unsafe impl Send for UnenteredIsolate {}
2217+
21212218
/// Collection of V8 heap information.
21222219
///
21232220
/// Instances of this class can be passed to v8::Isolate::GetHeapStatistics to
@@ -2417,3 +2514,69 @@ impl AsRef<Isolate> for Isolate {
24172514
self
24182515
}
24192516
}
2517+
2518+
/// A stack-allocated class that governs exclusive access to an isolate.
2519+
///
2520+
/// Locks V8 in a given thread. All threads writing to a single isolate must
2521+
/// use a `Locker` to ensure thread-safe access.
2522+
///
2523+
/// The isolate is automatically unlocked when the `Locker` goes out of scope.
2524+
pub struct Locker {
2525+
_raw: crate::scope::raw::Locker,
2526+
_no_send: std::marker::PhantomData<*mut ()>,
2527+
}
2528+
2529+
impl Locker {
2530+
/// Initialize Locker for a given Isolate.
2531+
pub fn new(isolate: &Isolate) -> Self {
2532+
let mut raw = unsafe { crate::scope::raw::Locker::uninit() };
2533+
let isolate_ptr = NonNull::new(isolate.as_real_ptr()).unwrap();
2534+
2535+
unsafe {
2536+
raw.init(isolate_ptr);
2537+
}
2538+
2539+
Self {
2540+
_raw: raw,
2541+
_no_send: std::marker::PhantomData,
2542+
}
2543+
}
2544+
2545+
/// Returns whether or not the locker for a given isolate is locked by the
2546+
/// current thread.
2547+
pub fn is_locked(isolate: &Isolate) -> bool {
2548+
let isolate_ptr = NonNull::new(isolate.as_real_ptr()).unwrap();
2549+
2550+
crate::scope::raw::Locker::is_locked(isolate_ptr)
2551+
}
2552+
}
2553+
2554+
/// A stack-allocated class that temporarily unlocks an isolate.
2555+
///
2556+
/// An Unlocker may be used to temporarily release the lock on an isolate,
2557+
/// allowing other threads to access it. This is useful for long-running
2558+
/// operations where you want to yield control to other threads.
2559+
///
2560+
/// An Unlocker can only be used within the scope of a `Locker` and will
2561+
/// restore the lock when it goes out of scope.
2562+
pub struct Unlocker {
2563+
_raw: crate::scope::raw::Unlocker,
2564+
_no_send: std::marker::PhantomData<*mut ()>,
2565+
}
2566+
2567+
impl Unlocker {
2568+
/// Initialize Unlocker for a given Isolate.
2569+
pub fn new(isolate: &Isolate) -> Self {
2570+
let mut raw = unsafe { crate::scope::raw::Unlocker::uninit() };
2571+
let isolate_ptr = NonNull::new(isolate.as_real_ptr()).unwrap();
2572+
2573+
unsafe {
2574+
raw.init(isolate_ptr);
2575+
}
2576+
2577+
Self {
2578+
_raw: raw,
2579+
_no_send: std::marker::PhantomData,
2580+
}
2581+
}
2582+
}

src/lib.rs

Lines changed: 3 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -114,6 +114,7 @@ pub use isolate::HostImportModuleWithPhaseDynamicallyCallback;
114114
pub use isolate::HostInitializeImportMetaObjectCallback;
115115
pub use isolate::Isolate;
116116
pub use isolate::IsolateHandle;
117+
pub use isolate::Locker;
117118
pub use isolate::MemoryPressureLevel;
118119
pub use isolate::MessageCallback;
119120
pub use isolate::MessageErrorLevel;
@@ -128,6 +129,8 @@ pub use isolate::PromiseHookType;
128129
pub use isolate::PromiseRejectCallback;
129130
pub use isolate::RealIsolate;
130131
pub use isolate::TimeZoneDetection;
132+
pub use isolate::UnenteredIsolate;
133+
pub use isolate::Unlocker;
131134
pub use isolate::UseCounterCallback;
132135
pub use isolate::UseCounterFeature;
133136
pub use isolate::WasmAsyncSuccess;

src/scope.rs

Lines changed: 16 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -128,8 +128,8 @@
128128
use crate::{
129129
Context, Data, DataError, Function, FunctionCallbackInfo, Isolate, Local,
130130
Message, Object, OwnedIsolate, PromiseRejectMessage, PropertyCallbackInfo,
131-
SealedLocal, Value, fast_api::FastApiCallbackOptions, isolate::RealIsolate,
132-
support::assert_layout_subset,
131+
SealedLocal, UnenteredIsolate, Value, fast_api::FastApiCallbackOptions,
132+
isolate::RealIsolate, support::assert_layout_subset,
133133
};
134134
use std::{
135135
any::type_name,
@@ -442,6 +442,20 @@ impl<'s> NewHandleScope<'s> for OwnedIsolate {
442442
}
443443
}
444444

445+
impl<'s> NewHandleScope<'s> for UnenteredIsolate {
446+
type NewScope = HandleScope<'s, ()>;
447+
448+
fn make_new_scope(me: &'s mut Self) -> Self::NewScope {
449+
HandleScope {
450+
raw_handle_scope: unsafe { raw::HandleScope::uninit() },
451+
isolate: unsafe { NonNull::new_unchecked(me.get_isolate_ptr()) },
452+
context: Cell::new(None),
453+
_phantom: PhantomData,
454+
_pinned: PhantomPinned,
455+
}
456+
}
457+
}
458+
445459
impl<'s, 'p: 's, 'i, C> NewHandleScope<'s>
446460
for PinnedRef<'p, CallbackScope<'i, C>>
447461
{

src/scope/raw.rs

Lines changed: 84 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -219,6 +219,77 @@ impl Drop for AllowJavascriptExecutionScope {
219219
}
220220
}
221221

222+
#[repr(C)]
223+
#[derive(Debug)]
224+
pub(crate) struct Locker([MaybeUninit<usize>; 2]);
225+
226+
impl Locker {
227+
/// Creates an uninitialized `Locker`.
228+
///
229+
/// This function is marked unsafe because the caller must ensure that the
230+
/// returned value isn't dropped before `init()` has been called.
231+
#[inline]
232+
pub unsafe fn uninit() -> Self {
233+
Self(unsafe { MaybeUninit::uninit().assume_init() })
234+
}
235+
236+
/// This function is marked unsafe because `init()` must be called exactly
237+
/// once, no more and no less, after creating a `Locker` value with
238+
/// `Locker::uninit()`.
239+
#[inline]
240+
pub unsafe fn init(&mut self, isolate: NonNull<RealIsolate>) {
241+
let buf = NonNull::from(self).cast();
242+
unsafe {
243+
v8__Locker__CONSTRUCT(buf.as_ptr(), isolate.as_ptr());
244+
}
245+
}
246+
247+
/// Check if the isolate is locked by any thread.
248+
pub fn is_locked(isolate: NonNull<RealIsolate>) -> bool {
249+
unsafe { v8__Locker__IsLocked(isolate.as_ptr()) }
250+
}
251+
}
252+
253+
impl Drop for Locker {
254+
#[inline(always)]
255+
fn drop(&mut self) {
256+
unsafe { v8__Locker__DESTRUCT(self) };
257+
}
258+
}
259+
260+
#[repr(C)]
261+
#[derive(Debug)]
262+
pub(crate) struct Unlocker([MaybeUninit<usize>; 2]);
263+
264+
impl Unlocker {
265+
/// Creates an uninitialized `Unlocker`.
266+
///
267+
/// This function is marked unsafe because the caller must ensure that the
268+
/// returned value isn't dropped before `init()` has been called.
269+
#[inline]
270+
pub unsafe fn uninit() -> Self {
271+
Self(unsafe { MaybeUninit::uninit().assume_init() })
272+
}
273+
274+
/// This function is marked unsafe because `init()` must be called exactly
275+
/// once, no more and no less, after creating an `Unlocker` value with
276+
/// `Unlocker::uninit()`.
277+
#[inline]
278+
pub unsafe fn init(&mut self, isolate: NonNull<RealIsolate>) {
279+
let buf = NonNull::from(self).cast();
280+
unsafe {
281+
v8__Unlocker__CONSTRUCT(buf.as_ptr(), isolate.as_ptr());
282+
}
283+
}
284+
}
285+
286+
impl Drop for Unlocker {
287+
#[inline(always)]
288+
fn drop(&mut self) {
289+
unsafe { v8__Unlocker__DESTRUCT(self) };
290+
}
291+
}
292+
222293
unsafe extern "C" {
223294
pub(super) fn v8__Isolate__GetCurrent() -> *mut RealIsolate;
224295
pub(super) fn v8__Isolate__GetCurrentContext(
@@ -311,4 +382,17 @@ unsafe extern "C" {
311382
pub(super) fn v8__AllowJavascriptExecutionScope__DESTRUCT(
312383
this: *mut AllowJavascriptExecutionScope,
313384
);
385+
386+
pub(super) fn v8__Locker__CONSTRUCT(
387+
buf: *mut MaybeUninit<Locker>,
388+
isolate: *mut RealIsolate,
389+
);
390+
pub(super) fn v8__Locker__DESTRUCT(this: *mut Locker);
391+
pub(super) fn v8__Locker__IsLocked(isolate: *mut RealIsolate) -> bool;
392+
393+
pub(super) fn v8__Unlocker__CONSTRUCT(
394+
buf: *mut MaybeUninit<Unlocker>,
395+
isolate: *mut RealIsolate,
396+
);
397+
pub(super) fn v8__Unlocker__DESTRUCT(this: *mut Unlocker);
314398
}

0 commit comments

Comments (0)