From 98339cf32743467105c9d30cac222e284eceb28d Mon Sep 17 00:00:00 2001 From: snek Date: Tue, 4 Feb 2025 15:30:40 +0100 Subject: [PATCH 01/17] fix(ext/napi): napi_is_buffer tests for ArrayBufferView (#27956) use correct type check Fixes: https://github.com/denoland/deno/issues/27951 --- ext/napi/node_api.rs | 12 +----------- tests/napi/src/typedarray.rs | 16 ++++++++++++++++ tests/napi/typedarray_test.js | 9 +++++++++ 3 files changed, 26 insertions(+), 11 deletions(-) diff --git a/ext/napi/node_api.rs b/ext/napi/node_api.rs index 13ea0b3e625ebe..fc7377de0880a4 100644 --- a/ext/napi/node_api.rs +++ b/ext/napi/node_api.rs @@ -400,18 +400,8 @@ fn napi_is_buffer( check_arg!(env, value); check_arg!(env, result); - let buffer_constructor = - v8::Local::new(&mut env.scope(), &env.buffer_constructor); - - let Some(is_buffer) = value - .unwrap() - .instance_of(&mut env.scope(), buffer_constructor.into()) - else { - return napi_set_last_error(env, napi_generic_failure); - }; - unsafe { - *result = is_buffer; + *result = value.unwrap().is_array_buffer_view(); } napi_clear_last_error(env) diff --git a/tests/napi/src/typedarray.rs b/tests/napi/src/typedarray.rs index 95adf957e45ec1..a70aaaf6b9acab 100644 --- a/tests/napi/src/typedarray.rs +++ b/tests/napi/src/typedarray.rs @@ -144,10 +144,26 @@ extern "C" fn test_external( typedarray } +extern "C" fn test_is_buffer( + env: napi_env, + info: napi_callback_info, +) -> napi_value { + let (args, argc, _) = napi_get_callback_info!(env, info, 1); + assert_eq!(argc, 1); + + let mut is_buffer: bool = false; + assert_napi_ok!(napi_is_buffer(env, args[0], &mut is_buffer)); + + let mut result: napi_value = std::ptr::null_mut(); + assert_napi_ok!(napi_get_boolean(env, is_buffer, &mut result)); + result +} + pub fn init(env: napi_env, exports: napi_value) { let properties = &[ napi_new_property!(env, "test_external", test_external), napi_new_property!(env, "test_multiply", test_multiply), + napi_new_property!(env, "test_is_buffer", test_is_buffer), ]; assert_napi_ok!(napi_define_properties( diff --git a/tests/napi/typedarray_test.js b/tests/napi/typedarray_test.js index f7887e4b1c8ba5..36f29a203fc33c 100644 --- a/tests/napi/typedarray_test.js +++ b/tests/napi/typedarray_test.js @@ -1,5 +1,6 @@ // Copyright 2018-2025 the Deno authors. MIT license. +import { Buffer } from "node:buffer"; import { assert, assertEquals, loadTestLibrary } from "./common.js"; const typedarray = loadTestLibrary(); @@ -28,6 +29,14 @@ Deno.test("napi typedarray float64", function () { assertEquals(Math.round(10 * doubleResult[2]) / 10, -6.6); }); +Deno.test("napi_is_buffer", () => { + assert(!typedarray.test_is_buffer(5)); + assert(!typedarray.test_is_buffer([])); + assert(typedarray.test_is_buffer(new Uint8Array())); + assert(typedarray.test_is_buffer(new Uint32Array())); + assert(typedarray.test_is_buffer(new Buffer([]))); +}); + // TODO(bartlomieju): this test causes segfaults when used with jemalloc. // Node documentation provides a hint that this function is not supported by // other runtime like electron. 
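For context on the change above: `v8::Value::is_array_buffer_view()` is true for every typed array and for `DataView`, so with this patch `napi_is_buffer` reports `true` for any ArrayBufferView rather than only for values that are `instanceof Buffer`. The following is a minimal sketch of what that means when calling the test addon from JavaScript, written in the style of `tests/napi/typedarray_test.js`; the `DataView` and `ArrayBuffer` cases are illustrative and are not part of the test added in this patch.

```js
import { assert, loadTestLibrary } from "./common.js";

const typedarray = loadTestLibrary();

// Any ArrayBufferView counts as a buffer under the new check.
assert(typedarray.test_is_buffer(new Uint8Array(4)));
assert(typedarray.test_is_buffer(new DataView(new ArrayBuffer(4))));

// A bare ArrayBuffer is not a view, and plain objects are not buffers either.
assert(!typedarray.test_is_buffer(new ArrayBuffer(4)));
assert(!typedarray.test_is_buffer({}));
```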
From 28834a89bb85d708ec7b67ab844724e1221cc6cf Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Tue, 4 Feb 2025 21:59:13 +0530 Subject: [PATCH 02/17] fix(ext/node): implement SQLite Session API (#27909) https://nodejs.org/api/sqlite.html#class-session --------- Signed-off-by: Divy Srivastava --- Cargo.lock | 31 +++++++- Cargo.toml | 2 +- ext/node/lib.rs | 1 + ext/node/ops/sqlite/database.rs | 62 ++++++++++++++++ ext/node/ops/sqlite/mod.rs | 14 +++- ext/node/ops/sqlite/session.rs | 124 ++++++++++++++++++++++++++++++++ tests/unit_node/sqlite_test.ts | 21 ++++++ 7 files changed, 250 insertions(+), 5 deletions(-) create mode 100644 ext/node/ops/sqlite/session.rs diff --git a/Cargo.lock b/Cargo.lock index cf0e5efc0562bd..5b41c6387e4c4c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -511,6 +511,26 @@ dependencies = [ "serde", ] +[[package]] +name = "bindgen" +version = "0.69.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" +dependencies = [ + "bitflags 2.6.0", + "cexpr", + "clang-sys", + "itertools 0.10.5", + "lazy_static", + "lazycell", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn 2.0.87", +] + [[package]] name = "bindgen" version = "0.70.1" @@ -4819,6 +4839,12 @@ dependencies = [ "spin", ] +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "libc" version = "0.2.168" @@ -4886,6 +4912,7 @@ version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" dependencies = [ + "bindgen 0.69.5", "cc", "pkg-config", "vcpkg", @@ -4910,7 +4937,7 @@ version = "1.48.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca8dfd1a173826d193e3b955e07c22765829890f62c677a59c4a410cb4f47c01" dependencies = [ - "bindgen", + "bindgen 0.70.1", "libloading 0.8.5", ] @@ -8689,7 +8716,7 @@ version = "130.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a511192602f7b435b0a241c1947aa743eb7717f20a9195f4b5e8ed1952e01db1" dependencies = [ - "bindgen", + "bindgen 0.70.1", "bitflags 2.6.0", "fslock", "gzip-header", diff --git a/Cargo.toml b/Cargo.toml index ca32b029dfbe1c..1ba196d6619f93 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -184,7 +184,7 @@ rand = "=0.8.5" regex = "^1.7.0" reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955 ring = "^0.17.0" -rusqlite = { version = "0.32.0", features = ["unlock_notify", "bundled"] } +rusqlite = { version = "0.32.0", features = ["unlock_notify", "bundled", "session"] } rustls = { version = "0.23.11", default-features = false, features = ["logging", "std", "tls12", "ring"] } rustls-pemfile = "2" rustls-tokio-stream = "=0.3.0" diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 68d16bfd6a2b80..ac68e875b40017 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -443,6 +443,7 @@ deno_core::extension!(deno_node, objects = [ ops::perf_hooks::EldHistogram, ops::sqlite::DatabaseSync, + ops::sqlite::Session, ops::sqlite::StatementSync ], esm_entry_point = "ext:deno_node/02_init.js", diff --git a/ext/node/ops/sqlite/database.rs b/ext/node/ops/sqlite/database.rs index 
03ccc7fb53b5fe..73063b6276ba90 100644 --- a/ext/node/ops/sqlite/database.rs +++ b/ext/node/ops/sqlite/database.rs @@ -2,6 +2,8 @@ use std::cell::Cell; use std::cell::RefCell; +use std::ffi::CString; +use std::ptr::null; use std::rc::Rc; use deno_core::op2; @@ -10,6 +12,8 @@ use deno_core::OpState; use deno_permissions::PermissionsContainer; use serde::Deserialize; +use super::session::SessionOptions; +use super::Session; use super::SqliteError; use super::StatementSync; @@ -192,4 +196,62 @@ impl DatabaseSync { use_big_ints: Cell::new(false), }) } + + // Creates and attaches a session to the database. + // + // This method is a wrapper around `sqlite3session_create()` and + // `sqlite3session_attach()`. + #[cppgc] + fn create_session( + &self, + #[serde] options: Option<SessionOptions>, + ) -> Result<Session, SqliteError> { + let db = self.conn.borrow(); + let db = db.as_ref().ok_or(SqliteError::AlreadyClosed)?; + + // SAFETY: lifetime of the connection is guaranteed by reference + // counting. + let raw_handle = unsafe { db.handle() }; + + let mut raw_session = std::ptr::null_mut(); + let mut options = options; + + let z_db = options + .as_mut() + .and_then(|options| options.db.take()) + .map(|db| CString::new(db).unwrap()) + .unwrap_or_else(|| CString::new("main").unwrap()); + // SAFETY: `z_db` points to a valid c-string. + let r = unsafe { + libsqlite3_sys::sqlite3session_create( + raw_handle, + z_db.as_ptr() as *const _, + &mut raw_session, + ) + }; + + if r != libsqlite3_sys::SQLITE_OK { + return Err(SqliteError::SessionCreateFailed); + } + + let table = options + .as_mut() + .and_then(|options| options.table.take()) + .map(|table| CString::new(table).unwrap()); + let z_table = table.as_ref().map(|table| table.as_ptr()).unwrap_or(null()); + let r = + // SAFETY: `z_table` points to a valid c-string and `raw_session` + // is a valid session handle. + unsafe { libsqlite3_sys::sqlite3session_attach(raw_session, z_table) }; + + if r != libsqlite3_sys::SQLITE_OK { + return Err(SqliteError::SessionCreateFailed); + } + + Ok(Session { + inner: raw_session, + freed: Cell::new(false), + _db: self.conn.clone(), + }) + } } diff --git a/ext/node/ops/sqlite/mod.rs b/ext/node/ops/sqlite/mod.rs index c53f244cc70c2c..d3c273a66347d9 100644 --- a/ext/node/ops/sqlite/mod.rs +++ b/ext/node/ops/sqlite/mod.rs @@ -1,17 +1,18 @@ // Copyright 2018-2025 the Deno authors. MIT license. mod database; +mod session; mod statement; pub use database::DatabaseSync; -use deno_permissions::PermissionCheckError; +pub use session::Session; pub use statement::StatementSync; #[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum SqliteError { #[class(inherit)] #[error(transparent)] - Permission(#[from] PermissionCheckError), + Permission(#[from] deno_permissions::PermissionCheckError), #[class(generic)] #[error(transparent)] SqliteError(#[from] rusqlite::Error), @@ -40,6 +41,15 @@ pub enum SqliteError { #[error("Failed to prepare statement")] PrepareFailed, #[class(generic)] + #[error("Failed to create session")] + SessionCreateFailed, + #[class(generic)] + #[error("Failed to retrieve changeset")] + SessionChangesetFailed, + #[class(generic)] + #[error("Session is already closed")] + SessionClosed, + #[class(generic)] #[error("Invalid constructor")] InvalidConstructor, #[class(generic)] diff --git a/ext/node/ops/sqlite/session.rs b/ext/node/ops/sqlite/session.rs new file mode 100644 index 00000000000000..520904d536ffac --- /dev/null +++ b/ext/node/ops/sqlite/session.rs @@ -0,0 +1,124 @@ +// Copyright 2018-2025 the Deno authors. MIT license.
+ +use std::cell::Cell; +use std::cell::RefCell; +use std::ffi::c_void; +use std::rc::Rc; + +use deno_core::op2; +use deno_core::GarbageCollected; +use libsqlite3_sys as ffi; +use serde::Deserialize; + +use super::SqliteError; + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SessionOptions { + pub table: Option<String>, + pub db: Option<String>, +} + +pub struct Session { + pub(crate) inner: *mut ffi::sqlite3_session, + pub(crate) freed: Cell<bool>, + + // Hold a strong reference to the database. + pub(crate) _db: Rc<RefCell<Option<rusqlite::Connection>>>, +} + +impl GarbageCollected for Session {} + +impl Drop for Session { + fn drop(&mut self) { + let _ = self.delete(); + } +} + +impl Session { + fn delete(&self) -> Result<(), SqliteError> { + if self.freed.get() { + return Err(SqliteError::SessionClosed); + } + + self.freed.set(true); + // Safety: `self.inner` is a valid session. double free is + // prevented by `freed` flag. + unsafe { + ffi::sqlite3session_delete(self.inner); + } + + Ok(()) + } +} + +#[op2] +impl Session { + // Closes the session. + #[fast] + fn close(&self) -> Result<(), SqliteError> { + self.delete() + } + + // Retrieves a changeset containing all changes since the changeset + // was created. Can be called multiple times. + // + // This method is a wrapper around `sqlite3session_changeset()`. + #[buffer] + fn changeset(&self) -> Result<Box<[u8]>, SqliteError> { + if self.freed.get() { + return Err(SqliteError::SessionClosed); + } + + session_buffer_op(self.inner, ffi::sqlite3session_changeset) + } + + // Similar to the method above, but generates a more compact patchset. + // + // This method is a wrapper around `sqlite3session_patchset()`. + #[buffer] + fn patchset(&self) -> Result<Box<[u8]>, SqliteError> { + if self.freed.get() { + return Err(SqliteError::SessionClosed); + } + + session_buffer_op(self.inner, ffi::sqlite3session_patchset) + } +} + +fn session_buffer_op( + s: *mut ffi::sqlite3_session, + f: unsafe extern "C" fn( + *mut ffi::sqlite3_session, + *mut i32, + *mut *mut c_void, + ) -> i32, +) -> Result<Box<[u8]>, SqliteError> { + let mut n_buffer = 0; + let mut p_buffer = std::ptr::null_mut(); + + // Safety: `s` is a valid session and the buffer is allocated + // by sqlite3 and will be freed later. + let r = unsafe { f(s, &mut n_buffer, &mut p_buffer) }; + if r != ffi::SQLITE_OK { + return Err(SqliteError::SessionChangesetFailed); + } + + if n_buffer == 0 { + return Ok(Default::default()); + } + + // Safety: n_buffer is the size of the buffer. + let buffer = unsafe { + std::slice::from_raw_parts(p_buffer as *const u8, n_buffer as usize) + } + .to_vec() + .into_boxed_slice(); + + // Safety: free sqlite allocated buffer, we copied it into the JS buffer.
+ unsafe { + ffi::sqlite3_free(p_buffer); + } + + Ok(buffer) +} diff --git a/tests/unit_node/sqlite_test.ts b/tests/unit_node/sqlite_test.ts index 9377d6d34eb3ca..ec54780ae92980 100644 --- a/tests/unit_node/sqlite_test.ts +++ b/tests/unit_node/sqlite_test.ts @@ -77,6 +77,27 @@ Deno.test("[node/sqlite] StatementSync read bigints are supported", () => { assertEquals(stmt.expandedSQL, "SELECT * FROM data"); }); +Deno.test("[node/sqlite] createSession and changesets", () => { + const db = new DatabaseSync(":memory:"); + const session = db.createSession(); + + db.exec("CREATE TABLE test (id INTEGER PRIMARY KEY, name TEXT)"); + db.exec("INSERT INTO test (name) VALUES ('foo')"); + + assert(session.changeset() instanceof Uint8Array); + assert(session.patchset() instanceof Uint8Array); + + assert(session.changeset().byteLength > 0); + assert(session.patchset().byteLength > 0); + + session.close(); + + // Use after close shoud throw. + assertThrows(() => session.changeset(), Error, "Session is already closed"); + // Close after close should throw. + assertThrows(() => session.close(), Error, "Session is already closed"); +}); + Deno.test("[node/sqlite] StatementSync integer too large", () => { const db = new DatabaseSync(":memory:"); db.exec("CREATE TABLE data(key INTEGER PRIMARY KEY);"); From b440d2d4f7d191245e597b91a5c1ed7ea6f6c614 Mon Sep 17 00:00:00 2001 From: Nathan Whitaker <17734409+nathanwhit@users.noreply.github.com> Date: Tue, 4 Feb 2025 15:41:56 -0800 Subject: [PATCH 03/17] feat(outdated): interactive update (#27812) interactively select which packages to upgrade. a future improvement could be to add a way to select the version as well, though not sure how valuable that would be. --- Cargo.lock | 76 +++- Cargo.toml | 2 +- cli/Cargo.toml | 2 + cli/args/flags.rs | 53 ++- cli/tools/registry/pm/deps.rs | 6 + cli/tools/registry/pm/outdated.rs | 58 ++- cli/tools/registry/pm/outdated/interactive.rs | 426 ++++++++++++++++++ 7 files changed, 596 insertions(+), 27 deletions(-) create mode 100644 cli/tools/registry/pm/outdated/interactive.rs diff --git a/Cargo.lock b/Cargo.lock index 5b41c6387e4c4c..9f46d90b42e171 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -990,12 +990,12 @@ dependencies = [ [[package]] name = "console_static_text" -version = "0.8.1" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4be93df536dfbcbd39ff7c129635da089901116b88bfc29ec1acb9b56f8ff35" +checksum = "55d8a913e62f6444b79e038be3eb09839e9cfc34d55d85f9336460710647d2f6" dependencies = [ "unicode-width", - "vte", + "vte 0.13.1", ] [[package]] @@ -1125,6 +1125,31 @@ version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +[[package]] +name = "crossterm" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6" +dependencies = [ + "bitflags 2.6.0", + "crossterm_winapi", + "mio 1.0.3", + "parking_lot", + "rustix", + "signal-hook", + "signal-hook-mio", + "winapi", +] + +[[package]] +name = "crossterm_winapi" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" +dependencies = [ + "winapi", +] + [[package]] name = "crypto-bigint" version = "0.5.5" @@ -1295,6 +1320,7 @@ dependencies = [ "clap_complete_fig", "color-print", "console_static_text", + "crossterm", "dashmap", 
"data-encoding", "deno_ast", @@ -1390,6 +1416,7 @@ dependencies = [ "tracing", "twox-hash", "typed-arena", + "unicode-width", "uuid", "walkdir", "which", @@ -5177,6 +5204,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "mio" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.52.0", +] + [[package]] name = "moka" version = "0.12.10" @@ -5359,7 +5398,7 @@ dependencies = [ "kqueue", "libc", "log", - "mio", + "mio 0.8.11", "walkdir", "windows-sys 0.48.0", ] @@ -6710,9 +6749,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.32" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ "bitflags 2.6.0", "errno", @@ -7150,6 +7189,17 @@ dependencies = [ "signal-hook-registry", ] +[[package]] +name = "signal-hook-mio" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34db1a06d485c9142248b7a054f034b349b212551f3dfd19c94d45a754a217cd" +dependencies = [ + "libc", + "mio 1.0.3", + "signal-hook", +] + [[package]] name = "signal-hook-registry" version = "1.4.1" @@ -7405,7 +7455,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55ff8ef943b384c414f54aefa961dd2bd853add74ec75e7ac74cf91dba62bcfa" dependencies = [ - "vte", + "vte 0.11.1", ] [[package]] @@ -8220,7 +8270,7 @@ dependencies = [ "backtrace", "bytes", "libc", - "mio", + "mio 0.8.11", "num_cpus", "parking_lot", "pin-project-lite", @@ -8783,6 +8833,16 @@ name = "vte" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197" +dependencies = [ + "utf8parse", + "vte_generate_state_changes", +] + +[[package]] +name = "vte" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a0b683b20ef64071ff03745b14391751f6beab06a54347885459b77a3f2caa5" dependencies = [ "arrayvec", "utf8parse", diff --git a/Cargo.toml b/Cargo.toml index 1ba196d6619f93..5af9ac3a88980d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -123,7 +123,7 @@ cbc = { version = "=0.1.2", features = ["alloc"] } # Instead use util::time::utc_now() chrono = { version = "0.4", default-features = false, features = ["std", "serde"] } color-print = "0.3.5" -console_static_text = "=0.8.1" +console_static_text = "=0.8.3" ctr = { version = "0.9.2", features = ["alloc"] } dashmap = "5.5.3" data-encoding = "2.3.3" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 54de3db642ffab..dee17bd34474a2 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -105,6 +105,7 @@ clap_complete = "=4.5.24" clap_complete_fig = "=4.5.2" color-print.workspace = true console_static_text.workspace = true +crossterm = "0.28.1" dashmap.workspace = true data-encoding.workspace = true dhat = { version = "0.3.3", optional = true } @@ -169,6 +170,7 @@ tower-lsp.workspace = true tracing = { version = "0.1", features = ["log", "default"] } twox-hash.workspace = true typed-arena = "=2.0.2" +unicode-width = "0.1.3" uuid = { workspace = true, features = ["serde"] } walkdir.workspace = true which.workspace = true diff --git a/cli/args/flags.rs b/cli/args/flags.rs index 
144d1a57b95e87..7a817486def07f 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -475,7 +475,7 @@ pub enum DenoSubcommand { #[derive(Clone, Debug, PartialEq, Eq)] pub enum OutdatedKind { - Update { latest: bool }, + Update { latest: bool, interactive: bool }, PrintOutdated { compatible: bool }, } @@ -2660,7 +2660,7 @@ Specific version requirements to update to can be specified: .long("latest") .action(ArgAction::SetTrue) .help( - "Update to the latest version, regardless of semver constraints", + "Consider the latest version, regardless of semver constraints", ) .conflicts_with("compatible"), ) @@ -2669,15 +2669,21 @@ Specific version requirements to update to can be specified: .long("update") .short('u') .action(ArgAction::SetTrue) - .conflicts_with("compatible") .help("Update dependency versions"), ) + .arg( + Arg::new("interactive") + .long("interactive") + .short('i') + .action(ArgAction::SetTrue) + .requires("update") + .help("Interactively select which dependencies to update") + ) .arg( Arg::new("compatible") .long("compatible") .action(ArgAction::SetTrue) - .help("Only output versions that satisfy semver requirements") - .conflicts_with("update"), + .help("Only consider versions that satisfy semver requirements") ) .arg( Arg::new("recursive") @@ -4462,7 +4468,11 @@ fn outdated_parse( let update = matches.get_flag("update"); let kind = if update { let latest = matches.get_flag("latest"); - OutdatedKind::Update { latest } + let interactive = matches.get_flag("interactive"); + OutdatedKind::Update { + latest, + interactive, + } } else { let compatible = matches.get_flag("compatible"); OutdatedKind::PrintOutdated { compatible } @@ -11646,7 +11656,10 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n" svec!["--update"], OutdatedFlags { filters: vec![], - kind: OutdatedKind::Update { latest: false }, + kind: OutdatedKind::Update { + latest: false, + interactive: false, + }, recursive: false, }, ), @@ -11654,7 +11667,10 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n" svec!["--update", "--latest"], OutdatedFlags { filters: vec![], - kind: OutdatedKind::Update { latest: true }, + kind: OutdatedKind::Update { + latest: true, + interactive: false, + }, recursive: false, }, ), @@ -11662,7 +11678,10 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n" svec!["--update", "--recursive"], OutdatedFlags { filters: vec![], - kind: OutdatedKind::Update { latest: false }, + kind: OutdatedKind::Update { + latest: false, + interactive: false, + }, recursive: true, }, ), @@ -11670,7 +11689,10 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n" svec!["--update", "@foo/bar"], OutdatedFlags { filters: svec!["@foo/bar"], - kind: OutdatedKind::Update { latest: false }, + kind: OutdatedKind::Update { + latest: false, + interactive: false, + }, recursive: false, }, ), @@ -11682,6 +11704,17 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n" recursive: false, }, ), + ( + svec!["--update", "--latest", "--interactive"], + OutdatedFlags { + filters: svec![], + kind: OutdatedKind::Update { + latest: true, + interactive: true, + }, + recursive: false, + }, + ), ]; for (input, expected) in cases { let mut args = svec!["deno", "outdated"]; diff --git a/cli/tools/registry/pm/deps.rs b/cli/tools/registry/pm/deps.rs index 621dd4693df8e3..849e72d3ec1754 100644 --- a/cli/tools/registry/pm/deps.rs +++ b/cli/tools/registry/pm/deps.rs @@ -194,6 +194,12 @@ pub struct Dep { pub alias: Option, } +impl Dep { + pub fn alias_or_name(&self) -> &str { + self.alias.as_deref().unwrap_or_else(|| &self.req.name) + } +} + fn import_map_entries( 
import_map: &ImportMap, ) -> impl Iterator)> { diff --git a/cli/tools/registry/pm/outdated.rs b/cli/tools/registry/pm/outdated.rs index 1afe7a503469be..146f654f39a198 100644 --- a/cli/tools/registry/pm/outdated.rs +++ b/cli/tools/registry/pm/outdated.rs @@ -1,5 +1,7 @@ // Copyright 2018-2025 the Deno authors. MIT license. +mod interactive; + use std::collections::HashSet; use std::sync::Arc; @@ -13,6 +15,7 @@ use deno_semver::VersionReq; use deno_terminal::colors; use super::deps::Dep; +use super::deps::DepId; use super::deps::DepManager; use super::deps::DepManagerArgs; use super::deps::PackageLatestVersion; @@ -240,8 +243,11 @@ pub async fn outdated( deps.resolve_versions().await?; match update_flags.kind { - crate::args::OutdatedKind::Update { latest } => { - update(deps, latest, &filter_set, flags).await?; + crate::args::OutdatedKind::Update { + latest, + interactive, + } => { + update(deps, latest, &filter_set, interactive, flags).await?; } crate::args::OutdatedKind::PrintOutdated { compatible } => { print_outdated(&mut deps, compatible)?; @@ -299,9 +305,10 @@ async fn update( mut deps: DepManager, update_to_latest: bool, filter_set: &filter::FilterSet, + interactive: bool, flags: Arc, ) -> Result<(), AnyError> { - let mut updated = Vec::new(); + let mut to_update = Vec::new(); for (dep_id, resolved, latest_versions) in deps .deps_with_resolved_latest_versions() @@ -320,19 +327,54 @@ async fn update( continue; }; - updated.push(( + to_update.push(( dep_id, format!("{}:{}", dep.kind.scheme(), dep.req.name), deps.resolved_version(dep.id).cloned(), new_version_req.clone(), )); + } - deps.update_dep(dep_id, new_version_req); + if interactive && !to_update.is_empty() { + let selected = interactive::select_interactive( + to_update + .iter() + .map( + |(dep_id, _, current_version, new_req): &( + DepId, + String, + Option, + VersionReq, + )| { + let dep = deps.get_dep(*dep_id); + interactive::PackageInfo { + id: *dep_id, + current_version: current_version + .as_ref() + .map(|nv| nv.version.clone()), + name: dep.alias_or_name().into(), + kind: dep.kind, + new_version: new_req.clone(), + } + }, + ) + .collect(), + )?; + if let Some(selected) = selected { + to_update.retain(|(id, _, _, _)| selected.contains(id)); + } else { + log::info!("Cancelled, not updating"); + return Ok(()); + } } - deps.commit_changes()?; + if !to_update.is_empty() { + for (dep_id, _, _, new_version_req) in &to_update { + deps.update_dep(*dep_id, new_version_req.clone()); + } + + deps.commit_changes()?; - if !updated.is_empty() { let factory = super::npm_install_after_modification( flags.clone(), Some(deps.jsr_fetch_resolver.clone()), @@ -352,7 +394,7 @@ async fn update( let mut deps = deps.reloaded_after_modification(args); deps.resolve_current_versions().await?; for (dep_id, package_name, maybe_current_version, new_version_req) in - updated + to_update { if let Some(nv) = deps.resolved_version(dep_id) { updated_to_versions.insert(( diff --git a/cli/tools/registry/pm/outdated/interactive.rs b/cli/tools/registry/pm/outdated/interactive.rs new file mode 100644 index 00000000000000..fb7bb0c69b89e3 --- /dev/null +++ b/cli/tools/registry/pm/outdated/interactive.rs @@ -0,0 +1,426 @@ +// Copyright 2018-2025 the Deno authors. MIT license. 
+ +use std::collections::HashMap; +use std::collections::HashSet; +use std::fmt::Write as _; +use std::io; + +use console_static_text::ConsoleSize; +use console_static_text::TextItem; +use crossterm::cursor; +use crossterm::event::KeyCode; +use crossterm::event::KeyEvent; +use crossterm::event::KeyEventKind; +use crossterm::event::KeyModifiers; +use crossterm::terminal; +use crossterm::ExecutableCommand; +use deno_core::anyhow; +use deno_semver::Version; +use deno_semver::VersionReq; +use deno_terminal::colors; +use unicode_width::UnicodeWidthStr; + +use crate::tools::registry::pm::deps::DepId; +use crate::tools::registry::pm::deps::DepKind; + +#[derive(Debug)] +pub struct PackageInfo { + pub id: DepId, + pub current_version: Option, + pub new_version: VersionReq, + pub name: String, + pub kind: DepKind, +} + +#[derive(Debug)] +struct FormattedPackageInfo { + dep_ids: Vec, + current_version_string: Option, + new_version_highlighted: String, + formatted_name: String, + formatted_name_len: usize, + name: String, +} + +#[derive(Debug)] +struct State { + packages: Vec, + currently_selected: usize, + checked: HashSet, + + name_width: usize, + current_width: usize, +} + +impl From for FormattedPackageInfo { + fn from(package: PackageInfo) -> Self { + let new_version_string = + package.new_version.version_text().trim_start_matches('^'); + + let new_version_highlighted = + if let (Some(current_version), Ok(new_version)) = ( + &package.current_version, + Version::parse_standard(new_version_string), + ) { + highlight_new_version(current_version, &new_version) + } else { + new_version_string.to_string() + }; + FormattedPackageInfo { + dep_ids: vec![package.id], + current_version_string: package + .current_version + .as_ref() + .map(|v| v.to_string()), + new_version_highlighted, + formatted_name: format!( + "{}{}", + colors::gray(format!("{}:", package.kind.scheme())), + package.name + ), + formatted_name_len: package.kind.scheme().len() + 1 + package.name.len(), + name: package.name, + } + } +} + +impl State { + fn new(packages: Vec) -> anyhow::Result { + let mut deduped_packages: HashMap< + (String, Option, VersionReq), + FormattedPackageInfo, + > = HashMap::with_capacity(packages.len()); + for package in packages { + match deduped_packages.entry(( + package.name.clone(), + package.current_version.clone(), + package.new_version.clone(), + )) { + std::collections::hash_map::Entry::Occupied(mut occupied_entry) => { + occupied_entry.get_mut().dep_ids.push(package.id) + } + std::collections::hash_map::Entry::Vacant(vacant_entry) => { + vacant_entry.insert(FormattedPackageInfo::from(package)); + } + } + } + + let mut packages: Vec<_> = deduped_packages.into_values().collect(); + packages.sort_by(|a, b| a.name.cmp(&b.name)); + let name_width = packages + .iter() + .map(|p| p.formatted_name_len) + .max() + .unwrap_or_default(); + let current_width = packages + .iter() + .map(|p| { + p.current_version_string + .as_ref() + .map(|s| s.len()) + .unwrap_or_default() + }) + .max() + .unwrap_or_default(); + + Ok(Self { + packages, + currently_selected: 0, + checked: HashSet::new(), + + name_width, + current_width, + }) + } + + fn instructions_line() -> &'static str { + "Select which packages to update ( to select, ↑/↓/j/k to navigate, a to select all, i to invert selection, enter to accept, to cancel)" + } + + fn render(&self) -> anyhow::Result> { + let mut items = Vec::with_capacity(self.packages.len() + 1); + + items.push(TextItem::new_owned(format!( + "{} {}", + colors::intense_blue("?"), + 
Self::instructions_line() + ))); + + for (i, package) in self.packages.iter().enumerate() { + let mut line = String::new(); + let f = &mut line; + + let checked = self.checked.contains(&i); + write!( + f, + "{} {} ", + if self.currently_selected == i { + colors::intense_blue("❯").to_string() + } else { + " ".to_string() + }, + if checked { "●" } else { "○" } + )?; + + let name_pad = + " ".repeat(self.name_width + 2 - package.formatted_name_len); + write!( + f, + "{formatted_name}{name_pad} {: {}", + package + .current_version_string + .as_deref() + .unwrap_or_default(), + &package.new_version_highlighted, + name_pad = name_pad, + formatted_name = package.formatted_name, + current_width = self.current_width + )?; + + items.push(TextItem::with_hanging_indent_owned(line, 1)); + } + + Ok(items) + } +} + +enum VersionDifference { + Major, + Minor, + Patch, + Prerelease, +} + +fn version_diff(a: &Version, b: &Version) -> VersionDifference { + if a.major != b.major { + VersionDifference::Major + } else if a.minor != b.minor { + VersionDifference::Minor + } else if a.patch != b.patch { + VersionDifference::Patch + } else { + VersionDifference::Prerelease + } +} + +fn highlight_new_version(current: &Version, new: &Version) -> String { + let diff = version_diff(current, new); + + let new_pre = if new.pre.is_empty() { + String::new() + } else { + let mut s = String::new(); + s.push('-'); + for p in &new.pre { + s.push_str(p); + } + s + }; + + match diff { + VersionDifference::Major => format!( + "{}.{}.{}{}", + colors::red_bold(new.major), + colors::red_bold(new.minor), + colors::red_bold(new.patch), + colors::red_bold(new_pre) + ), + VersionDifference::Minor => format!( + "{}.{}.{}{}", + new.major, + colors::yellow_bold(new.minor), + colors::yellow_bold(new.patch), + colors::yellow_bold(new_pre) + ), + VersionDifference::Patch => format!( + "{}.{}.{}{}", + new.major, + new.minor, + colors::green_bold(new.patch), + colors::green_bold(new_pre) + ), + VersionDifference::Prerelease => format!( + "{}.{}.{}{}", + new.major, + new.minor, + new.patch, + colors::red_bold(new_pre) + ), + } +} + +struct RawMode { + needs_disable: bool, +} + +impl RawMode { + fn enable() -> io::Result { + terminal::enable_raw_mode()?; + Ok(Self { + needs_disable: true, + }) + } + fn disable(mut self) -> io::Result<()> { + self.needs_disable = false; + terminal::disable_raw_mode() + } +} + +impl Drop for RawMode { + fn drop(&mut self) { + if self.needs_disable { + let _ = terminal::disable_raw_mode(); + } + } +} + +pub fn select_interactive( + packages: Vec, +) -> anyhow::Result>> { + let mut stderr = io::stderr(); + + let raw_mode = RawMode::enable()?; + let mut static_text = + console_static_text::ConsoleStaticText::new(move || { + if let Ok((cols, rows)) = terminal::size() { + ConsoleSize { + cols: Some(cols), + rows: Some(rows), + } + } else { + ConsoleSize { + cols: None, + rows: None, + } + } + }); + static_text.keep_cursor_zero_column(true); + + let (_, start_row) = cursor::position().unwrap_or_default(); + let (_, rows) = terminal::size()?; + if rows - start_row < (packages.len() + 2) as u16 { + let pad = ((packages.len() + 2) as u16) - (rows - start_row); + stderr.execute(terminal::ScrollUp(pad.min(rows)))?; + stderr.execute(cursor::MoveUp(pad.min(rows)))?; + } + + let mut state = State::new(packages)?; + stderr.execute(cursor::Hide)?; + + let instructions_width = format!("? 
{}", State::instructions_line()).width(); + + let mut do_it = false; + let mut scroll_offset = 0; + loop { + let mut items = state.render()?; + let size = static_text.console_size(); + let first_line_rows = size + .cols + .map(|cols| (instructions_width / cols as usize) + 1) + .unwrap_or(1); + if let Some(rows) = size.rows { + if items.len() + first_line_rows >= rows as usize { + let adj = if scroll_offset == 0 { + first_line_rows.saturating_sub(1) + } else { + 0 + }; + if state.currently_selected < scroll_offset { + scroll_offset = state.currently_selected; + } else if state.currently_selected + 1 + >= scroll_offset + (rows as usize).saturating_sub(adj) + { + scroll_offset = + (state.currently_selected + 1).saturating_sub(rows as usize) + 1; + } + let adj = if scroll_offset == 0 { + first_line_rows.saturating_sub(1) + } else { + 0 + }; + let mut new_items = Vec::with_capacity(rows as usize); + + scroll_offset = scroll_offset.clamp(0, items.len() - 1); + new_items.extend( + items.drain( + scroll_offset + ..(scroll_offset + (rows as usize).saturating_sub(adj)) + .min(items.len()), + ), + ); + items = new_items; + } + } + static_text.eprint_items(items.iter()); + + let event = crossterm::event::read()?; + #[allow(clippy::single_match)] + match event { + crossterm::event::Event::Key(KeyEvent { + kind: KeyEventKind::Press, + code, + modifiers, + .. + }) => match (code, modifiers) { + (KeyCode::Char('c'), KeyModifiers::CONTROL) => break, + (KeyCode::Up | KeyCode::Char('k'), KeyModifiers::NONE) => { + state.currently_selected = if state.currently_selected == 0 { + state.packages.len() - 1 + } else { + state.currently_selected - 1 + }; + } + (KeyCode::Down | KeyCode::Char('j'), KeyModifiers::NONE) => { + state.currently_selected = + (state.currently_selected + 1) % state.packages.len(); + } + (KeyCode::Char(' '), _) => { + if !state.checked.insert(state.currently_selected) { + state.checked.remove(&state.currently_selected); + } + } + (KeyCode::Char('a'), _) => { + if (0..state.packages.len()).all(|idx| state.checked.contains(&idx)) { + state.checked.clear(); + } else { + state.checked.extend(0..state.packages.len()); + } + } + (KeyCode::Char('i'), _) => { + for idx in 0..state.packages.len() { + if state.checked.contains(&idx) { + state.checked.remove(&idx); + } else { + state.checked.insert(idx); + } + } + } + (KeyCode::Enter, _) => { + do_it = true; + break; + } + _ => {} + }, + _ => {} + } + } + + static_text.eprint_clear(); + + crossterm::execute!(&mut stderr, cursor::Show)?; + + raw_mode.disable()?; + + if do_it { + Ok(Some( + state + .checked + .into_iter() + .flat_map(|idx| &state.packages[idx].dep_ids) + .copied() + .collect(), + )) + } else { + Ok(None) + } +} From 1eb41421438376aa25af2aa9ecba7844ab7d6a6f Mon Sep 17 00:00:00 2001 From: Hajime-san <41257923+Hajime-san@users.noreply.github.com> Date: Wed, 5 Feb 2025 21:10:11 +0900 Subject: [PATCH 04/17] feat(ext/canvas): enhance `createImageBitmap` specification compliance (#25517) --- Cargo.lock | 73 ++- ext/canvas/01_image.js | 282 ++++---- ext/canvas/Cargo.toml | 14 +- ext/canvas/README.md | 29 + ext/canvas/image_ops.rs | 609 ++++++++++++++++++ ext/canvas/lib.deno_canvas.d.ts | 2 +- ext/canvas/lib.rs | 161 +---- ext/canvas/op_create_image_bitmap.rs | 563 ++++++++++++++++ ext/web/01_mimesniff.js | 4 + runtime/js/99_main.js | 6 + .../image/1x1-2f-animated-has-def.png | Bin 0 -> 233 bytes .../testdata/image/1x1-3f-animated-no-def.png | Bin 0 -> 271 bytes tests/testdata/image/1x1-3f-animated.gif | Bin 0 -> 126 bytes 
...3f-lossless-animated-semi-transparent.webp | Bin 0 -> 188 bytes tests/testdata/image/1x1-red16.png | Bin 0 -> 77 bytes tests/testdata/image/1x1-red32f.exr | Bin 0 -> 452 bytes tests/testdata/image/1x1-red8.bmp | Bin 0 -> 126 bytes tests/testdata/image/1x1-red8.gif | Bin 0 -> 49 bytes tests/testdata/image/1x1-red8.ico | Bin 0 -> 95 bytes tests/testdata/image/1x1-red8.jpeg | Bin 0 -> 631 bytes tests/testdata/image/1x1-red8.png | Bin 0 -> 73 bytes tests/testdata/image/1x1-red8.webp | Bin 0 -> 34 bytes tests/testdata/image/1x1-white.png | Bin 109 -> 0 bytes tests/testdata/image/2x2-transparent8.png | Bin 0 -> 86 bytes tests/testdata/image/squares_6.jpg | Bin 0 -> 1227 bytes tests/testdata/image/wide-gamut-pattern.png | Bin 0 -> 5320 bytes tests/unit/image_bitmap_test.ts | 365 ++++++++++- 27 files changed, 1798 insertions(+), 310 deletions(-) create mode 100644 ext/canvas/image_ops.rs create mode 100644 ext/canvas/op_create_image_bitmap.rs create mode 100644 tests/testdata/image/1x1-2f-animated-has-def.png create mode 100644 tests/testdata/image/1x1-3f-animated-no-def.png create mode 100644 tests/testdata/image/1x1-3f-animated.gif create mode 100644 tests/testdata/image/1x1-3f-lossless-animated-semi-transparent.webp create mode 100644 tests/testdata/image/1x1-red16.png create mode 100644 tests/testdata/image/1x1-red32f.exr create mode 100644 tests/testdata/image/1x1-red8.bmp create mode 100644 tests/testdata/image/1x1-red8.gif create mode 100644 tests/testdata/image/1x1-red8.ico create mode 100644 tests/testdata/image/1x1-red8.jpeg create mode 100644 tests/testdata/image/1x1-red8.png create mode 100644 tests/testdata/image/1x1-red8.webp delete mode 100644 tests/testdata/image/1x1-white.png create mode 100644 tests/testdata/image/2x2-transparent8.png create mode 100644 tests/testdata/image/squares_6.jpg create mode 100644 tests/testdata/image/wide-gamut-pattern.png diff --git a/Cargo.lock b/Cargo.lock index 9f46d90b42e171..e64d40feba7b02 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -688,6 +688,12 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +[[package]] +name = "byteorder-lite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" + [[package]] name = "bytes" version = "1.9.0" @@ -959,12 +965,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "color_quant" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" - [[package]] name = "colorchoice" version = "1.0.0" @@ -1565,10 +1565,14 @@ dependencies = [ name = "deno_canvas" version = "0.58.0" dependencies = [ + "bytemuck", "deno_core", "deno_error", + "deno_terminal 0.2.0", "deno_webgpu", "image", + "lcms2", + "num-traits", "serde", "thiserror 2.0.3", ] @@ -3134,6 +3138,12 @@ dependencies = [ "zeroize", ] +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + [[package]] name = "dyn-clone" version = "1.0.17" @@ -4537,15 +4547,16 @@ dependencies = [ [[package]] name = "image" -version = "0.24.9" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d" +checksum = 
"bc144d44a31d753b02ce64093d532f55ff8dc4ebf2ffb8a63c0dda691385acae" dependencies = [ "bytemuck", - "byteorder", - "color_quant", + "byteorder-lite", "num-traits", "png", + "zune-core", + "zune-jpeg", ] [[package]] @@ -4866,6 +4877,29 @@ dependencies = [ "spin", ] +[[package]] +name = "lcms2" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "680ec3fa42c36e0af9ca02f20a3742a82229c7f1ee0e6754294de46a80be6f74" +dependencies = [ + "bytemuck", + "foreign-types", + "lcms2-sys", +] + +[[package]] +name = "lcms2-sys" +version = "4.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "593265f9a3172180024fb62580ee31348f31be924b19416da174ebb7fb623d2e" +dependencies = [ + "cc", + "dunce", + "libc", + "pkg-config", +] + [[package]] name = "lazycell" version = "1.3.0" @@ -5481,9 +5515,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", "libm", @@ -9726,3 +9760,18 @@ dependencies = [ "cc", "pkg-config", ] + +[[package]] +name = "zune-core" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a" + +[[package]] +name = "zune-jpeg" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16099418600b4d8f028622f73ff6e3deaabdff330fb9a2a131dea781ee8b0768" +dependencies = [ + "zune-core", +] diff --git a/ext/canvas/01_image.js b/ext/canvas/01_image.js index 4b39c041b45cc3..c39c04ab1e693c 100644 --- a/ext/canvas/01_image.js +++ b/ext/canvas/01_image.js @@ -1,7 +1,7 @@ // Copyright 2018-2025 the Deno authors. MIT license. import { internals, primordials } from "ext:core/mod.js"; -import { op_image_decode_png, op_image_process } from "ext:core/ops"; +import { op_create_image_bitmap } from "ext:core/ops"; import * as webidl from "ext:deno_webidl/00_webidl.js"; import { DOMException } from "ext:deno_web/01_dom_exception.js"; import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; @@ -11,13 +11,11 @@ const { ObjectPrototypeIsPrototypeOf, Symbol, SymbolFor, - TypeError, TypedArrayPrototypeGetBuffer, Uint8Array, - MathCeil, - PromiseResolve, PromiseReject, RangeError, + ArrayPrototypeJoin, } = primordials; import { _data, @@ -164,6 +162,12 @@ function createImageBitmap( options = undefined, ) { const prefix = "Failed to execute 'createImageBitmap'"; + // Add the value when implementing to add support for ImageBitmapSource + const imageBitmapSources = [ + "Blob", + "ImageData", + "ImageBitmap", + ]; // Overload: createImageBitmap(image [, options ]) if (arguments.length < 3) { @@ -184,6 +188,7 @@ function createImageBitmap( "Argument 6", ); + // 1. if (sw === 0) { return PromiseReject(new RangeError("sw has to be greater than 0")); } @@ -193,6 +198,7 @@ function createImageBitmap( } } + // 2. 
if (options.resizeWidth === 0) { return PromiseReject( new DOMException( @@ -204,7 +210,7 @@ function createImageBitmap( if (options.resizeHeight === 0) { return PromiseReject( new DOMException( - "options.resizeWidth has to be greater than 0", + "options.resizeHeight has to be greater than 0", "InvalidStateError", ), ); @@ -212,139 +218,159 @@ function createImageBitmap( const imageBitmap = webidl.createBranded(ImageBitmap); - if (ObjectPrototypeIsPrototypeOf(ImageDataPrototype, image)) { - const processedImage = processImage( - image[_data], - image[_width], - image[_height], - sxOrOptions, - sy, - sw, - sh, - options, + // 3. + const isBlob = ObjectPrototypeIsPrototypeOf(BlobPrototype, image); + const isImageData = ObjectPrototypeIsPrototypeOf(ImageDataPrototype, image); + const isImageBitmap = ObjectPrototypeIsPrototypeOf( + ImageBitmapPrototype, + image, + ); + if (!isBlob && !isImageData && !isImageBitmap) { + return PromiseReject( + new DOMException( + `${prefix}: The provided value for 'image' is not of type '(${ + ArrayPrototypeJoin(imageBitmapSources, " or ") + })'`, + "InvalidStateError", + ), ); - imageBitmap[_bitmapData] = processedImage.data; - imageBitmap[_width] = processedImage.outputWidth; - imageBitmap[_height] = processedImage.outputHeight; - return PromiseResolve(imageBitmap); } - if (ObjectPrototypeIsPrototypeOf(BlobPrototype, image)) { - return (async () => { - const data = await image.arrayBuffer(); - const mimetype = sniffImage(image.type); - if (mimetype !== "image/png") { - throw new DOMException( - `Unsupported type '${image.type}'`, - "InvalidStateError", + + // 4. + return (async () => { + // + // For performance reasons, the arguments passed to op are represented as numbers that don't need to be serialized. + // + + let width = 0; + let height = 0; + // If the image doesn't have a MIME type, mark it as 0. + let mimeType = 0; + let imageBitmapSource, buf; + if (isBlob) { + imageBitmapSource = 0; + buf = new Uint8Array(await image.arrayBuffer()); + const mimeTypeString = sniffImage(image.type); + + if (mimeTypeString === "image/png") { + mimeType = 1; + } else if (mimeTypeString === "image/jpeg") { + mimeType = 2; + } else if (mimeTypeString === "image/gif") { + mimeType = 3; + // NOTE: Temporarily not supported due to build size concerns + // https://github.com/denoland/deno/pull/25517#issuecomment-2626044644 + return PromiseReject( + new DOMException( + "The MIME type of source image is not supported currently", + "InvalidStateError", + ), + ); + } else if (mimeTypeString === "image/bmp") { + mimeType = 4; + } else if (mimeTypeString === "image/x-icon") { + mimeType = 5; + } else if (mimeTypeString === "image/webp") { + mimeType = 6; + // NOTE: Temporarily not supported due to build size concerns + // https://github.com/denoland/deno/pull/25517#issuecomment-2626044644 + return PromiseReject( + new DOMException( + "The MIME type of source image is not supported currently", + "InvalidStateError", + ), + ); + } else if (mimeTypeString === "") { + return PromiseReject( + new DOMException( + `The MIME type of source image is not specified\n +hint: When you want to get a "Blob" from "fetch", make sure to go through a file server that returns the appropriate content-type response header, + and specify the URL to the file server like "await(await fetch('http://localhost:8000/sample.png').blob()". 
+ Alternatively, if you are reading a local file using 'Deno.readFile' etc., + set the appropriate MIME type like "new Blob([await Deno.readFile('sample.png')], { type: 'image/png' })".\n`, + "InvalidStateError", + ), + ); + } else { + return PromiseReject( + new DOMException( + `The the MIME type ${mimeTypeString} of source image is not a supported format\n +info: The following MIME types are supported. +docs: https://mimesniff.spec.whatwg.org/#image-type-pattern-matching-algorithm\n`, + "InvalidStateError", + ), ); } - const { data: imageData, width, height } = op_image_decode_png( - new Uint8Array(data), - ); - const processedImage = processImage( - imageData, - width, - height, - sxOrOptions, - sy, - sw, - sh, - options, - ); - imageBitmap[_bitmapData] = processedImage.data; - imageBitmap[_width] = processedImage.outputWidth; - imageBitmap[_height] = processedImage.outputHeight; - return imageBitmap; - })(); - } else { - return PromiseReject(new TypeError("Invalid or unsupported image value")); - } -} + } else if (isImageData) { + width = image[_width]; + height = image[_height]; + imageBitmapSource = 1; + buf = new Uint8Array(TypedArrayPrototypeGetBuffer(image[_data])); + } else if (isImageBitmap) { + width = image[_width]; + height = image[_height]; + imageBitmapSource = 2; + buf = new Uint8Array(TypedArrayPrototypeGetBuffer(image[_bitmapData])); + } -function processImage(input, width, height, sx, sy, sw, sh, options) { - let sourceRectangle; - - if ( - sx !== undefined && sy !== undefined && sw !== undefined && sh !== undefined - ) { - sourceRectangle = [ - [sx, sy], - [sx + sw, sy], - [sx + sw, sy + sh], - [sx, sy + sh], - ]; - } else { - sourceRectangle = [ - [0, 0], - [width, 0], - [width, height], - [0, height], - ]; - } - const widthOfSourceRect = sourceRectangle[1][0] - sourceRectangle[0][0]; - const heightOfSourceRect = sourceRectangle[3][1] - sourceRectangle[0][1]; - - let outputWidth; - if (options.resizeWidth !== undefined) { - outputWidth = options.resizeWidth; - } else if (options.resizeHeight !== undefined) { - outputWidth = MathCeil( - (widthOfSourceRect * options.resizeHeight) / heightOfSourceRect, - ); - } else { - outputWidth = widthOfSourceRect; - } + // If those options are not provided, assign 0 to mean undefined(None). + const _sx = typeof sxOrOptions === "number" ? sxOrOptions : 0; + const _sy = sy ?? 0; + const _sw = sw ?? 0; + const _sh = sh ?? 0; - let outputHeight; - if (options.resizeHeight !== undefined) { - outputHeight = options.resizeHeight; - } else if (options.resizeWidth !== undefined) { - outputHeight = MathCeil( - (heightOfSourceRect * options.resizeWidth) / widthOfSourceRect, - ); - } else { - outputHeight = heightOfSourceRect; - } + // If those options are not provided, assign 0 to mean undefined(None). + const resizeWidth = options.resizeWidth ?? 0; + const resizeHeight = options.resizeHeight ?? 0; - if (options.colorSpaceConversion === "none") { - throw new TypeError( - "Cannot create image: invalid colorSpaceConversion option, 'none' is not supported", - ); - } + // If the imageOrientation option is set "from-image" or not set, assign 0. + const imageOrientation = options.imageOrientation === "flipY" ? 1 : 0; - /* - * The cropping works differently than the spec specifies: - * The spec states to create an infinite surface and place the top-left corner - * of the image a 0,0 and crop based on sourceRectangle. 
- * - * We instead create a surface the size of sourceRectangle, and position - * the image at the correct location, which is the inverse of the x & y of - * sourceRectangle's top-left corner. - */ - const data = op_image_process( - new Uint8Array(TypedArrayPrototypeGetBuffer(input)), - { + // If the premultiplyAlpha option is "default" or not set, assign 0. + let premultiplyAlpha = 0; + if (options.premultiplyAlpha === "premultiply") { + premultiplyAlpha = 1; + } else if (options.premultiplyAlpha === "none") { + premultiplyAlpha = 2; + } + + // If the colorSpaceConversion option is "default" or not set, assign 0. + const colorSpaceConversion = options.colorSpaceConversion === "none" + ? 1 + : 0; + + // If the resizeQuality option is "low" or not set, assign 0. + let resizeQuality = 0; + if (options.resizeQuality === "pixelated") { + resizeQuality = 1; + } else if (options.resizeQuality === "medium") { + resizeQuality = 2; + } else if (options.resizeQuality === "high") { + resizeQuality = 3; + } + + const processedImage = op_create_image_bitmap( + buf, width, height, - surfaceWidth: widthOfSourceRect, - surfaceHeight: heightOfSourceRect, - inputX: sourceRectangle[0][0] * -1, // input_x - inputY: sourceRectangle[0][1] * -1, // input_y - outputWidth, - outputHeight, - resizeQuality: options.resizeQuality, - flipY: options.imageOrientation === "flipY", - premultiply: options.premultiplyAlpha === "default" - ? null - : (options.premultiplyAlpha === "premultiply"), - }, - ); - - return { - data, - outputWidth, - outputHeight, - }; + _sx, + _sy, + _sw, + _sh, + imageOrientation, + premultiplyAlpha, + colorSpaceConversion, + resizeWidth, + resizeHeight, + resizeQuality, + imageBitmapSource, + mimeType, + ); + imageBitmap[_bitmapData] = processedImage[0]; + imageBitmap[_width] = processedImage[1]; + imageBitmap[_height] = processedImage[2]; + return imageBitmap; + })(); } function getBitmapData(imageBitmap) { diff --git a/ext/canvas/Cargo.toml b/ext/canvas/Cargo.toml index 87f809c6b262ba..d21797314ebf0e 100644 --- a/ext/canvas/Cargo.toml +++ b/ext/canvas/Cargo.toml @@ -14,9 +14,21 @@ description = "OffscreenCanvas implementation for Deno" path = "lib.rs" [dependencies] +bytemuck = "1.17.1" deno_core.workspace = true deno_error.workspace = true +deno_terminal.workspace = true deno_webgpu.workspace = true -image = { version = "0.24.7", default-features = false, features = ["png"] } +image = { version = "0.25.4", default-features = false, features = ["png", "jpeg", "bmp", "ico" # NOTE: Temporarily not supported due to build size concerns + # https://github.com/denoland/deno/pull/25517#issuecomment-2626044644 + # "webp", "gif" +] } +# NOTE: The qcms is a color space conversion crate which parses ICC profiles that used in Gecko, +# however it supports only 8-bit color depth currently. +# https://searchfox.org/mozilla-central/rev/f09e3f9603a08b5b51bf504846091579bc2ff531/gfx/qcms/src/transform.rs#130-137 +# It seems to be failed to build for aarch64-unknown-linux-gnu with pkg-config. +# https://github.com/kornelski/rust-lcms2-sys/blob/b8e9c3efcf266b88600318fb519c073b9ebb61b7/README.md#L26 +lcms2 = { version = "6.1.0", features = ["static"] } +num-traits = { version = "0.2.19" } serde = { workspace = true, features = ["derive"] } thiserror.workspace = true diff --git a/ext/canvas/README.md b/ext/canvas/README.md index cf013677e786b1..0303d1f739ad22 100644 --- a/ext/canvas/README.md +++ b/ext/canvas/README.md @@ -1,3 +1,32 @@ # deno_canvas Extension that implements various OffscreenCanvas related APIs. 
+ +## Image processing architecture in Rust + +```mermaid +flowchart LR + Input["input binary<br>( &[u8] )"] + II["intermediate image<br>( DynamicImage )"] + Ops["processing pixel<br>( ImageBuffer< P, S > )"] + Output["output binary<br>
( Box<[u8]> )"] + Input --> II + II --> Ops --> II + II --> Output +``` + +The architecture of image processing in Rust is rely on the structure of +[image](https://github.com/image-rs/image) crate.\ +If the input is a image of binary, it convert to an intermediate image +(`DynamicImage` in `image`) with using a decoder corresponding to its image +formats.\ +After converting to an intermediate image, it can process various way for +example, to use the pixel processong operation +[imageops](https://github.com/image-rs/image?tab=readme-ov-file#image-processing-functions) +supplied by `image`.\ +On the other hand, there can also to implement your own pixel processong +operation to refer to +[the implementation of imageops as here](https://github.com/image-rs/image/blob/4afe9572b5c867cf4d07cd88107e8c49354de9f3/src/imageops/colorops.rs#L156-L182) +or [image_ops.rs module](./image_ops.rs).\ +You can treat any bit depth that supported by `image` with generics in the +processing pixel layer. diff --git a/ext/canvas/image_ops.rs b/ext/canvas/image_ops.rs new file mode 100644 index 00000000000000..80fe2ef81ec68b --- /dev/null +++ b/ext/canvas/image_ops.rs @@ -0,0 +1,609 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use bytemuck::cast_slice; +use bytemuck::cast_slice_mut; +use image::ColorType; +use image::DynamicImage; +use image::GenericImageView; +use image::ImageBuffer; +use image::Luma; +use image::LumaA; +use image::Pixel; +use image::Primitive; +use image::Rgb; +use image::Rgba; +use lcms2::PixelFormat; +use lcms2::Pod; +use lcms2::Profile; +use lcms2::Transform; +use num_traits::NumCast; +use num_traits::SaturatingMul; + +use crate::CanvasError; + +pub(crate) trait PremultiplyAlpha { + fn premultiply_alpha(&self) -> Self; +} + +impl PremultiplyAlpha for LumaA { + fn premultiply_alpha(&self) -> Self { + let max_t = T::DEFAULT_MAX_VALUE; + + let mut pixel = [self.0[0], self.0[1]]; + let alpha_index = pixel.len() - 1; + let alpha = pixel[alpha_index]; + let normalized_alpha = alpha.to_f32().unwrap() / max_t.to_f32().unwrap(); + + if normalized_alpha == 0.0 { + return LumaA([pixel[0], pixel[alpha_index]]); + } + + for rgb in pixel.iter_mut().take(alpha_index) { + *rgb = NumCast::from((rgb.to_f32().unwrap() * normalized_alpha).round()) + .unwrap() + } + + LumaA([pixel[0], pixel[alpha_index]]) + } +} + +impl PremultiplyAlpha for Rgba { + fn premultiply_alpha(&self) -> Self { + let max_t = T::DEFAULT_MAX_VALUE; + + let mut pixel = [self.0[0], self.0[1], self.0[2], self.0[3]]; + let alpha_index = pixel.len() - 1; + let alpha = pixel[alpha_index]; + let normalized_alpha = alpha.to_f32().unwrap() / max_t.to_f32().unwrap(); + + if normalized_alpha == 0.0 { + return Rgba([pixel[0], pixel[1], pixel[2], pixel[alpha_index]]); + } + + for rgb in pixel.iter_mut().take(alpha_index) { + *rgb = NumCast::from((rgb.to_f32().unwrap() * normalized_alpha).round()) + .unwrap() + } + + Rgba([pixel[0], pixel[1], pixel[2], pixel[alpha_index]]) + } +} + +fn process_premultiply_alpha(image: &I) -> ImageBuffer> +where + I: GenericImageView, + P: Pixel + PremultiplyAlpha + 'static, + S: Primitive + 'static, +{ + let (width, height) = image.dimensions(); + let mut out = ImageBuffer::new(width, height); + + for (x, y, pixel) in image.pixels() { + let pixel = pixel.premultiply_alpha(); + + out.put_pixel(x, y, pixel); + } + + out +} + +/// Premultiply the alpha channel of the image. 
+pub(crate) fn premultiply_alpha( + image: DynamicImage, +) -> Result { + match image { + DynamicImage::ImageLumaA8(image) => { + Ok(process_premultiply_alpha(&image).into()) + } + DynamicImage::ImageLumaA16(image) => { + Ok(process_premultiply_alpha(&image).into()) + } + DynamicImage::ImageRgba8(image) => { + Ok(process_premultiply_alpha(&image).into()) + } + DynamicImage::ImageRgba16(image) => { + Ok(process_premultiply_alpha(&image).into()) + } + DynamicImage::ImageRgb32F(_) => { + Err(CanvasError::UnsupportedColorType(image.color())) + } + DynamicImage::ImageRgba32F(_) => { + Err(CanvasError::UnsupportedColorType(image.color())) + } + // If the image does not have an alpha channel, return the image as is. + _ => Ok(image), + } +} + +pub(crate) trait UnpremultiplyAlpha { + /// To determine if the image is premultiplied alpha, + /// checking premultiplied RGBA value is one where any of the R/G/B channel values exceeds the alpha channel value.\ + /// https://www.w3.org/TR/webgpu/#color-spaces + fn is_premultiplied_alpha(&self) -> bool; + fn unpremultiply_alpha(&self) -> Self; +} + +impl UnpremultiplyAlpha for Rgba { + fn is_premultiplied_alpha(&self) -> bool { + let max_t = T::DEFAULT_MAX_VALUE; + + let pixel = [self.0[0], self.0[1], self.0[2]]; + let alpha_index = self.0.len() - 1; + let alpha = self.0[alpha_index]; + + match pixel.iter().max() { + Some(rgb_max) => rgb_max < &max_t.saturating_mul(&alpha), + // usually doesn't reach here + None => false, + } + } + + fn unpremultiply_alpha(&self) -> Self { + let max_t = T::DEFAULT_MAX_VALUE; + + let mut pixel = [self.0[0], self.0[1], self.0[2], self.0[3]]; + let alpha_index = pixel.len() - 1; + let alpha = pixel[alpha_index]; + + for rgb in pixel.iter_mut().take(alpha_index) { + *rgb = NumCast::from( + (rgb.to_f32().unwrap() + / (alpha.to_f32().unwrap() / max_t.to_f32().unwrap())) + .round(), + ) + .unwrap(); + } + + Rgba([pixel[0], pixel[1], pixel[2], pixel[alpha_index]]) + } +} + +impl UnpremultiplyAlpha for LumaA { + fn is_premultiplied_alpha(&self) -> bool { + let max_t = T::DEFAULT_MAX_VALUE; + + let pixel = [self.0[0]]; + let alpha_index = self.0.len() - 1; + let alpha = self.0[alpha_index]; + + pixel[0] < max_t.saturating_mul(&alpha) + } + + fn unpremultiply_alpha(&self) -> Self { + let max_t = T::DEFAULT_MAX_VALUE; + + let mut pixel = [self.0[0], self.0[1]]; + let alpha_index = pixel.len() - 1; + let alpha = pixel[alpha_index]; + + for rgb in pixel.iter_mut().take(alpha_index) { + *rgb = NumCast::from( + (rgb.to_f32().unwrap() + / (alpha.to_f32().unwrap() / max_t.to_f32().unwrap())) + .round(), + ) + .unwrap(); + } + + LumaA([pixel[0], pixel[alpha_index]]) + } +} + +fn is_premultiplied_alpha(image: &I) -> bool +where + I: GenericImageView, + P: Pixel + UnpremultiplyAlpha + 'static, + S: Primitive + 'static, +{ + image + .pixels() + .any(|(_, _, pixel)| pixel.is_premultiplied_alpha()) +} + +fn process_unpremultiply_alpha(image: &I) -> ImageBuffer> +where + I: GenericImageView, + P: Pixel + UnpremultiplyAlpha + 'static, + S: Primitive + 'static, +{ + let (width, height) = image.dimensions(); + let mut out = ImageBuffer::new(width, height); + + for (x, y, pixel) in image.pixels() { + let pixel = pixel.unpremultiply_alpha(); + + out.put_pixel(x, y, pixel); + } + + out +} + +/// Invert the premultiplied alpha channel of the image. 
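+///
+/// This is the inverse of `premultiply_alpha`: each color channel is divided
+/// by `alpha / max` and rounded, so the 8-bit RGBA pixel `[127, 0, 0, 127]`
+/// becomes `[255, 0, 0, 127]`. Images whose pixels do not appear to be
+/// premultiplied are returned unchanged.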
+pub(crate) fn unpremultiply_alpha( + image: DynamicImage, +) -> Result { + match image { + DynamicImage::ImageLumaA8(image) => Ok(if is_premultiplied_alpha(&image) { + process_unpremultiply_alpha(&image).into() + } else { + image.into() + }), + DynamicImage::ImageLumaA16(image) => { + Ok(if is_premultiplied_alpha(&image) { + process_unpremultiply_alpha(&image).into() + } else { + image.into() + }) + } + DynamicImage::ImageRgba8(image) => Ok(if is_premultiplied_alpha(&image) { + process_unpremultiply_alpha(&image).into() + } else { + image.into() + }), + DynamicImage::ImageRgba16(image) => Ok(if is_premultiplied_alpha(&image) { + process_unpremultiply_alpha(&image).into() + } else { + image.into() + }), + DynamicImage::ImageRgb32F(_) => { + Err(CanvasError::UnsupportedColorType(image.color())) + } + DynamicImage::ImageRgba32F(_) => { + Err(CanvasError::UnsupportedColorType(image.color())) + } + // If the image does not have an alpha channel, return the image as is. + _ => Ok(image), + } +} + +pub(crate) trait SliceToPixel { + fn slice_to_pixel(pixel: &[u8]) -> Self; +} + +impl SliceToPixel for Luma { + fn slice_to_pixel(pixel: &[u8]) -> Self { + let pixel: &[T] = cast_slice(pixel); + let pixel = [pixel[0]]; + + Luma(pixel) + } +} + +impl SliceToPixel for LumaA { + fn slice_to_pixel(pixel: &[u8]) -> Self { + let pixel: &[T] = cast_slice(pixel); + let pixel = [pixel[0], pixel[1]]; + + LumaA(pixel) + } +} + +impl SliceToPixel for Rgb { + fn slice_to_pixel(pixel: &[u8]) -> Self { + let pixel: &[T] = cast_slice(pixel); + let pixel = [pixel[0], pixel[1], pixel[2]]; + + Rgb(pixel) + } +} + +impl SliceToPixel for Rgba { + fn slice_to_pixel(pixel: &[u8]) -> Self { + let pixel: &[T] = cast_slice(pixel); + let pixel = [pixel[0], pixel[1], pixel[2], pixel[3]]; + + Rgba(pixel) + } +} + +pub(crate) trait TransformColorProfile { + fn transform_color_profile( + &mut self, + transformer: &Transform, + ) -> P + where + P: Pixel + SliceToPixel + 'static, + S: Primitive + 'static; +} + +macro_rules! 
impl_transform_color_profile { + ($type:ty) => { + impl TransformColorProfile for $type { + fn transform_color_profile( + &mut self, + transformer: &Transform, + ) -> P + where + P: Pixel + SliceToPixel + 'static, + S: Primitive + 'static, + { + let mut pixel = cast_slice_mut(self.0.as_mut_slice()); + transformer.transform_in_place(&mut pixel); + + P::slice_to_pixel(&pixel) + } + } + }; +} + +impl_transform_color_profile!(Luma); +impl_transform_color_profile!(Luma); +impl_transform_color_profile!(LumaA); +impl_transform_color_profile!(LumaA); +impl_transform_color_profile!(Rgb); +impl_transform_color_profile!(Rgb); +impl_transform_color_profile!(Rgba); +impl_transform_color_profile!(Rgba); + +fn process_icc_profile_conversion( + image: &I, + color: ColorType, + input_icc_profile: Profile, + output_icc_profile: Profile, +) -> Result>, CanvasError> +where + I: GenericImageView, + P: Pixel + SliceToPixel + TransformColorProfile + 'static, + S: Primitive + 'static, +{ + let (width, height) = image.dimensions(); + let mut out = ImageBuffer::new(width, height); + let pixel_format = match color { + ColorType::L8 => Ok(PixelFormat::GRAY_8), + ColorType::L16 => Ok(PixelFormat::GRAY_16), + ColorType::La8 => Ok(PixelFormat::GRAYA_8), + ColorType::La16 => Ok(PixelFormat::GRAYA_16), + ColorType::Rgb8 => Ok(PixelFormat::RGB_8), + ColorType::Rgb16 => Ok(PixelFormat::RGB_16), + ColorType::Rgba8 => Ok(PixelFormat::RGBA_8), + ColorType::Rgba16 => Ok(PixelFormat::RGBA_16), + _ => Err(CanvasError::UnsupportedColorType(color)), + }?; + let transformer = Transform::new( + &input_icc_profile, + pixel_format, + &output_icc_profile, + pixel_format, + output_icc_profile.header_rendering_intent(), + ) + .map_err(CanvasError::Lcms)?; + + for (x, y, mut pixel) in image.pixels() { + let pixel = pixel.transform_color_profile(&transformer); + + out.put_pixel(x, y, pixel); + } + + Ok(out) +} + +/// Convert the color space of the image from the ICC profile to sRGB. +pub(crate) fn to_srgb_from_icc_profile( + image: DynamicImage, + icc_profile: Option>, +) -> Result { + match icc_profile { + // If there is no color profile information, return the image as is. + None => Ok(image), + Some(icc_profile) => match Profile::new_icc(&icc_profile) { + // If the color profile information is invalid, return the image as is. + Err(_) => Ok(image), + Ok(icc_profile) => { + let srgb_icc_profile = Profile::new_srgb(); + let color = image.color(); + match image { + DynamicImage::ImageLuma8(image) => Ok( + process_icc_profile_conversion( + &image, + color, + icc_profile, + srgb_icc_profile, + )? + .into(), + ), + DynamicImage::ImageLuma16(image) => Ok( + process_icc_profile_conversion( + &image, + color, + icc_profile, + srgb_icc_profile, + )? + .into(), + ), + DynamicImage::ImageLumaA8(image) => Ok( + process_icc_profile_conversion( + &image, + color, + icc_profile, + srgb_icc_profile, + )? + .into(), + ), + DynamicImage::ImageLumaA16(image) => Ok( + process_icc_profile_conversion( + &image, + color, + icc_profile, + srgb_icc_profile, + )? + .into(), + ), + DynamicImage::ImageRgb8(image) => Ok( + process_icc_profile_conversion( + &image, + color, + icc_profile, + srgb_icc_profile, + )? + .into(), + ), + DynamicImage::ImageRgb16(image) => Ok( + process_icc_profile_conversion( + &image, + color, + icc_profile, + srgb_icc_profile, + )? + .into(), + ), + DynamicImage::ImageRgba8(image) => Ok( + process_icc_profile_conversion( + &image, + color, + icc_profile, + srgb_icc_profile, + )? 
+ .into(), + ), + DynamicImage::ImageRgba16(image) => Ok( + process_icc_profile_conversion( + &image, + color, + icc_profile, + srgb_icc_profile, + )? + .into(), + ), + DynamicImage::ImageRgb32F(_) => { + Err(CanvasError::UnsupportedColorType(image.color())) + } + DynamicImage::ImageRgba32F(_) => { + Err(CanvasError::UnsupportedColorType(image.color())) + } + _ => Err(CanvasError::UnsupportedColorType(image.color())), + } + } + }, + } +} + +/// Create an image buffer from raw bytes. +fn process_image_buffer_from_raw_bytes( + width: u32, + height: u32, + buffer: &[u8], + bytes_per_pixel: usize, +) -> ImageBuffer> +where + P: Pixel + SliceToPixel + 'static, + S: Primitive + 'static, +{ + let mut out = ImageBuffer::new(width, height); + for (index, buffer) in buffer.chunks_exact(bytes_per_pixel).enumerate() { + let pixel = P::slice_to_pixel(buffer); + + out.put_pixel(index as u32, index as u32, pixel); + } + + out +} + +pub(crate) fn create_image_from_raw_bytes( + width: u32, + height: u32, + buffer: &[u8], +) -> Result { + let total_pixels = (width * height) as usize; + // avoid to divide by zero + let bytes_per_pixel = buffer + .len() + .checked_div(total_pixels) + .ok_or(CanvasError::InvalidSizeZero(width, height))?; + // convert from a bytes per pixel to the color type of the image + // https://github.com/image-rs/image/blob/2c986d353333d2604f0c3f1fcef262cc763c0001/src/color.rs#L38-L49 + match bytes_per_pixel { + 1 => Ok(DynamicImage::ImageLuma8( + process_image_buffer_from_raw_bytes( + width, + height, + buffer, + bytes_per_pixel, + ), + )), + 2 => Ok( + // NOTE: ImageLumaA8 is also the same bytes per pixel. + DynamicImage::ImageLuma16(process_image_buffer_from_raw_bytes( + width, + height, + buffer, + bytes_per_pixel, + )), + ), + 3 => Ok(DynamicImage::ImageRgb8( + process_image_buffer_from_raw_bytes( + width, + height, + buffer, + bytes_per_pixel, + ), + )), + 4 => Ok( + // NOTE: ImageLumaA16 is also the same bytes per pixel. 
+ DynamicImage::ImageRgba8(process_image_buffer_from_raw_bytes( + width, + height, + buffer, + bytes_per_pixel, + )), + ), + 6 => Ok(DynamicImage::ImageRgb16( + process_image_buffer_from_raw_bytes( + width, + height, + buffer, + bytes_per_pixel, + ), + )), + 8 => Ok(DynamicImage::ImageRgba16( + process_image_buffer_from_raw_bytes( + width, + height, + buffer, + bytes_per_pixel, + ), + )), + 12 => Err(CanvasError::UnsupportedColorType(ColorType::Rgb32F)), + 16 => Err(CanvasError::UnsupportedColorType(ColorType::Rgba32F)), + _ => Err(CanvasError::UnsupportedColorType(ColorType::L8)), + } +} + +#[cfg(test)] +mod tests { + use image::Rgba; + + use super::*; + + #[test] + fn test_premultiply_alpha() { + let rgba = Rgba::([255, 128, 0, 128]); + let rgba = rgba.premultiply_alpha(); + assert_eq!(rgba, Rgba::([128, 64, 0, 128])); + + let rgba = Rgba::([255, 255, 255, 255]); + let rgba = rgba.premultiply_alpha(); + assert_eq!(rgba, Rgba::([255, 255, 255, 255])); + } + + #[test] + fn test_unpremultiply_alpha() { + let rgba = Rgba::([127, 0, 0, 127]); + let rgba = rgba.unpremultiply_alpha(); + assert_eq!(rgba, Rgba::([255, 0, 0, 127])); + } + + #[test] + fn test_process_image_buffer_from_raw_bytes() { + let buffer = &[255, 255, 0, 0, 0, 0, 255, 255]; + let color = ColorType::Rgba16; + let bytes_per_pixel = color.bytes_per_pixel() as usize; + let image = DynamicImage::ImageRgba16(process_image_buffer_from_raw_bytes( + 1, + 1, + buffer, + bytes_per_pixel, + )) + .to_rgba16(); + assert_eq!(image.get_pixel(0, 0), &Rgba::([65535, 0, 0, 65535])); + } +} diff --git a/ext/canvas/lib.deno_canvas.d.ts b/ext/canvas/lib.deno_canvas.d.ts index 84d3cbdd42a449..92c0f7fb9620f1 100644 --- a/ext/canvas/lib.deno_canvas.d.ts +++ b/ext/canvas/lib.deno_canvas.d.ts @@ -42,7 +42,7 @@ type ResizeQuality = "high" | "low" | "medium" | "pixelated"; * used to create an `ImageBitmap`. * * @category Canvas */ -type ImageBitmapSource = Blob | ImageData; +type ImageBitmapSource = Blob | ImageData | ImageBitmap; /** * The options of {@linkcode createImageBitmap}. diff --git a/ext/canvas/lib.rs b/ext/canvas/lib.rs index 91b4e44afe6c80..83aee3cb8102ed 100644 --- a/ext/canvas/lib.rs +++ b/ext/canvas/lib.rs @@ -2,158 +2,49 @@ use std::path::PathBuf; -use deno_core::op2; -use deno_core::ToJsBuffer; -use image::imageops::FilterType; +mod image_ops; +mod op_create_image_bitmap; use image::ColorType; -use image::ImageDecoder; -use image::Pixel; -use image::RgbaImage; -use serde::Deserialize; -use serde::Serialize; +use op_create_image_bitmap::op_create_image_bitmap; #[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CanvasError { + /// Image formats that is 32-bit depth are not supported currently due to the following reasons: + /// - e.g. OpenEXR, it's not covered by the spec. + /// - JPEG XL supported by WebKit, but it cannot be called a standard today. 
+ /// https://github.com/whatwg/mimesniff/issues/143 + /// #[class(type)] - #[error("Color type '{0:?}' not supported")] + #[error("Unsupported color type and bit depth: '{0:?}'")] UnsupportedColorType(ColorType), + #[class("DOMExceptionInvalidStateError")] + #[error("Cannot decode image '{0}'")] + InvalidImage(image::ImageError), + #[class("DOMExceptionInvalidStateError")] + #[error("The chunk data is not big enough with the specified width: {0} and height: {1}")] + NotBigEnoughChunk(u32, u32), + #[class("DOMExceptionInvalidStateError")] + #[error("The width: {0} or height: {1} could not be zero")] + InvalidSizeZero(u32, u32), + #[class(generic)] + #[error(transparent)] + Lcms(#[from] lcms2::Error), #[class(generic)] #[error(transparent)] Image(#[from] image::ImageError), } -#[derive(Debug, Deserialize)] -#[serde(rename_all = "snake_case")] -enum ImageResizeQuality { - Pixelated, - Low, - Medium, - High, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct ImageProcessArgs { - width: u32, - height: u32, - surface_width: u32, - surface_height: u32, - input_x: i64, - input_y: i64, - output_width: u32, - output_height: u32, - resize_quality: ImageResizeQuality, - flip_y: bool, - premultiply: Option, -} - -#[op2] -#[serde] -fn op_image_process( - #[buffer] buf: &[u8], - #[serde] args: ImageProcessArgs, -) -> ToJsBuffer { - let view = - RgbaImage::from_vec(args.width, args.height, buf.to_vec()).unwrap(); - - let surface = if !(args.width == args.surface_width - && args.height == args.surface_height - && args.input_x == 0 - && args.input_y == 0) - { - let mut surface = RgbaImage::new(args.surface_width, args.surface_height); - - image::imageops::overlay(&mut surface, &view, args.input_x, args.input_y); - - surface - } else { - view - }; - - let filter_type = match args.resize_quality { - ImageResizeQuality::Pixelated => FilterType::Nearest, - ImageResizeQuality::Low => FilterType::Triangle, - ImageResizeQuality::Medium => FilterType::CatmullRom, - ImageResizeQuality::High => FilterType::Lanczos3, - }; - - let mut image_out = image::imageops::resize( - &surface, - args.output_width, - args.output_height, - filter_type, - ); - - if args.flip_y { - image::imageops::flip_vertical_in_place(&mut image_out); +impl CanvasError { + /// Convert an [`image::ImageError`] to an [`CanvasError::InvalidImage`]. + fn image_error_to_invalid_image(error: image::ImageError) -> Self { + CanvasError::InvalidImage(error) } - - // ignore 9. - - if let Some(premultiply) = args.premultiply { - let is_not_premultiplied = image_out.pixels().any(|pixel| { - (pixel.0[0].max(pixel.0[1]).max(pixel.0[2])) > (255 * pixel.0[3]) - }); - - if premultiply { - if is_not_premultiplied { - for pixel in image_out.pixels_mut() { - let alpha = pixel.0[3]; - pixel.apply_without_alpha(|channel| { - (channel as f32 * (alpha as f32 / 255.0)) as u8 - }) - } - } - } else if !is_not_premultiplied { - for pixel in image_out.pixels_mut() { - let alpha = pixel.0[3]; - pixel.apply_without_alpha(|channel| { - (channel as f32 / (alpha as f32 / 255.0)) as u8 - }) - } - } - } - - image_out.to_vec().into() -} - -#[derive(Debug, Serialize)] -struct DecodedPng { - data: ToJsBuffer, - width: u32, - height: u32, -} - -#[op2] -#[serde] -fn op_image_decode_png( - #[buffer] buf: &[u8], -) -> Result { - let png = image::codecs::png::PngDecoder::new(buf)?; - - let (width, height) = png.dimensions(); - - // TODO(@crowlKats): maybe use DynamicImage https://docs.rs/image/0.24.7/image/enum.DynamicImage.html ? 
- if png.color_type() != ColorType::Rgba8 { - return Err(CanvasError::UnsupportedColorType(png.color_type())); - } - - // read_image will assert that the buffer is the correct size, so we need to fill it with zeros - let mut png_data = vec![0_u8; png.total_bytes() as usize]; - - png.read_image(&mut png_data)?; - - Ok(DecodedPng { - data: png_data.into(), - width, - height, - }) } deno_core::extension!( deno_canvas, deps = [deno_webidl, deno_web, deno_webgpu], - ops = [op_image_process, op_image_decode_png], + ops = [op_create_image_bitmap], lazy_loaded_esm = ["01_image.js"], ); diff --git a/ext/canvas/op_create_image_bitmap.rs b/ext/canvas/op_create_image_bitmap.rs new file mode 100644 index 00000000000000..cc184dd1f702f7 --- /dev/null +++ b/ext/canvas/op_create_image_bitmap.rs @@ -0,0 +1,563 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::io::BufReader; +use std::io::Cursor; + +use deno_core::op2; +use deno_core::JsBuffer; +use deno_core::ToJsBuffer; +use image::codecs::bmp::BmpDecoder; +// use image::codecs::gif::GifDecoder; +use image::codecs::ico::IcoDecoder; +use image::codecs::jpeg::JpegDecoder; +use image::codecs::png::PngDecoder; +// use image::codecs::webp::WebPDecoder; +use image::imageops::overlay; +use image::imageops::FilterType; +use image::metadata::Orientation; +use image::DynamicImage; +use image::ImageDecoder; +use image::RgbaImage; + +use crate::image_ops::create_image_from_raw_bytes; +use crate::image_ops::premultiply_alpha as process_premultiply_alpha; +use crate::image_ops::to_srgb_from_icc_profile; +use crate::image_ops::unpremultiply_alpha; +use crate::CanvasError; + +#[derive(Debug, PartialEq)] +enum ImageBitmapSource { + Blob, + ImageData, + ImageBitmap, +} + +#[derive(Debug, PartialEq)] +enum ImageOrientation { + FlipY, + FromImage, +} + +#[derive(Debug, PartialEq)] +enum PremultiplyAlpha { + Default, + Premultiply, + None, +} + +#[derive(Debug, PartialEq)] +enum ColorSpaceConversion { + Default, + None, +} + +#[derive(Debug, PartialEq)] +enum ResizeQuality { + Pixelated, + Low, + Medium, + High, +} + +#[derive(Debug, PartialEq)] +enum MimeType { + NoMatch, + Png, + Jpeg, + Gif, + Bmp, + Ico, + Webp, +} + +type DecodeBitmapDataReturn = + (DynamicImage, u32, u32, Option, Option>); + +fn decode_bitmap_data( + buf: &[u8], + width: u32, + height: u32, + image_bitmap_source: &ImageBitmapSource, + mime_type: MimeType, +) -> Result { + let (image, width, height, orientation, icc_profile) = + match image_bitmap_source { + ImageBitmapSource::Blob => { + // + // About the animated image + // > Blob .4 + // > ... If this is an animated image, imageBitmap's bitmap data must only be taken from + // > the default image of the animation (the one that the format defines is to be used when animation is + // > not supported or is disabled), or, if there is no such image, the first frame of the animation. + // https://html.spec.whatwg.org/multipage/imagebitmap-and-animations.html + // + // see also browser implementations: (The implementation of Gecko and WebKit is hard to read.) + // https://source.chromium.org/chromium/chromium/src/+/bdbc054a6cabbef991904b5df9066259505cc686:third_party/blink/renderer/platform/image-decoders/image_decoder.h;l=175-189 + // + let (image, orientation, icc_profile) = match mime_type { + MimeType::Png => { + // If PngDecoder decodes an animated image, it returns the default image if one is set, or the first frame if not. 
+ let mut decoder = PngDecoder::new(BufReader::new(Cursor::new(buf))) + .map_err(CanvasError::image_error_to_invalid_image)?; + let orientation = decoder.orientation()?; + let icc_profile = decoder.icc_profile()?; + ( + DynamicImage::from_decoder(decoder) + .map_err(CanvasError::image_error_to_invalid_image)?, + orientation, + icc_profile, + ) + } + MimeType::Jpeg => { + let mut decoder = + JpegDecoder::new(BufReader::new(Cursor::new(buf))) + .map_err(CanvasError::image_error_to_invalid_image)?; + let orientation = decoder.orientation()?; + let icc_profile = decoder.icc_profile()?; + ( + DynamicImage::from_decoder(decoder) + .map_err(CanvasError::image_error_to_invalid_image)?, + orientation, + icc_profile, + ) + } + MimeType::Gif => { + // NOTE: Temporarily not supported due to build size concerns + // https://github.com/denoland/deno/pull/25517#issuecomment-2626044644 + unimplemented!(); + // The GifDecoder decodes the first frame. + // let mut decoder = GifDecoder::new(BufReader::new(Cursor::new(buf))) + // .map_err(CanvasError::image_error_to_invalid_image)?; + // let orientation = decoder.orientation()?; + // let icc_profile = decoder.icc_profile()?; + // ( + // DynamicImage::from_decoder(decoder) + // .map_err(CanvasError::image_error_to_invalid_image)?, + // orientation, + // icc_profile, + // ) + } + MimeType::Bmp => { + let mut decoder = BmpDecoder::new(BufReader::new(Cursor::new(buf))) + .map_err(CanvasError::image_error_to_invalid_image)?; + let orientation = decoder.orientation()?; + let icc_profile = decoder.icc_profile()?; + ( + DynamicImage::from_decoder(decoder) + .map_err(CanvasError::image_error_to_invalid_image)?, + orientation, + icc_profile, + ) + } + MimeType::Ico => { + let mut decoder = IcoDecoder::new(BufReader::new(Cursor::new(buf))) + .map_err(CanvasError::image_error_to_invalid_image)?; + let orientation = decoder.orientation()?; + let icc_profile = decoder.icc_profile()?; + ( + DynamicImage::from_decoder(decoder) + .map_err(CanvasError::image_error_to_invalid_image)?, + orientation, + icc_profile, + ) + } + MimeType::Webp => { + // NOTE: Temporarily not supported due to build size concerns + // https://github.com/denoland/deno/pull/25517#issuecomment-2626044644 + unimplemented!(); + // The WebPDecoder decodes the first frame. + // let mut decoder = + // WebPDecoder::new(BufReader::new(Cursor::new(buf))) + // .map_err(CanvasError::image_error_to_invalid_image)?; + // let orientation = decoder.orientation()?; + // let icc_profile = decoder.icc_profile()?; + // ( + // DynamicImage::from_decoder(decoder) + // .map_err(CanvasError::image_error_to_invalid_image)?, + // orientation, + // icc_profile, + // ) + } + // This pattern is unreachable due to current block is already checked by the ImageBitmapSource above. + MimeType::NoMatch => unreachable!(), + }; + + let width = image.width(); + let height = image.height(); + + (image, width, height, Some(orientation), icc_profile) + } + ImageBitmapSource::ImageData => { + // > 4.12.5.1.15 Pixel manipulation + // > imagedata.data + // > Returns the one-dimensional array containing the data in RGBA order, as integers in the range 0 to 255. 
+ // https://html.spec.whatwg.org/multipage/canvas.html#pixel-manipulation + let image = match RgbaImage::from_raw(width, height, buf.into()) { + Some(image) => image.into(), + None => { + return Err(CanvasError::NotBigEnoughChunk(width, height)); + } + }; + + (image, width, height, None, None) + } + ImageBitmapSource::ImageBitmap => { + let image = create_image_from_raw_bytes(width, height, buf)?; + + (image, width, height, None, None) + } + }; + + Ok((image, width, height, orientation, icc_profile)) +} + +/// According to the spec, it's not clear how to handle the color space conversion. +/// +/// Therefore, if you interpret the specification description from the implementation and wpt results, it will be as follows. +/// +/// Let val be the value of the colorSpaceConversion member of options, and then run these substeps: +/// 1. If val is "default", to convert to the sRGB color space. +/// 2. If val is "none", to use the decoded image data as is. +/// +/// related issue in whatwg +/// https://github.com/whatwg/html/issues/10578 +/// +/// reference in wpt +/// https://github.com/web-platform-tests/wpt/blob/d575dc75ede770df322fbc5da3112dcf81f192ec/html/canvas/element/manual/imagebitmap/createImageBitmap-colorSpaceConversion.html#L18 +/// https://wpt.live/html/canvas/element/manual/imagebitmap/createImageBitmap-colorSpaceConversion.html +fn apply_color_space_conversion( + image: DynamicImage, + icc_profile: Option>, + color_space_conversion: &ColorSpaceConversion, +) -> Result { + match color_space_conversion { + // return the decoded image as is. + ColorSpaceConversion::None => Ok(image), + ColorSpaceConversion::Default => { + to_srgb_from_icc_profile(image, icc_profile) + } + } +} + +fn apply_premultiply_alpha( + image: DynamicImage, + image_bitmap_source: &ImageBitmapSource, + premultiply_alpha: &PremultiplyAlpha, +) -> Result { + match premultiply_alpha { + // 1. + PremultiplyAlpha::Default => Ok(image), + + // https://html.spec.whatwg.org/multipage/canvas.html#convert-from-premultiplied + + // 2. + PremultiplyAlpha::Premultiply => process_premultiply_alpha(image), + // 3. + PremultiplyAlpha::None => { + // NOTE: It's not clear how to handle the case of ImageData. 
+ // https://issues.chromium.org/issues/339759426 + // https://github.com/whatwg/html/issues/5365 + if *image_bitmap_source == ImageBitmapSource::ImageData { + return Ok(image); + } + + unpremultiply_alpha(image) + } + } +} + +#[derive(Debug, PartialEq)] +struct ParsedArgs { + resize_width: Option, + resize_height: Option, + sx: Option, + sy: Option, + sw: Option, + sh: Option, + image_orientation: ImageOrientation, + premultiply_alpha: PremultiplyAlpha, + color_space_conversion: ColorSpaceConversion, + resize_quality: ResizeQuality, + image_bitmap_source: ImageBitmapSource, + mime_type: MimeType, +} + +#[allow(clippy::too_many_arguments)] +fn parse_args( + sx: i32, + sy: i32, + sw: i32, + sh: i32, + image_orientation: u8, + premultiply_alpha: u8, + color_space_conversion: u8, + resize_width: u32, + resize_height: u32, + resize_quality: u8, + image_bitmap_source: u8, + mime_type: u8, +) -> ParsedArgs { + let resize_width = if resize_width == 0 { + None + } else { + Some(resize_width) + }; + let resize_height = if resize_height == 0 { + None + } else { + Some(resize_height) + }; + let sx = if sx == 0 { None } else { Some(sx) }; + let sy = if sy == 0 { None } else { Some(sy) }; + let sw = if sw == 0 { None } else { Some(sw) }; + let sh = if sh == 0 { None } else { Some(sh) }; + + // Their unreachable wildcard patterns are validated in JavaScript-side. + let image_orientation = match image_orientation { + 0 => ImageOrientation::FromImage, + 1 => ImageOrientation::FlipY, + _ => unreachable!(), + }; + let premultiply_alpha = match premultiply_alpha { + 0 => PremultiplyAlpha::Default, + 1 => PremultiplyAlpha::Premultiply, + 2 => PremultiplyAlpha::None, + _ => unreachable!(), + }; + let color_space_conversion = match color_space_conversion { + 0 => ColorSpaceConversion::Default, + 1 => ColorSpaceConversion::None, + _ => unreachable!(), + }; + let resize_quality = match resize_quality { + 0 => ResizeQuality::Low, + 1 => ResizeQuality::Pixelated, + 2 => ResizeQuality::Medium, + 3 => ResizeQuality::High, + _ => unreachable!(), + }; + let image_bitmap_source = match image_bitmap_source { + 0 => ImageBitmapSource::Blob, + 1 => ImageBitmapSource::ImageData, + 2 => ImageBitmapSource::ImageBitmap, + _ => unreachable!(), + }; + let mime_type = match mime_type { + 0 => MimeType::NoMatch, + 1 => MimeType::Png, + 2 => MimeType::Jpeg, + 3 => MimeType::Gif, + 4 => MimeType::Bmp, + 5 => MimeType::Ico, + 6 => MimeType::Webp, + _ => unreachable!(), + }; + ParsedArgs { + resize_width, + resize_height, + sx, + sy, + sw, + sh, + image_orientation, + premultiply_alpha, + color_space_conversion, + resize_quality, + image_bitmap_source, + mime_type, + } +} + +#[op2] +#[serde] +#[allow(clippy::too_many_arguments)] +pub(super) fn op_create_image_bitmap( + #[buffer] buf: JsBuffer, + width: u32, + height: u32, + sx: i32, + sy: i32, + sw: i32, + sh: i32, + image_orientation: u8, + premultiply_alpha: u8, + color_space_conversion: u8, + resize_width: u32, + resize_height: u32, + resize_quality: u8, + image_bitmap_source: u8, + mime_type: u8, +) -> Result<(ToJsBuffer, u32, u32), CanvasError> { + let ParsedArgs { + resize_width, + resize_height, + sx, + sy, + sw, + sh, + image_orientation, + premultiply_alpha, + color_space_conversion, + resize_quality, + image_bitmap_source, + mime_type, + } = parse_args( + sx, + sy, + sw, + sh, + image_orientation, + premultiply_alpha, + color_space_conversion, + resize_width, + resize_height, + resize_quality, + image_bitmap_source, + mime_type, + ); + + // 6. 
Switch on image: + let (image, width, height, orientation, icc_profile) = + decode_bitmap_data(&buf, width, height, &image_bitmap_source, mime_type)?; + + // crop bitmap data + // 2. + #[rustfmt::skip] + let source_rectangle: [[i32; 2]; 4] = + if let (Some(sx), Some(sy), Some(sw), Some(sh)) = (sx, sy, sw, sh) { + [ + [sx, sy], + [sx + sw, sy], + [sx + sw, sy + sh], + [sx, sy + sh] + ] + } else { + [ + [0, 0], + [width as i32, 0], + [width as i32, height as i32], + [0, height as i32], + ] + }; + + /* + * The cropping works differently than the spec specifies: + * The spec states to create an infinite surface and place the top-left corner + * of the image a 0,0 and crop based on sourceRectangle. + * + * We instead create a surface the size of sourceRectangle, and position + * the image at the correct location, which is the inverse of the x & y of + * sourceRectangle's top-left corner. + */ + let input_x = -(source_rectangle[0][0] as i64); + let input_y = -(source_rectangle[0][1] as i64); + + let surface_width = (source_rectangle[1][0] - source_rectangle[0][0]) as u32; + let surface_height = (source_rectangle[3][1] - source_rectangle[0][1]) as u32; + + // 3. + let output_width = if let Some(resize_width) = resize_width { + resize_width + } else if let Some(resize_height) = resize_height { + (surface_width * resize_height).div_ceil(surface_height) + } else { + surface_width + }; + + // 4. + let output_height = if let Some(resize_height) = resize_height { + resize_height + } else if let Some(resize_width) = resize_width { + (surface_height * resize_width).div_ceil(surface_width) + } else { + surface_height + }; + + // 5. + let image = if !(width == surface_width + && height == surface_height + && input_x == 0 + && input_y == 0) + { + let mut surface = + DynamicImage::new(surface_width, surface_height, image.color()); + overlay(&mut surface, &image, input_x, input_y); + + surface + } else { + image + }; + + // 7. + let filter_type = match resize_quality { + ResizeQuality::Pixelated => FilterType::Nearest, + ResizeQuality::Low => FilterType::Triangle, + ResizeQuality::Medium => FilterType::CatmullRom, + ResizeQuality::High => FilterType::Lanczos3, + }; + // should use resize_exact + // https://github.com/image-rs/image/issues/1220#issuecomment-632060015 + let mut image = image.resize_exact(output_width, output_height, filter_type); + + // 8. + let image = match image_bitmap_source { + ImageBitmapSource::Blob => { + // Note: According to browser behavior and wpt results, if Exif contains image orientation, + // it applies the rotation from it before following the value of imageOrientation. + // This is not stated in the spec but in MDN currently. + // https://github.com/mdn/content/pull/34366 + + // SAFETY: The orientation is always Some if the image is from a Blob. + let orientation = orientation.unwrap(); + DynamicImage::apply_orientation(&mut image, orientation); + + match image_orientation { + ImageOrientation::FlipY => image.flipv(), + ImageOrientation::FromImage => image, + } + } + ImageBitmapSource::ImageData | ImageBitmapSource::ImageBitmap => { + match image_orientation { + ImageOrientation::FlipY => image.flipv(), + ImageOrientation::FromImage => image, + } + } + }; + + // 9. + let image = + apply_color_space_conversion(image, icc_profile, &color_space_conversion)?; + + // 10. 
+ let image = + apply_premultiply_alpha(image, &image_bitmap_source, &premultiply_alpha)?; + + Ok((image.into_bytes().into(), output_width, output_height)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_args() { + let parsed_args = parse_args(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); + assert_eq!( + parsed_args, + ParsedArgs { + resize_width: None, + resize_height: None, + sx: None, + sy: None, + sw: None, + sh: None, + image_orientation: ImageOrientation::FromImage, + premultiply_alpha: PremultiplyAlpha::Default, + color_space_conversion: ColorSpaceConversion::Default, + resize_quality: ResizeQuality::Low, + image_bitmap_source: ImageBitmapSource::Blob, + mime_type: MimeType::NoMatch, + } + ); + } +} diff --git a/ext/web/01_mimesniff.js b/ext/web/01_mimesniff.js index 9a687a830562ee..97ff17ea314314 100644 --- a/ext/web/01_mimesniff.js +++ b/ext/web/01_mimesniff.js @@ -395,6 +395,10 @@ const ImageTypePatternTable = [ /** * Ref: https://mimesniff.spec.whatwg.org/#image-type-pattern-matching-algorithm + * NOTE: Some browsers have implementation-defined image formats. + * For example, The AVIF image format is supported by all browsers today. + * However, the standardization seems to have hard going. + * See: https://github.com/whatwg/mimesniff/issues/143 * @param {Uint8Array} input * @returns {string | undefined} */ diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index 2971fd2c007ae5..190de549d1bca0 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -361,6 +361,12 @@ core.registerErrorBuilder( return new DOMException(msg, "DataError"); }, ); +core.registerErrorBuilder( + "DOMExceptionInvalidStateError", + function DOMExceptionInvalidStateError(msg) { + return new DOMException(msg, "InvalidStateError"); + }, +); function runtimeStart( denoVersion, diff --git a/tests/testdata/image/1x1-2f-animated-has-def.png b/tests/testdata/image/1x1-2f-animated-has-def.png new file mode 100644 index 0000000000000000000000000000000000000000..d460137ce83541cf6b400dc85dcccf3ef7fa89eb GIT binary patch literal 233 zcmeAS@N?(olHy`uVBq!ia0vp^j3CUx1|;Q0k8}blj>O~;A0W*P#0=-&?m7UZ1Uy|F zLn;^O~;A0W*P#0=-&?m7UZq|zW_ zAR({@5CaSt8RQrk90ROtfII#sA!Xt|7tBjsdPldIrplKw-t7EG$MqS_h;AtoT2O#lXbm S!iYtZ0W6D45-barv<3ipl@L|{ literal 0 HcmV?d00001 diff --git a/tests/testdata/image/1x1-3f-lossless-animated-semi-transparent.webp b/tests/testdata/image/1x1-3f-lossless-animated-semi-transparent.webp new file mode 100644 index 0000000000000000000000000000000000000000..15d584d109e573eb3768494b6b795c8e804cb4fb GIT binary patch literal 188 zcmWIYbaUInz`zjh>J$(bU=hIuWD5Z?1UUM6`mzC;|AByk!O_pxO#>zcrBfIfn1K3x i_Zo5i847+;M1bZq{>Nt@!mGF&{GbS@xyAN3C`Sg%0B>R6I@~d literal 0 HcmV?d00001 diff --git a/tests/testdata/image/1x1-red8.bmp b/tests/testdata/image/1x1-red8.bmp new file mode 100644 index 0000000000000000000000000000000000000000..c28d7968f81957dcb715fed9d9de82aea62ddb58 GIT binary patch literal 126 vcmZ?rtz&?IDj<~u#EfvPz`zV-vj8zB_|L!qK_IFBPVPa)m}&@#{QnOCMGy&Q literal 0 HcmV?d00001 diff --git a/tests/testdata/image/1x1-red8.gif b/tests/testdata/image/1x1-red8.gif new file mode 100644 index 0000000000000000000000000000000000000000..0e5a2d361d355d72f7b16e61351c522b672382a3 GIT binary patch literal 49 ocmZ?wbhEHbWMp7uXkdT>#h)x3Af^t80L%Obu^5<`To@Uw0h$5@f&c&j literal 0 HcmV?d00001 diff --git a/tests/testdata/image/1x1-red8.ico b/tests/testdata/image/1x1-red8.ico new file mode 100644 index 0000000000000000000000000000000000000000..4cdfe144bd9f7e3bd92f5955e0c16e0caa9e0a85 GIT binary patch literal 95 
zcmZQzU<5%%1|U#i@C1@#K-?MN=g!L|#RX*YdV0770cns5Mh-S0DKC1Y6G#box;TbZ jFfy_-{QA%EpMl{&D+A;IUojmQfpQFECr+Nabot8F zYu9hwy!G(W<0ns_J%91?)yGetzkL1n{m0K=Ab&A3FhjfrBq1I{^A|8W7@1gDm|56C z{$gY*2V!PH7FI<=HX+AA_QXPAC8I_T5vPd@Hy-3vHV*nAnpAX=OH9S&q3TDF*T6m_ k&SOnv`3&wcguiYv@Gvt1lM%BZgFVBe*U$Lx`2W8N07nYW<^TWy literal 0 HcmV?d00001 diff --git a/tests/testdata/image/1x1-red8.png b/tests/testdata/image/1x1-red8.png new file mode 100644 index 0000000000000000000000000000000000000000..8783fe799acaafe6e1ab44c34235122ace9f54e2 GIT binary patch literal 73 zcmeAS@N?(olHy`uVBq!ia0vp^j3CUx1|;Q0k8}bl0Z$jlkP1ddR)$~y8U8ac{AXog V{QoPa<04Rw!PC{xWt~$(69C@x5WN5Z literal 0 HcmV?d00001 diff --git a/tests/testdata/image/1x1-red8.webp b/tests/testdata/image/1x1-red8.webp new file mode 100644 index 0000000000000000000000000000000000000000..1c35f348fbfa4dfb87d9af9706be8a1d36c2624a GIT binary patch literal 34 mcmWIYbaRtpU|&ZX<|Nmdl!15tw55 d|G%Drq29@t?Pq56TV9|-22WQ%mvv4FO#tF66*d3> literal 0 HcmV?d00001 diff --git a/tests/testdata/image/squares_6.jpg b/tests/testdata/image/squares_6.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f197760a111e07c54419d1993130aa1fee5982e8 GIT binary patch literal 1227 zcmdUrKS%>H6vkgJcV}{UZF6l!s30zac2z-fu%H$#R8Xpj;-VlbNL@s6a&vN$x(KBp z6?Jy3P{FyVi+=|TE>1eQ%9;2M|KQ}Lej&LZ-+k}p5%1Ew1CdG&B!LhBA{Tg7nCe@Z zn*m6tAr1fx3=#$^iGV4O7#MF1kRtW(JWQJXjO?LBK3Hazr2DbG8|VS5DXEi6>87s4 zOw%wqvn~30b6MlL`cuTnz{p_&v1LA!^)cNvdWI$x_lJ%UNn38B7g5_P zCZ7@Y^N|IC_*-q4@?E7Df5x53<9tB*JY1nw&51!Sk;xONQjAG;y%>J@OnvupqW literal 0 HcmV?d00001 diff --git a/tests/testdata/image/wide-gamut-pattern.png b/tests/testdata/image/wide-gamut-pattern.png new file mode 100644 index 0000000000000000000000000000000000000000..f35cd4a2e1628bf0b6cd0bb58ea1c3b0d51f0e45 GIT binary patch literal 5320 zcma)=S2P?9w1sCFb=0UqFh&_=)KN$8ov2}SB1RV_I+2LpTM#WIzYr~IbkVyQAvz%# zz4vl+U+>d>*xz1jKb^JC%Q-tnUssKkn1L7o0FY{^D;xZC>i-o5{y#20?$QMS9?hba zl=L->j18=Xg#-ny^`Ba+8f$9<08X#>vVDz==g<4;qW1Uq$*`&a=$?|EnM^?hd@3(5 zufZ*poJ^qrDoM%Aq)17Z?SNmjFF&~uU%E*UU+OscDzjm=Tt98$J??wXycXQ|9zZCU zVrrnTY76L`=jt4nH-6nUvJe5h;It~Ew$M=b6Ym&>K zhIcJ*8RkddBbf6tA6^_Byjxx0Js>>eP4-1zU*pL`3Y6x!Bs%FP1sT-SB${WD3g&=G zY2a9*dvZerBGI(45m~YrdNTm#k>9EI1i-kdL4P4Sd3b0hk1F~8j+b~6W3qkk^sR{= zlj`H{chWQg@e4tU;pEi>OB3->gjjeTdEEvH+I~Qfh#p9#o~yW{Yg!w-f`1q>LOxk$ zio{juG-iLbLLOWs8U*9ST?x07R1&&kg%g4*(Jc@Q zl+VX2T6aU(=86{oAY4wb*0WGu$=b zm({D+T>xESJ>+tND03QdX>%|^*dW4?Ky`$Iwx+9kaDjg~C`~zrJ$4;F$jmMNg$m7# zg+1oXW-_BNqnX8#y|5jg2>J;R=2*|vDL`nG^F;%(d^ zBq5@A?6|_XEN>*0_>@Sy*`dNvfnsiZggwog1oT5p+F&yGhcD^qWbE7Q7_&6V9Fh~iM=Q={pJ^>cGk{fED)GsZMVHs;HZ!|%n9ssvRsR1#H!-A&x#`+$AvecgQ> zW<_QqW@J7n-#nivA8ch}1-A-Yl3vnXB9+CF^^!%k`?ou{d$oIqI);+o=iW-*?%ghf z4nfMGuXukzWgu0MZl#cwVT<3I`FZ`T852A&P$USBXAj~9MS+qcw<3++wnj1&))T_P z_Fyn^E?5nm1tuqccvM97l=g^r@DZBQjJB7ShFX*^ffi2pazwKCa(@z_VKZj`#aqv- z!(G9;2g!phvbP04xoMv89p3SpDX?Z~359g?6ho=l4PhAQiD9OJiPf}{m{Nn8p9-0& zuJV{Fze1(1yCyT|INXDs1!B!32Mhb!Sz1^`QJ7pcQBhs2{-v~V>&vD5QQn{CM@6aW zsc8mf2E~&fg&V#_e@l4z@fiHqj0QmiqycA0WH4lC7{BL9D*NJG=bW+K#Gt=sAYu?_ zP_EBt*kZJ4ByNyoRHe^i80F%LrbjEIjhy3MaCF>ts7C2Wokum`4)9PoxB_W@W?nl- zKc_g?FgG|?RiEW^=)>-7;Uny0?knb_;Y;Ha;^Xbx;(Kwrb%r>LILqHA#aeE8Z|83H zZ_jLxGuSXhGe|I$FxD}+GNdx{JYGWZA$G;3h3v$Ig=59aL~}&?glWb7gvJp2f=fb0 zqCXKtLfj&c#Z`o85cMKDqHdy#!o|XdB49CIQAQCpF*-z}xSqI&$b+qdOVM*m$ERo^ zw-_|HJJ#`P;9LK37ENwOjB)EPmvT`TVHcVm)*XW#B1RuZAx6_&H8B=9T~|prxNDjl z%C&LNY0qcZeg4-xe3ojSca~r_YgSLvMp7N=kEE6&L9!uZC7Pu+B+ezRB#w}?$mf#2 z$UF&eDA~x{_63emn_rQ2x^gw5~V&lo`;!4}T(x&}-+B#;#+yBkY zySs!aas1$v5RQ_tb@{rpF<*M;nPOxdrvsnbrkz-#SV<3c?~E>;Nj^R`Pc8 
zL7iTO@`Wgyrz=Jjw#lmc+WOuT3W6ThTAsFt>+l~ahDJM&`Xwo1O2Q*TZlY{32l+2@ z9kM$L{2%PyI6s`b#eSImaO?K&?uQbxA)tStQf&Ney--{>7ii23YiU(BPS|INqj;vkG;agjIf}{UgT2Mmg&AbsE53vEh3t5)IPt{s zSQZ^t%?UkqJ$mgPttky{4JP#;I+L2sSyN$j7nSFW=`96mN2$JPn6#*U$|aGeEME`l zNWNY^4zA~%8DhW0>S4Z;38JH-6~cQ#Qen;3*X5^cHwG8bgI5lPj-}5RhUrJx#}K0< z!xjT}gD;26#^yp;uUU5$g2bE~RQ&?aFO{~V{K&OfLc`cEx2B65n5LLUKg56dV)4#m zW+7*xTN>Ni)Y{(PlLTkUl3p638zLV*;OXQR#x3K2^Z%21FdsF2@>E7&3AJX4F^@yJSfUKURhWt(gGSS; zX9dO`=Zd>3lX2PjOYi1Mp#o;sOM$Z&vHQP9O)aaqt%BO!gOqOj&UCb(3dP>`7}RZ} zuE(<2s;H!>s3;Yp1<_bXQQy>9SD#uR*cj@scEZ16Y0c=3dN%Kb)C;n>v50a~Gw%=l zPw|D-=4#O51&!r^rI=*`OSIf&@YbFqwtZ1%9y*;idva1d-5+Z;Wfis5pjTI*D5rBC zbN>8n?`-F6hHQn@pZqjzqFHxTZapJ=fT7hWQHLlqa%&zrn+3o>U}yPfeWZJ z0vukg999(o_^#Irrd29q=`4%^os;(_7nkCj4=3gW4`}s=psq@OhXEqILL5XmnIM3( z6Gs9-w}PuEPci_&E5!E+C#uHL?F55f!;BN6P`qN?HNrFk4Df_ z<47dZr5%IuQ&s}q@%LfOMEOKZ(ALMnQo#hb2DdsmzzF#WM@4$&5bWDD<#^?NWr66i z7-QCrT!<~#5i~$8R>xWOI^KunV;kaIz)P!R&*RX4Pl7WtoVx z<+j~6lz}uyF$dS&cMfMRM9Q~Wu^Ac&zkYOWh#&HfSe0GXyDyMao0H(PqvIn-j}7L3 z)9PmmnS@TWPt%A!ZeVIq@{>8|A@d|5yD7b+zx$I4^MaFyKN z(jLX$&%GSI-o1JpeC#>wU)Ys6UX+xS9G8rhRFt49F&0TpvRvMlpXE0UrcD}j5*79v z7CyGN2G`(dcPXs4vDMmr_!FD@ZBpd9(Q~@L?lgWGpbU>P=d!=f^v>?iED#`>7dU^9}h~G(VY$yZq+t0muEKq zwhqsy9JJBF!~Q>%d)P4!<3{6k(!UW>wok3Kt&Jf@!Nehn9$U+$ zr;%6Y*J2Aj-?2Wmy-7Xe!&SotbIZ#e0^!`O>^2g~P(dC~wrDPxn3L2qRtbpze&$3) zOCznKvSO+dtx~VzK_q`z@9azT-W4&3X-UO@-H`7nzs9LE)lMi($Z~v=!IA#;tpVoubJP=a0|q?EV-% zGBt2AShn15v3i9{^-8TsD^D9gKraRb9|Xl34K1c)jmD->Uu4h@=rJ@Yw%Vud^v9{& z*0MPEK8s|U2X5%zN2_t)>(81cl@`3p+(#=;PtGMBAF@@zL#@`;bSD z+h=rx>zG@(ndzDEw&XD3a(KQAN!An7)`jff4JGj*?L9 z^-JMFajZnP$W;JwfM)PeP??jn9Yc`0pOj5=aG?7;Pt^cUhtGkIUYR~7PyU1lMuT9i zw9z>(?@_t`eVEBf%JFz_eWQ6~eB!b`b6v#?b^vdKZNZ{oaGY|SW?WYsO`M2Ula{bn zyH>B3{c!29-LU)c_u-J?P2OPM23{=h6W*Zmk#f~?uktFBndyOPqnU`Ap&8B&TY1px zJ$w^&(3s{s=6iD1bJlluMtV&8E1@u9GT}%kU1wFNe>8qHakKz+ijqQ6qKr|PWucC^ zkXKjvXLolOi*DC$dwYj_LwkcPPI7O&O7e3DsiyKP%)D-QV_l6O~q_wDXy$R+UNHCAC)VYKTMGFP-x zbY?rbI@}!z-G%;yMir+Q_Z9!O_U~X1mAuou`E}}Y_I~Br@$V~%y(X;x8R$XzPY;&; ztmMc6OTB4yI<5td_SAa%*1r`1mTUs`#S%iBt>;pGn3R`#UFuJ2;;@ zks7T787jg*Q*tKplNp-ycXmenvK7RGH(5j|F>O%#pe@KcImj@}J)ND(AE~w~gQs4n z<6lvbE2s(Ikq_rNIIB&o=2Dd8;$9d+2w_4vPIA6Vg23&Zfb=F}4FSQJ8m6Yv&g4UC zSSfK2tvt!P$cIilVDqhLVNRO-SG=AL{j&Siyqq4fcwk&^Cl1EgIHG^YU^RXOQxk0&)jh z|B{Q(?L$)Lr)~sgQeR7EUEE79{VW};(U{HtMElS}%a-u|z|rQCMIm)jnfqPT$^)Vh zk62#D!I4(g10TOn@#8ML2Ia!*_)qd+6G6dST=UZ^7N%| { + await t.step('"ImageData" imageOrientation: "flipY"', async () => { + const data = generateNumberedData(9); + const imageData = new ImageData(data, 3, 3); + const imageBitmap = await createImageBitmap(imageData, { + imageOrientation: "flipY", + }); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([ + 7, 0, 0, 1, 8, 0, 0, 1, 9, 0, 0, 1, + 4, 0, 0, 1, 5, 0, 0, 1, 6, 0, 0, 1, + 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, + ])); + }); + + const imageData = new Blob( + [await Deno.readFile(`${prefix}/squares_6.jpg`)], + { type: "image/jpeg" }, + ); + const WIDTH = 320; + const CHANNELS = 3; + const TARGET_PIXEL_X = 40; + const START = TARGET_PIXEL_X * WIDTH * CHANNELS; + const END = START + CHANNELS; + // reference: + // https://github.com/web-platform-tests/wpt/blob/a1f4bbf4c6e1a9a861a145a34cd097ea260b5a49/html/canvas/element/manual/imagebitmap/createImageBitmap-exif-orientation.html#L30 + await t.step('"Blob" imageOrientation: "from-image"', async () => { + const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + const 
targetPixel = Deno[Deno.internal].getBitmapData(imageBitmap).slice( + START, + END, + ); + assertEquals(targetPixel, new Uint8Array([253, 0, 0])); + }); + // reference: + // https://github.com/web-platform-tests/wpt/blob/a1f4bbf4c6e1a9a861a145a34cd097ea260b5a49/html/canvas/element/manual/imagebitmap/createImageBitmap-exif-orientation.html#L55 + await t.step('"Blob" imageOrientation: "flipY"', async () => { + const imageBitmap = await createImageBitmap(imageData, { + imageOrientation: "flipY", + }); + // @ts-ignore: Deno[Deno.internal].core allowed + const targetPixel = Deno[Deno.internal].getBitmapData(imageBitmap).slice( + START, + END, + ); + assertEquals(targetPixel, new Uint8Array([253, 127, 127])); }); - // @ts-ignore: Deno[Deno.internal].core allowed - // deno-fmt-ignore - assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([ - 7, 0, 0, 1, 8, 0, 0, 1, 9, 0, 0, 1, - 4, 0, 0, 1, 5, 0, 0, 1, 6, 0, 0, 1, - 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, - ])); }); -Deno.test(async function imageBitmapFromBlob() { - const path = "tests/testdata/image/1x1-white.png"; - const imageData = new Blob([await Deno.readFile(path)], { - type: "image/png", +Deno.test("imageBitmapPremultiplyAlpha", async (t) => { + const imageData = new ImageData( + new Uint8ClampedArray([ + 255, + 255, + 0, + 153, + ]), + 1, + 1, + ); + await t.step('"ImageData" premultiplyAlpha: "default"', async () => { + const imageBitmap = await createImageBitmap(imageData, { + premultiplyAlpha: "default", + }); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([ + 255, 255, 0, 153, + ])); + }); + await t.step('"ImageData" premultiplyAlpha: "premultiply"', async () => { + const imageBitmap = await createImageBitmap(imageData, { + premultiplyAlpha: "premultiply", + }); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([ + 153, 153, 0, 153 + ])); + }); + await t.step('"ImageData" premultiplyAlpha: "none"', async () => { + const imageBitmap = await createImageBitmap(imageData, { + premultiplyAlpha: "none", + }); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([ + 255, 255, 0, 153, + ])); + }); + await t.step('"Blob" premultiplyAlpha: "none"', async () => { + const imageData = new Blob( + [await Deno.readFile(`${prefix}/2x2-transparent8.png`)], + { type: "image/png" }, + ); + const imageBitmap = await createImageBitmap(imageData, { + premultiplyAlpha: "none", + }); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([ + 255, 0, 0, 255, 0, 255, 0, 255, + 0, 0, 255, 255, 255, 0, 0, 127 + ])); + }); +}); + +Deno.test("imageBitmapFromBlob", async (t) => { + await t.step("8-bit png", async () => { + const imageData = new Blob( + [await Deno.readFile(`${prefix}/1x1-red8.png`)], + { type: "image/png" }, + ); + const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255])); + }); + await t.step("16-bit png", async () => { + const imageData = new Blob( + [await Deno.readFile(`${prefix}/1x1-red16.png`)], + { type: "image/png" }, + ); + const imageBitmap = await 
createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), + // deno-fmt-ignore + new Uint8Array( + [ + 255, 255, // R + 0, 0, // G + 0, 0, // B + 255, 255 // A + ] + ) + ); + }); + await t.step("8-bit jpeg", async () => { + const imageData = new Blob( + [await Deno.readFile(`${prefix}/1x1-red8.jpeg`)], + { type: "image/jpeg" }, + ); + const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([254, 0, 0])); + }); + await t.step("8-bit bmp", async () => { + const imageData = new Blob( + [await Deno.readFile(`${prefix}/1x1-red8.bmp`)], + { type: "image/bmp" }, + ); + const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255])); + }); + await t.step("8-bit gif", async () => { + const imageData = new Blob( + [await Deno.readFile(`${prefix}/1x1-red8.gif`)], + { type: "image/gif" }, + ); + await assertRejects(() => createImageBitmap(imageData), DOMException); + // TODO(Hajime-san): remove the comment out when the implementation is ready + // const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + // assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255])); + }); + await t.step("8-bit webp", async () => { + const imageData = new Blob( + [await Deno.readFile(`${prefix}/1x1-red8.webp`)], + { type: "image/webp" }, + ); + await assertRejects(() => createImageBitmap(imageData), DOMException); + // TODO(Hajime-san): remove the comment out when the implementation is ready + // const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + // assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255])); + }); + await t.step("8-bit ico", async () => { + const imageData = new Blob( + [await Deno.readFile(`${prefix}/1x1-red8.ico`)], + { type: "image/x-icon" }, + ); + const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255])); + }); + await t.step("flotat-32-bit exr", async () => { + // image/x-exr is a known mimetype for OpenEXR + // https://www.digipres.org/formats/sources/fdd/formats/#fdd000583 + const imageData = new Blob([ + await Deno.readFile(`${prefix}/1x1-red32f.exr`), + ], { type: "image/x-exr" }); + await assertRejects(() => createImageBitmap(imageData), DOMException); + }); +}); + +Deno.test("imageBitmapFromBlobAnimatedImage", async (t) => { + await t.step("animated png has a default image", async () => { + // the chunk of animated apng is below (2 frames, 1x1, 8-bit, RGBA), default [255, 0, 0, 255] image + // [ 0, 255, 0, 255, + // 0, 0, 255, 255 ] + const imageData = new Blob([ + await Deno.readFile(`${prefix}/1x1-2f-animated-has-def.png`), + ], { type: "image/png" }); + const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255])); + }); + await 
t.step("animated png does not have any default image", async () => { + // the chunk of animated apng is below (3 frames, 1x1, 8-bit, RGBA) + // [ 255, 0, 0, 255, + // 0, 255, 0, 255, + // 0, 0, 255, 255 ] + const imageData = new Blob([ + await Deno.readFile(`${prefix}/1x1-3f-animated-no-def.png`), + ], { type: "image/png" }); + const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255])); + }); + await t.step("animated webp", async () => { + // the chunk of animated webp is below (3 frames, 1x1, 8-bit, RGBA) + // + // [ 255, 0, 0, 127, + // 0, 255, 0, 127, + // 0, 0, 255, 127 ] + const imageData = new Blob([ + await Deno.readFile( + `${prefix}/1x1-3f-lossless-animated-semi-transparent.webp`, + ), + ], { type: "image/webp" }); + await assertRejects(() => createImageBitmap(imageData), DOMException); + // TODO(Hajime-san): remove the comment out when the implementation is ready + // const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + // assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 127])); + }); + await t.step("animated gif", async () => { + // the chunk of animated gif is below (3 frames, 1x1, 8-bit, RGBA) + // [ 255, 0, 0, 255, + // 0, 255, 0, 255, + // 0, 0, 255, 255 ] + const imageData = new Blob([ + await Deno.readFile(`${prefix}/1x1-3f-animated.gif`), + ], { type: "image/gif" }); + await assertRejects(() => createImageBitmap(imageData), DOMException); + // TODO(Hajime-san): remove the comment out when the implementation is ready + // const imageBitmap = await createImageBitmap(imageData); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + // assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255])); + }); +}); + +/** + * extract high bytes from Uint16Array + */ +function extractHighBytes(array: Uint8Array): Uint8Array { + const highBytes = new Uint8Array(array.length / 2); + for (let i = 0, j = 1; i < array.length; i++, j += 2) { + highBytes[i] = array[j]; + } + return highBytes; +} + +Deno.test("imageBitmapFromBlobColorspaceConversion", async (t) => { + // reference: + // https://github.com/web-platform-tests/wpt/blob/d575dc75ede770df322fbc5da3112dcf81f192ec/html/canvas/element/manual/imagebitmap/createImageBitmap-colorSpaceConversion.html#L18 + // https://wpt.fyi/results/html/canvas/element/manual/imagebitmap/createImageBitmap-colorSpaceConversion.html?label=experimental&label=master&aligned + await t.step('"Blob" colorSpaceConversion: "none"', async () => { + const imageData = new Blob([ + await Deno.readFile(`${prefix}/wide-gamut-pattern.png`), + ], { type: "image/png" }); + const imageBitmap = await createImageBitmap(imageData, { + colorSpaceConversion: "none", + }); + // @ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + const firstPixel = extractHighBytes(Deno[Deno.internal].getBitmapData(imageBitmap)).slice(0, 4); + // picking the high bytes of the first pixel + assertEquals(firstPixel, new Uint8Array([123, 0, 27, 255])); + }); + await t.step('"Blob" colorSpaceConversion: "default"', async () => { + const imageData = new Blob([ + await Deno.readFile(`${prefix}/wide-gamut-pattern.png`), + ], { type: "image/png" }); + const imageBitmap = await createImageBitmap(imageData, { + colorSpaceConversion: "default", + }); + // 
@ts-ignore: Deno[Deno.internal].core allowed + // deno-fmt-ignore + const firstPixel = extractHighBytes(Deno[Deno.internal].getBitmapData(imageBitmap)).slice(0, 4); + // picking the high bytes of the first pixel + assertEquals(firstPixel, new Uint8Array([255, 0, 0, 255])); }); - const imageBitmap = await createImageBitmap(imageData); - // @ts-ignore: Deno[Deno.internal].core allowed - // deno-fmt-ignore - assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255,255,255,255])); }); From 8a07d38a535043ea6e6ec02c30b182f9064c970b Mon Sep 17 00:00:00 2001 From: HasanAlrimawi <141642411+HasanAlrimawi@users.noreply.github.com> Date: Wed, 5 Feb 2025 17:49:10 +0200 Subject: [PATCH 05/17] chore: move bench test to spec test (#27970) --- tests/integration/bench_tests.rs | 15 --------------- tests/specs/bench/file_protocol/__test__.jsonc | 8 ++++++++ .../bench/file_protocol}/file_protocol.out | 4 ++-- .../bench/file_protocol}/file_protocol.ts | 0 4 files changed, 10 insertions(+), 17 deletions(-) create mode 100644 tests/specs/bench/file_protocol/__test__.jsonc rename tests/{testdata/bench => specs/bench/file_protocol}/file_protocol.out (80%) rename tests/{testdata/bench => specs/bench/file_protocol}/file_protocol.ts (100%) diff --git a/tests/integration/bench_tests.rs b/tests/integration/bench_tests.rs index a87fbd28c943d7..b462cd465ebf77 100644 --- a/tests/integration/bench_tests.rs +++ b/tests/integration/bench_tests.rs @@ -2,7 +2,6 @@ use serde_json::json; use test_util as util; -use url::Url; use util::assert_contains; use util::assert_not_contains; use util::TestContext; @@ -22,20 +21,6 @@ fn recursive_permissions_pledge() { ); } -#[test] -fn file_protocol() { - let file_url = - Url::from_file_path(util::testdata_path().join("bench/file_protocol.ts")) - .unwrap() - .to_string(); - let context = TestContext::default(); - context - .new_command() - .args(format!("bench bench/file_protocol.ts {file_url}")) - .run() - .assert_matches_file("bench/file_protocol.out"); -} - #[test] fn conditionally_loads_type_graph() { let context = TestContext::default(); diff --git a/tests/specs/bench/file_protocol/__test__.jsonc b/tests/specs/bench/file_protocol/__test__.jsonc new file mode 100644 index 00000000000000..a9abbef0f910bb --- /dev/null +++ b/tests/specs/bench/file_protocol/__test__.jsonc @@ -0,0 +1,8 @@ +{ + "args": [ + "bench", + "file_protocol.ts" + ], + "output": "file_protocol.out", + "exitCode": 0 +} diff --git a/tests/testdata/bench/file_protocol.out b/tests/specs/bench/file_protocol/file_protocol.out similarity index 80% rename from tests/testdata/bench/file_protocol.out rename to tests/specs/bench/file_protocol/file_protocol.out index fbe4e9d0a4481e..6c002c86b89b71 100644 --- a/tests/testdata/bench/file_protocol.out +++ b/tests/specs/bench/file_protocol/file_protocol.out @@ -1,8 +1,8 @@ -Check file://[WILDCARD]/bench/file_protocol.ts +Check file://[WILDCARD]/file_protocol.ts CPU | [WILDCARD] Runtime | Deno [WILDCARD] ([WILDCARD]) -[WILDCARD]/bench/file_protocol.ts +[WILDCARD]/file_protocol.ts benchmark time/iter (avg) iter/s (min … max) p75 p99 p995 ----------- ----------------------------- --------------------- -------------------------- diff --git a/tests/testdata/bench/file_protocol.ts b/tests/specs/bench/file_protocol/file_protocol.ts similarity index 100% rename from tests/testdata/bench/file_protocol.ts rename to tests/specs/bench/file_protocol/file_protocol.ts From f08ca6414b9fd16b571332674a50aec520f66e7f Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 5 Feb 2025 16:59:24 +0100 Subject: [PATCH 06/17] feat(lint): add JavaScript plugin support (#27203) This commit adds an unstable lint plugin API. Plugins are specified in the `deno.json` file under `lint.plugins` option like so: ``` { "lint": { "plugins": [ "./plugins/my-plugin.ts", "jsr:@deno/lint-plugin1", "npm:@deno/lint-plugin2" ] } } ``` The API is considered unstable and might be subject to changes in the future. Plugin API was modelled after ESLint API for the most part, but there are no guarantees for compatibility. The AST format exposed to plugins is closely modelled after the AST that `typescript-eslint` uses. Lint plugins use the visitor pattern and can add diagnostics like so: ``` export default { name: "lint-plugin", rules: { "plugin-rule": { create(context) { return { Identifier(node) { if (node.name === "a") { context.report({ node, message: "should be b", fix(fixer) { return fixer.replaceText(node, "_b"); }, }); } }, }; }, }, }, } satisfies Deno.lint.Plugin; ``` Besides reporting errors (diagnostics) plugins can provide automatic fixes that use text replacement to apply changes. --------- Co-authored-by: Marvin Hagemeister Co-authored-by: David Sherret --- Cargo.lock | 8 +- cli/Cargo.toml | 2 +- cli/args/flags.rs | 1 + cli/args/mod.rs | 42 +- cli/js/40_lint.js | 241 +- cli/js/40_lint_types.d.ts | 35 +- cli/lsp/analysis.rs | 4 +- cli/lsp/config.rs | 36 +- cli/lsp/diagnostics.rs | 6 +- cli/ops/lint.rs | 225 +- cli/schemas/lint-rules.v1.json | 254 +- cli/tools/lint/ast_buffer/buffer.rs | 19 +- cli/tools/lint/ast_buffer/mod.rs | 9 +- cli/tools/lint/ast_buffer/swc.rs | 7 +- cli/tools/lint/ast_buffer/ts_estree.rs | 5 + cli/tools/lint/linter.rs | 235 +- cli/tools/lint/mod.rs | 92 +- cli/tools/lint/plugins.rs | 543 +++ cli/tools/lint/reporters.rs | 16 +- cli/tools/lint/rules/mod.rs | 12 +- cli/tools/test/mod.rs | 2 +- cli/tsc/dts/lib.deno.unstable.d.ts | 133 + cli/util/text_encoding.rs | 89 + runtime/js/99_main.js | 11 +- tests/integration/js_unit_tests.rs | 12 +- tests/specs/lint/lint_plugin/__test__.jsonc | 17 + tests/specs/lint/lint_plugin/a.ts | 1 + tests/specs/lint/lint_plugin/deno.json | 5 + .../specs/lint/lint_plugin/deno_exclude.json | 10 + tests/specs/lint/lint_plugin/lint.out | 2 + tests/specs/lint/lint_plugin/lint_exclude.out | 1 + tests/specs/lint/lint_plugin/lint_fixed.out | 1 + tests/specs/lint/lint_plugin/plugin.ts | 22 + .../lint/lint_plugin_fix_error/__test__.jsonc | 6 + .../lint/lint_plugin_fix_error/deno.json | 5 + .../specs/lint/lint_plugin_fix_error/fix.out | 11 + .../specs/lint/lint_plugin_fix_error/main.ts | 2 + .../lint/lint_plugin_fix_error/plugin.ts | 20 + .../lint_plugin_infinite_edits/__test__.jsonc | 6 + .../lint/lint_plugin_infinite_edits/deno.json | 5 + .../lint/lint_plugin_infinite_edits/fix.out | 12 + .../lint/lint_plugin_infinite_edits/main.ts | 2 + .../lint/lint_plugin_infinite_edits/plugin.ts | 20 + .../lint/lint_plugin_utf16/__test__.jsonc | 22 + tests/specs/lint/lint_plugin_utf16/deno.json | 5 + tests/specs/lint/lint_plugin_utf16/fix.out | 1 + tests/specs/lint/lint_plugin_utf16/fixed.out | 2 + tests/specs/lint/lint_plugin_utf16/lint.out | 11 + tests/specs/lint/lint_plugin_utf16/main.ts | 2 + tests/specs/lint/lint_plugin_utf16/plugin.ts | 22 + .../__snapshots__/lint_plugin_test.ts.snap | 3022 ++++++++--------- tests/unit/lint_plugin_test.ts | 41 +- tests/unit/ops_test.ts | 2 +- 53 files changed, 3521 insertions(+), 1796 deletions(-) create mode 100644 cli/tools/lint/plugins.rs create mode 100644 
tests/specs/lint/lint_plugin/__test__.jsonc create mode 100644 tests/specs/lint/lint_plugin/a.ts create mode 100644 tests/specs/lint/lint_plugin/deno.json create mode 100644 tests/specs/lint/lint_plugin/deno_exclude.json create mode 100644 tests/specs/lint/lint_plugin/lint.out create mode 100644 tests/specs/lint/lint_plugin/lint_exclude.out create mode 100644 tests/specs/lint/lint_plugin/lint_fixed.out create mode 100644 tests/specs/lint/lint_plugin/plugin.ts create mode 100644 tests/specs/lint/lint_plugin_fix_error/__test__.jsonc create mode 100644 tests/specs/lint/lint_plugin_fix_error/deno.json create mode 100644 tests/specs/lint/lint_plugin_fix_error/fix.out create mode 100644 tests/specs/lint/lint_plugin_fix_error/main.ts create mode 100644 tests/specs/lint/lint_plugin_fix_error/plugin.ts create mode 100644 tests/specs/lint/lint_plugin_infinite_edits/__test__.jsonc create mode 100644 tests/specs/lint/lint_plugin_infinite_edits/deno.json create mode 100644 tests/specs/lint/lint_plugin_infinite_edits/fix.out create mode 100644 tests/specs/lint/lint_plugin_infinite_edits/main.ts create mode 100644 tests/specs/lint/lint_plugin_infinite_edits/plugin.ts create mode 100644 tests/specs/lint/lint_plugin_utf16/__test__.jsonc create mode 100644 tests/specs/lint/lint_plugin_utf16/deno.json create mode 100644 tests/specs/lint/lint_plugin_utf16/fix.out create mode 100644 tests/specs/lint/lint_plugin_utf16/fixed.out create mode 100644 tests/specs/lint/lint_plugin_utf16/lint.out create mode 100644 tests/specs/lint/lint_plugin_utf16/main.ts create mode 100644 tests/specs/lint/lint_plugin_utf16/plugin.ts diff --git a/Cargo.lock b/Cargo.lock index e64d40feba7b02..b701073a76db85 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2010,9 +2010,9 @@ dependencies = [ [[package]] name = "deno_lint" -version = "0.70.0" +version = "0.71.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac94db8d8597b96c92d30a68b11d4bec6822dcbb3e8675ab1e0136816a301a34" +checksum = "810d0f4b19cd44061bbe7252ad37cf7a81753540f97f88e1548ac9f03b3a18cc" dependencies = [ "anyhow", "deno_ast", @@ -4187,9 +4187,9 @@ dependencies = [ [[package]] name = "hstr" -version = "0.2.9" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a9de2bdef6354361892492bab5e316b2d78a0ee9971db4d36da9b1eb0e11999" +checksum = "dae404c0c5d4e95d4858876ab02eecd6a196bb8caa42050dfa809938833fc412" dependencies = [ "hashbrown 0.14.5", "new_debug_unreachable", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index dee17bd34474a2..1e8d75628c2415 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -74,7 +74,7 @@ deno_doc = { version = "=0.164.0", features = ["rust", "comrak"] } deno_error.workspace = true deno_graph = { version = "=0.87.2" } deno_lib.workspace = true -deno_lint = { version = "0.70.0" } +deno_lint = { version = "0.71.0" } deno_lockfile.workspace = true deno_media_type = { workspace = true, features = ["data_url", "decoding", "module_specifier"] } deno_npm.workspace = true diff --git a/cli/args/flags.rs b/cli/args/flags.rs index 7a817486def07f..96adff8696d6d2 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -499,6 +499,7 @@ impl DenoSubcommand { | Self::Jupyter(_) | Self::Repl(_) | Self::Bench(_) + | Self::Lint(_) | Self::Lsp ) } diff --git a/cli/args/mod.rs b/cli/args/mod.rs index f0d59299d0202a..d59a4e4d362c88 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -366,6 +366,7 @@ pub struct LintOptions { pub rules: LintRulesConfig, pub files: FilePatterns, pub fix: bool, + pub 
plugins: Vec, } impl Default for LintOptions { @@ -380,20 +381,41 @@ impl LintOptions { rules: Default::default(), files: FilePatterns::new_with_base(base), fix: false, + plugins: vec![], } } - pub fn resolve(lint_config: LintConfig, lint_flags: &LintFlags) -> Self { - Self { + pub fn resolve( + dir_path: PathBuf, + lint_config: LintConfig, + lint_flags: &LintFlags, + ) -> Result { + let rules = resolve_lint_rules_options( + lint_config.options.rules, + lint_flags.maybe_rules_tags.clone(), + lint_flags.maybe_rules_include.clone(), + lint_flags.maybe_rules_exclude.clone(), + ); + + let plugins = { + let plugin_specifiers = lint_config.options.plugins; + let mut plugins = Vec::with_capacity(plugin_specifiers.len()); + for plugin in &plugin_specifiers { + // TODO(bartlomieju): handle import-mapped specifiers + let url = resolve_url_or_path(plugin, &dir_path)?; + plugins.push(url); + } + // ensure stability for hasher + plugins.sort_unstable(); + plugins + }; + + Ok(Self { files: lint_config.files, - rules: resolve_lint_rules_options( - lint_config.options.rules, - lint_flags.maybe_rules_tags.clone(), - lint_flags.maybe_rules_include.clone(), - lint_flags.maybe_rules_exclude.clone(), - ), + rules, fix: lint_flags.fix, - } + plugins, + }) } } @@ -759,7 +781,7 @@ impl CliOptions { .resolve_lint_config_for_members(&cli_arg_patterns)?; let mut result = Vec::with_capacity(member_configs.len()); for (ctx, config) in member_configs { - let options = LintOptions::resolve(config, lint_flags); + let options = LintOptions::resolve(ctx.dir_path(), config, lint_flags)?; result.push((ctx, options)); } Ok(result) diff --git a/cli/js/40_lint.js b/cli/js/40_lint.js index 9f85f0871df2ea..d8e99c7b0a9107 100644 --- a/cli/js/40_lint.js +++ b/cli/js/40_lint.js @@ -10,9 +10,14 @@ import { import { core, internals } from "ext:core/mod.js"; const { + op_lint_get_source, + op_lint_report, op_lint_create_serialized_ast, + op_is_cancelled, } = core.ops; +let doReport = op_lint_report; + // Keep these in sync with Rust const AST_IDX_INVALID = 0; const AST_GROUP_TYPE = 1; @@ -72,29 +77,133 @@ const PropFlags = { /** @typedef {import("./40_lint_types.d.ts").VisitorFn} VisitorFn */ /** @typedef {import("./40_lint_types.d.ts").CompiledVisitor} CompiledVisitor */ /** @typedef {import("./40_lint_types.d.ts").LintState} LintState */ -/** @typedef {import("./40_lint_types.d.ts").RuleContext} RuleContext */ -/** @typedef {import("./40_lint_types.d.ts").NodeFacade} NodeFacade */ -/** @typedef {import("./40_lint_types.d.ts").LintPlugin} LintPlugin */ /** @typedef {import("./40_lint_types.d.ts").TransformFn} TransformFn */ /** @typedef {import("./40_lint_types.d.ts").MatchContext} MatchContext */ -/** @typedef {import("./40_lint_types.d.ts").Node} Node */ /** @type {LintState} */ const state = { plugins: [], installedPlugins: new Set(), + ignoredRules: new Set(), }; +function resetState() { + state.plugins = []; + state.installedPlugins.clear(); + state.ignoredRules.clear(); +} + +/** + * This implementation calls into Rust to check if Tokio's cancellation token + * has already been canceled. 
+ */ +class CancellationToken { + isCancellationRequested() { + return op_is_cancelled(); + } +} + +/** @implements {Deno.lint.Fixer} */ +class Fixer { + /** + * @param {Deno.lint.Node} node + * @param {string} text + */ + insertTextAfter(node, text) { + return { + range: /** @type {[number, number]} */ ([node.range[1], node.range[1]]), + text, + }; + } + + /** + * @param {Deno.lint.Node["range"]} range + * @param {string} text + */ + insertTextAfterRange(range, text) { + return { + range: /** @type {[number, number]} */ ([range[1], range[1]]), + text, + }; + } + + /** + * @param {Deno.lint.Node} node + * @param {string} text + */ + insertTextBefore(node, text) { + return { + range: /** @type {[number, number]} */ ([node.range[0], node.range[0]]), + text, + }; + } + + /** + * @param {Deno.lint.Node["range"]} range + * @param {string} text + */ + insertTextBeforeRange(range, text) { + return { + range: /** @type {[number, number]} */ ([range[0], range[0]]), + text, + }; + } + + /** + * @param {Deno.lint.Node} node + */ + remove(node) { + return { + range: node.range, + text: "", + }; + } + + /** + * @param {Deno.lint.Node["range"]} range + */ + removeRange(range) { + return { + range, + text: "", + }; + } + + /** + * @param {Deno.lint.Node} node + * @param {string} text + */ + replaceText(node, text) { + return { + range: node.range, + text, + }; + } + + /** + * @param {Deno.lint.Node["range"]} range + * @param {string} text + */ + replaceTextRange(range, text) { + return { + range, + text, + }; + } +} + /** * Every rule gets their own instance of this class. This is the main * API lint rules interact with. - * @implements {RuleContext} + * @implements {Deno.lint.RuleContext} */ export class Context { id; fileName; + #source = null; + /** * @param {string} id * @param {string} fileName @@ -103,18 +212,85 @@ export class Context { this.id = id; this.fileName = fileName; } + + source() { + if (this.#source === null) { + this.#source = op_lint_get_source(); + } + return /** @type {*} */ (this.#source); + } + + /** + * @param {Deno.lint.ReportData} data + */ + report(data) { + const range = data.node ? data.node.range : data.range ? 
data.range : null; + if (range == null) { + throw new Error( + "Either `node` or `range` must be provided when reporting an error", + ); + } + + const start = range[0]; + const end = range[1]; + + let fix; + + if (typeof data.fix === "function") { + const fixer = new Fixer(); + fix = data.fix(fixer); + } + + doReport( + this.id, + data.message, + data.hint, + start, + end, + fix, + ); + } } /** - * @param {LintPlugin} plugin + * @param {Deno.lint.Plugin[]} plugins + * @param {string[]} exclude + */ +export function installPlugins(plugins, exclude) { + if (Array.isArray(exclude)) { + for (let i = 0; i < exclude.length; i++) { + state.ignoredRules.add(exclude[i]); + } + } + + return plugins.map((plugin) => installPlugin(plugin)); +} + +/** + * @param {Deno.lint.Plugin} plugin */ -export function installPlugin(plugin) { +function installPlugin(plugin) { if (typeof plugin !== "object") { throw new Error("Linter plugin must be an object"); } if (typeof plugin.name !== "string") { throw new Error("Linter plugin name must be a string"); } + if (!/^[a-z-]+$/.test(plugin.name)) { + throw new Error( + "Linter plugin name must only contain lowercase letters (a-z) or hyphens (-).", + ); + } + if (plugin.name.startsWith("-") || plugin.name.endsWith("-")) { + throw new Error( + "Linter plugin name must start and end with a lowercase letter.", + ); + } + if (plugin.name.includes("--")) { + throw new Error( + "Linter plugin name must not have consequtive hyphens.", + ); + } if (typeof plugin.rules !== "object") { throw new Error("Linter plugin rules must be an object"); } @@ -123,6 +299,11 @@ export function installPlugin(plugin) { } state.plugins.push(plugin); state.installedPlugins.add(plugin.name); + + return { + name: plugin.name, + ruleNames: Object.keys(plugin.rules), + }; } /** @@ -285,7 +466,7 @@ function readType(buf, idx) { /** * @param {AstContext} ctx * @param {number} idx - * @returns {Node["range"]} + * @returns {Deno.lint.Node["range"]} */ function readSpan(ctx, idx) { let offset = ctx.spansOffset + (idx * SPAN_SIZE); @@ -765,6 +946,12 @@ export function runPluginsForFile(fileName, serializedAst) { for (const name of Object.keys(plugin.rules)) { const rule = plugin.rules[name]; const id = `${plugin.name}/${name}`; + + // Check if this rule is excluded + if (state.ignoredRules.has(id)) { + continue; + } + const ctx = new Context(id, fileName); const visitor = rule.create(ctx); @@ -852,10 +1039,11 @@ export function runPluginsForFile(fileName, serializedAst) { visitors.push({ info, matcher }); } + const token = new CancellationToken(); // Traverse ast with all visitors at the same time to avoid traversing // multiple times. 
try { - traverse(ctx, visitors, ctx.rootOffset); + traverse(ctx, visitors, ctx.rootOffset, token); } finally { ctx.nodes.clear(); @@ -870,9 +1058,11 @@ export function runPluginsForFile(fileName, serializedAst) { * @param {AstContext} ctx * @param {CompiledVisitor[]} visitors * @param {number} idx + * @param {CancellationToken} cancellationToken */ -function traverse(ctx, visitors, idx) { +function traverse(ctx, visitors, idx, cancellationToken) { if (idx === AST_IDX_INVALID) return; + if (cancellationToken.isCancellationRequested()) return; const { buf } = ctx; const nodeType = readType(ctx.buf, idx); @@ -905,12 +1095,12 @@ function traverse(ctx, visitors, idx) { try { const childIdx = readChild(buf, idx); if (childIdx > AST_IDX_INVALID) { - traverse(ctx, visitors, childIdx); + traverse(ctx, visitors, childIdx, cancellationToken); } const nextIdx = readNext(buf, idx); if (nextIdx > AST_IDX_INVALID) { - traverse(ctx, visitors, nextIdx); + traverse(ctx, visitors, nextIdx, cancellationToken); } } finally { if (exits !== null) { @@ -1064,8 +1254,12 @@ function _dump(ctx) { } } -// TODO(bartlomieju): this is temporary, until we get plugins plumbed through -// the CLI linter +// These are captured by Rust and called when plugins need to be loaded +// or run. +internals.installPlugins = installPlugins; +internals.runPluginsForFile = runPluginsForFile; +internals.resetState = resetState; + /** * @param {LintPlugin} plugin * @param {string} fileName @@ -1074,16 +1268,25 @@ function _dump(ctx) { function runLintPlugin(plugin, fileName, sourceText) { installPlugin(plugin); + const diagnostics = []; + doReport = (id, message, hint, start, end, fix) => { + diagnostics.push({ + id, + message, + hint, + range: [start, end], + fix, + }); + }; try { const serializedAst = op_lint_create_serialized_ast(fileName, sourceText); runPluginsForFile(fileName, serializedAst); } finally { - // During testing we don't want to keep plugins around - state.installedPlugins.clear(); + resetState(); } + doReport = op_lint_report; + return diagnostics; } -// TODO(bartlomieju): this is temporary, until we get plugins plumbed through -// the CLI linter -internals.runLintPlugin = runLintPlugin; +Deno.lint.runPlugin = runLintPlugin; diff --git a/cli/js/40_lint_types.d.ts b/cli/js/40_lint_types.d.ts index f07d16581e8e49..662cfc930f7889 100644 --- a/cli/js/40_lint_types.d.ts +++ b/cli/js/40_lint_types.d.ts @@ -1,17 +1,11 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
-export interface NodeFacade { - type: string; - range: [number, number]; - [key: string]: unknown; -} - export interface AstContext { buf: Uint8Array; strTable: Map; strTableOffset: number; rootOffset: number; - nodes: Map; + nodes: Map; spansOffset: number; propsOffset: number; strByType: number[]; @@ -21,32 +15,11 @@ export interface AstContext { matcher: MatchContext; } -export interface Node { - range: Range; -} - -export type Range = [number, number]; - -// TODO(@marvinhagemeister) Remove once we land "official" types -export interface RuleContext { - id: string; -} - -// TODO(@marvinhagemeister) Remove once we land "official" types -export interface LintRule { - create(ctx: RuleContext): Record void>; - destroy?(ctx: RuleContext): void; -} - -// TODO(@marvinhagemeister) Remove once we land "official" types -export interface LintPlugin { - name: string; - rules: Record; -} - export interface LintState { - plugins: LintPlugin[]; + plugins: Deno.lint.Plugin[]; installedPlugins: Set; + /** format: `/` */ + ignoredRules: Set; } export type VisitorFn = (node: unknown) => void; diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index fd7337df263dc8..16a815e08ff7bb 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -38,6 +38,7 @@ use node_resolver::ResolutionMode; use once_cell::sync::Lazy; use regex::Regex; use text_lines::LineAndColumnIndex; +use tokio_util::sync::CancellationToken; use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types::Position; use tower_lsp::lsp_types::Range; @@ -186,8 +187,9 @@ fn as_lsp_range( pub fn get_lint_references( parsed_source: &deno_ast::ParsedSource, linter: &CliLinter, + token: CancellationToken, ) -> Result, AnyError> { - let lint_diagnostics = linter.lint_with_ast(parsed_source); + let lint_diagnostics = linter.lint_with_ast(parsed_source, token)?; Ok( lint_diagnostics diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index e4570031fb7884..b76c54c7ed6b5f 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -1629,14 +1629,46 @@ impl ConfigData { sloppy_imports_resolver.clone(), Some(resolver.clone()), ); + + let lint_options = LintOptions::resolve( + member_dir.dir_path(), + (*lint_config).clone(), + &LintFlags::default(), + ) + .inspect_err(|err| lsp_warn!(" Failed to resolve linter options: {}", err)) + .ok() + .unwrap_or_default(); + let mut plugin_runner = None; + if !lint_options.plugins.is_empty() { + fn logger_printer(msg: &str, _is_err: bool) { + lsp_log!("pluggin runner - {}", msg); + } + let logger = crate::tools::lint::PluginLogger::new(logger_printer); + let plugin_load_result = + crate::tools::lint::create_runner_and_load_plugins( + lint_options.plugins.clone(), + logger, + lint_options.rules.exclude.clone(), + ) + .await; + match plugin_load_result { + Ok(runner) => { + plugin_runner = Some(Arc::new(runner)); + } + Err(err) => { + lsp_warn!("Failed to load lint plugins: {}", err); + } + } + } + let linter = Arc::new(CliLinter::new(CliLinterOptions { configured_rules: lint_rule_provider.resolve_lint_rules( - LintOptions::resolve((*lint_config).clone(), &LintFlags::default()) - .rules, + lint_options.rules, member_dir.maybe_deno_json().map(|c| c.as_ref()), ), fix: false, deno_lint_config, + maybe_plugin_runner: plugin_runner, })); ConfigData { diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index dee672d17a73a4..7283f2cf8f684a 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -1021,6 +1021,8 @@ fn generate_lint_diagnostics( default_jsx_factory: None, default_jsx_fragment_factory: None, }, 
+ // TODO(bartlomieju): handle linter plugins here before landing + maybe_plugin_runner: None, })), ) }); @@ -1032,6 +1034,7 @@ fn generate_lint_diagnostics( &document, &lint_config, &linter, + token.clone(), ), }, }); @@ -1043,6 +1046,7 @@ fn generate_document_lint_diagnostics( document: &Document, lint_config: &LintConfig, linter: &CliLinter, + token: CancellationToken, ) -> Vec { if !lint_config.files.matches_specifier(document.specifier()) { return Vec::new(); @@ -1050,7 +1054,7 @@ fn generate_document_lint_diagnostics( match document.maybe_parsed_source() { Some(Ok(parsed_source)) => { if let Ok(references) = - analysis::get_lint_references(parsed_source, linter) + analysis::get_lint_references(parsed_source, linter, token) { references .into_iter() diff --git a/cli/ops/lint.rs b/cli/ops/lint.rs index c13cb21a53d43f..820a64db43e670 100644 --- a/cli/ops/lint.rs +++ b/cli/ops/lint.rs @@ -3,11 +3,187 @@ use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_ast::ParseDiagnostic; +use deno_ast::SourceRange; +use deno_ast::SourceTextInfo; +use deno_ast::SourceTextProvider; use deno_core::op2; +use deno_core::OpState; +use deno_lint::diagnostic::LintDiagnostic; +use deno_lint::diagnostic::LintDiagnosticDetails; +use deno_lint::diagnostic::LintDiagnosticRange; +use deno_lint::diagnostic::LintFix; +use deno_lint::diagnostic::LintFixChange; +use tokio_util::sync::CancellationToken; use crate::tools::lint; +use crate::tools::lint::PluginLogger; +use crate::util::text_encoding::Utf16Map; -deno_core::extension!(deno_lint, ops = [op_lint_create_serialized_ast,],); +deno_core::extension!( + deno_lint_ext, + ops = [ + op_lint_create_serialized_ast, + op_lint_report, + op_lint_get_source, + op_is_cancelled + ], + options = { + logger: PluginLogger, + }, + // TODO(bartlomieju): this should only be done, + // if not in the "test worker". 
+ middleware = |op| match op.name { + "op_print" => op_print(), + _ => op, + }, + state = |state, options| { + state.put(options.logger); + state.put(LintPluginContainer::default()); + }, +); + +deno_core::extension!( + deno_lint_ext_for_test, + ops = [op_lint_create_serialized_ast, op_is_cancelled], + state = |state| { + state.put(LintPluginContainer::default()); + }, +); + +#[derive(Default)] +pub struct LintPluginContainer { + pub diagnostics: Vec, + pub source_text_info: Option, + pub utf_16_map: Option, + pub specifier: Option, + pub token: CancellationToken, +} + +impl LintPluginContainer { + pub fn set_cancellation_token( + &mut self, + maybe_token: Option, + ) { + let token = maybe_token.unwrap_or_default(); + self.token = token; + } + + pub fn set_info_for_file( + &mut self, + specifier: ModuleSpecifier, + source_text_info: SourceTextInfo, + utf16_map: Utf16Map, + ) { + self.specifier = Some(specifier); + self.utf_16_map = Some(utf16_map); + self.source_text_info = Some(source_text_info); + } + + fn report( + &mut self, + id: String, + message: String, + hint: Option, + start_utf16: usize, + end_utf16: usize, + fix: Option, + ) -> Result<(), LintReportError> { + fn out_of_range_err( + map: &Utf16Map, + start_utf16: usize, + end_utf16: usize, + ) -> LintReportError { + LintReportError::IncorrectRange { + start: start_utf16, + end: end_utf16, + source_end: map.text_content_length_utf16().into(), + } + } + + fn utf16_to_utf8_range( + utf16_map: &Utf16Map, + source_text_info: &SourceTextInfo, + start_utf16: usize, + end_utf16: usize, + ) -> Result { + let Some(start) = + utf16_map.utf16_to_utf8_offset((start_utf16 as u32).into()) + else { + return Err(out_of_range_err(utf16_map, start_utf16, end_utf16)); + }; + let Some(end) = utf16_map.utf16_to_utf8_offset((end_utf16 as u32).into()) + else { + return Err(out_of_range_err(utf16_map, start_utf16, end_utf16)); + }; + let start_pos = source_text_info.start_pos(); + Ok(SourceRange::new( + start_pos + start.into(), + start_pos + end.into(), + )) + } + + let source_text_info = self.source_text_info.as_ref().unwrap(); + let utf16_map = self.utf_16_map.as_ref().unwrap(); + let specifier = self.specifier.clone().unwrap(); + let diagnostic_range = + utf16_to_utf8_range(utf16_map, source_text_info, start_utf16, end_utf16)?; + let range = LintDiagnosticRange { + range: diagnostic_range, + description: None, + text_info: source_text_info.clone(), + }; + + let mut fixes: Vec = vec![]; + + if let Some(fix) = fix { + let fix_range = utf16_to_utf8_range( + utf16_map, + source_text_info, + fix.range.0, + fix.range.1, + )?; + fixes.push(LintFix { + changes: vec![LintFixChange { + new_text: fix.text.into(), + range: fix_range, + }], + description: format!("Fix this {} problem", id).into(), + }); + } + + let lint_diagnostic = LintDiagnostic { + specifier, + range: Some(range), + details: LintDiagnosticDetails { + message, + code: id, + hint, + fixes, + custom_docs_url: None, + info: vec![], + }, + }; + self.diagnostics.push(lint_diagnostic); + Ok(()) + } +} + +#[op2(fast)] +pub fn op_print(state: &mut OpState, #[string] msg: &str, is_err: bool) { + let logger = state.borrow::(); + + if is_err { + logger.error(msg); + } else { + logger.log(msg); + } +} + +#[op2(fast)] +fn op_is_cancelled(state: &mut OpState) -> bool { + let container = state.borrow::(); + container.token.is_cancelled() +} #[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum LintError { @@ -41,5 +217,50 @@ fn op_lint_create_serialized_ast( scope_analysis: false, maybe_syntax: 
None, })?; - Ok(lint::serialize_ast_to_buffer(&parsed_source)) + let utf16_map = Utf16Map::new(parsed_source.text().as_ref()); + Ok(lint::serialize_ast_to_buffer(&parsed_source, &utf16_map)) +} + +#[derive(serde::Deserialize)] +struct LintReportFix { + text: String, + range: (usize, usize), +} + +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum LintReportError { + #[class(type)] + #[error("Invalid range [{start}, {end}], the source has a range of [0, {source_end}]")] + IncorrectRange { + start: usize, + end: usize, + source_end: u32, + }, +} + +#[op2] +fn op_lint_report( + state: &mut OpState, + #[string] id: String, + #[string] message: String, + #[string] hint: Option, + #[smi] start_utf16: usize, + #[smi] end_utf16: usize, + #[serde] fix: Option, +) -> Result<(), LintReportError> { + let container = state.borrow_mut::(); + container.report(id, message, hint, start_utf16, end_utf16, fix)?; + Ok(()) +} + +#[op2] +#[string] +fn op_lint_get_source(state: &mut OpState) -> String { + let container = state.borrow_mut::(); + container + .source_text_info + .as_ref() + .unwrap() + .text_str() + .to_string() } diff --git a/cli/schemas/lint-rules.v1.json b/cli/schemas/lint-rules.v1.json index 87bd4e26003ac9..90c1230b410602 100644 --- a/cli/schemas/lint-rules.v1.json +++ b/cli/schemas/lint-rules.v1.json @@ -1,127 +1,135 @@ { "$schema": "http://json-schema.org/draft-07/schema#", - "enum": [ - "adjacent-overload-signatures", - "ban-ts-comment", - "ban-types", - "ban-unknown-rule-code", - "ban-untagged-ignore", - "ban-untagged-todo", - "ban-unused-ignore", - "camelcase", - "constructor-super", - "default-param-last", - "eqeqeq", - "explicit-function-return-type", - "explicit-module-boundary-types", - "for-direction", - "fresh-handler-export", - "fresh-server-event-handlers", - "getter-return", - "guard-for-in", - "jsx-boolean-value", - "jsx-button-has-type", - "jsx-curly-braces", - "jsx-key", - "jsx-no-children-prop", - "jsx-no-comment-text-nodes", - "jsx-no-duplicate-props", - "jsx-no-unescaped-entities", - "jsx-no-useless-fragment", - "jsx-props-no-spread-multi", - "jsx-void-dom-elements-no-children", - "no-array-constructor", - "no-async-promise-executor", - "no-await-in-loop", - "no-await-in-sync-fn", - "no-boolean-literal-for-arguments", - "no-case-declarations", - "no-class-assign", - "no-compare-neg-zero", - "no-cond-assign", - "no-console", - "no-const-assign", - "no-constant-condition", - "no-control-regex", - "no-debugger", - "no-delete-var", - "no-deprecated-deno-api", - "no-dupe-args", - "no-dupe-class-members", - "no-dupe-else-if", - "no-dupe-keys", - "no-duplicate-case", - "no-empty", - "no-empty-character-class", - "no-empty-enum", - "no-empty-interface", - "no-empty-pattern", - "no-eval", - "no-ex-assign", - "no-explicit-any", - "no-external-import", - "no-extra-boolean-cast", - "no-extra-non-null-assertion", - "no-fallthrough", - "no-func-assign", - "no-global-assign", - "no-implicit-declare-namespace-export", - "no-import-assertions", - "no-import-assign", - "no-inferrable-types", - "no-inner-declarations", - "no-invalid-regexp", - "no-invalid-triple-slash-reference", - "no-irregular-whitespace", - "no-misused-new", - "no-namespace", - "no-new-symbol", - "no-node-globals", - "no-non-null-asserted-optional-chain", - "no-non-null-assertion", - "no-obj-calls", - "no-octal", - "no-process-global", - "no-prototype-builtins", - "no-redeclare", - "no-regex-spaces", - "no-self-assign", - "no-self-compare", - "no-setter-return", - "no-shadow-restricted-names", - 
"no-sloppy-imports", - "no-slow-types", - "no-sparse-arrays", - "no-sync-fn-in-async-fn", - "no-this-alias", - "no-this-before-super", - "no-throw-literal", - "no-top-level-await", - "no-undef", - "no-unreachable", - "no-unsafe-finally", - "no-unsafe-negation", - "no-unused-labels", - "no-unused-vars", - "no-useless-rename", - "no-var", - "no-window", - "no-window-prefix", - "no-with", - "prefer-as-const", - "prefer-ascii", - "prefer-const", - "prefer-namespace-keyword", - "prefer-primordials", - "react-no-danger", - "react-no-danger-with-children", - "react-rules-of-hooks", - "require-await", - "require-yield", - "single-var-declarator", - "triple-slash-reference", - "use-isnan", - "valid-typeof", - "verbatim-module-syntax" + "oneOf": [ + { + "type": "string", + "pattern": "^[a-z0-9-]+\\/[a-z0-9-]+$" + }, + { + "enum": [ + "adjacent-overload-signatures", + "ban-ts-comment", + "ban-types", + "ban-unknown-rule-code", + "ban-untagged-ignore", + "ban-untagged-todo", + "ban-unused-ignore", + "camelcase", + "constructor-super", + "default-param-last", + "eqeqeq", + "explicit-function-return-type", + "explicit-module-boundary-types", + "for-direction", + "fresh-handler-export", + "fresh-server-event-handlers", + "getter-return", + "guard-for-in", + "jsx-boolean-value", + "jsx-button-has-type", + "jsx-curly-braces", + "jsx-key", + "jsx-no-children-prop", + "jsx-no-comment-text-nodes", + "jsx-no-duplicate-props", + "jsx-no-unescaped-entities", + "jsx-no-useless-fragment", + "jsx-props-no-spread-multi", + "jsx-void-dom-elements-no-children", + "no-array-constructor", + "no-async-promise-executor", + "no-await-in-loop", + "no-await-in-sync-fn", + "no-boolean-literal-for-arguments", + "no-case-declarations", + "no-class-assign", + "no-compare-neg-zero", + "no-cond-assign", + "no-console", + "no-const-assign", + "no-constant-condition", + "no-control-regex", + "no-debugger", + "no-delete-var", + "no-deprecated-deno-api", + "no-dupe-args", + "no-dupe-class-members", + "no-dupe-else-if", + "no-dupe-keys", + "no-duplicate-case", + "no-empty", + "no-empty-character-class", + "no-empty-enum", + "no-empty-interface", + "no-empty-pattern", + "no-eval", + "no-ex-assign", + "no-explicit-any", + "no-external-import", + "no-extra-boolean-cast", + "no-extra-non-null-assertion", + "no-fallthrough", + "no-func-assign", + "no-global-assign", + "no-implicit-declare-namespace-export", + "no-import-assertions", + "no-import-assign", + "no-inferrable-types", + "no-inner-declarations", + "no-invalid-regexp", + "no-invalid-triple-slash-reference", + "no-irregular-whitespace", + "no-misused-new", + "no-namespace", + "no-new-symbol", + "no-node-globals", + "no-non-null-asserted-optional-chain", + "no-non-null-assertion", + "no-obj-calls", + "no-octal", + "no-process-global", + "no-prototype-builtins", + "no-redeclare", + "no-regex-spaces", + "no-self-assign", + "no-self-compare", + "no-setter-return", + "no-shadow-restricted-names", + "no-sloppy-imports", + "no-slow-types", + "no-sparse-arrays", + "no-sync-fn-in-async-fn", + "no-this-alias", + "no-this-before-super", + "no-throw-literal", + "no-top-level-await", + "no-undef", + "no-unreachable", + "no-unsafe-finally", + "no-unsafe-negation", + "no-unused-labels", + "no-unused-vars", + "no-useless-rename", + "no-var", + "no-window", + "no-window-prefix", + "no-with", + "prefer-as-const", + "prefer-ascii", + "prefer-const", + "prefer-namespace-keyword", + "prefer-primordials", + "react-no-danger", + "react-no-danger-with-children", + "react-rules-of-hooks", + "require-await", 
+ "require-yield", + "single-var-declarator", + "triple-slash-reference", + "use-isnan", + "valid-typeof", + "verbatim-module-syntax" + ] + } ] } diff --git a/cli/tools/lint/ast_buffer/buffer.rs b/cli/tools/lint/ast_buffer/buffer.rs index a884ee24f9e535..b3e4926f241918 100644 --- a/cli/tools/lint/ast_buffer/buffer.rs +++ b/cli/tools/lint/ast_buffer/buffer.rs @@ -6,6 +6,8 @@ use deno_ast::swc::common::Span; use deno_ast::swc::common::DUMMY_SP; use indexmap::IndexMap; +use crate::util::text_encoding::Utf16Map; + /// Each property has this flag to mark what kind of value it holds- /// Plain objects and arrays are not supported yet, but could be easily /// added if needed. @@ -212,6 +214,15 @@ impl SerializeCtx { self.root_idx = idx; } + pub fn map_utf8_spans_to_utf16(&mut self, map: &Utf16Map) { + for value in &mut self.spans { + *value = map + .utf8_to_utf16_offset((*value).into()) + .unwrap_or_else(|| panic!("Failed converting '{value}' to utf16.")) + .into(); + } + } + /// Allocate a node's header fn field_header
<P>
(&mut self, prop: P, prop_flags: PropFlags) where @@ -274,7 +285,13 @@ impl SerializeCtx { where K: Into + Display + Clone, { - self.append_inner(kind, span.lo.0, span.hi.0) + let (start, end) = if *span == DUMMY_SP { + (0, 0) + } else { + // -1 is because swc stores spans 1-indexed + (span.lo.0 - 1, span.hi.0 - 1) + }; + self.append_inner(kind, start, end) } pub fn append_inner( diff --git a/cli/tools/lint/ast_buffer/mod.rs b/cli/tools/lint/ast_buffer/mod.rs index fc4045fb60cc8a..b5611af2288f1b 100644 --- a/cli/tools/lint/ast_buffer/mod.rs +++ b/cli/tools/lint/ast_buffer/mod.rs @@ -3,11 +3,16 @@ use deno_ast::ParsedSource; use swc::serialize_swc_to_buffer; +use crate::util::text_encoding::Utf16Map; + mod buffer; mod swc; mod ts_estree; -pub fn serialize_ast_to_buffer(parsed_source: &ParsedSource) -> Vec { +pub fn serialize_ast_to_buffer( + parsed_source: &ParsedSource, + utf16_map: &Utf16Map, +) -> Vec { // TODO: We could support multiple languages here - serialize_swc_to_buffer(parsed_source) + serialize_swc_to_buffer(parsed_source, utf16_map) } diff --git a/cli/tools/lint/ast_buffer/swc.rs b/cli/tools/lint/ast_buffer/swc.rs index 925d1bcd17c032..385035d023ea1c 100644 --- a/cli/tools/lint/ast_buffer/swc.rs +++ b/cli/tools/lint/ast_buffer/swc.rs @@ -93,8 +93,12 @@ use super::buffer::NodeRef; use super::ts_estree::AstNode; use super::ts_estree::TsEsTreeBuilder; use super::ts_estree::TsKeywordKind; +use crate::util::text_encoding::Utf16Map; -pub fn serialize_swc_to_buffer(parsed_source: &ParsedSource) -> Vec { +pub fn serialize_swc_to_buffer( + parsed_source: &ParsedSource, + utf16_map: &Utf16Map, +) -> Vec { let mut ctx = TsEsTreeBuilder::new(); let program = &parsed_source.program(); @@ -125,6 +129,7 @@ pub fn serialize_swc_to_buffer(parsed_source: &ParsedSource) -> Vec { } } + ctx.map_utf8_spans_to_utf16(utf16_map); ctx.serialize() } diff --git a/cli/tools/lint/ast_buffer/ts_estree.rs b/cli/tools/lint/ast_buffer/ts_estree.rs index 340f9f3225510f..f5e89a2bede37d 100644 --- a/cli/tools/lint/ast_buffer/ts_estree.rs +++ b/cli/tools/lint/ast_buffer/ts_estree.rs @@ -10,6 +10,7 @@ use deno_ast::view::TruePlusMinus; use super::buffer::AstBufSerializer; use super::buffer::NodeRef; use super::buffer::SerializeCtx; +use crate::util::text_encoding::Utf16Map; #[derive(Debug, Clone, PartialEq)] pub enum AstNode { @@ -488,6 +489,10 @@ impl TsEsTreeBuilder { } } + pub fn map_utf8_spans_to_utf16(&mut self, map: &Utf16Map) { + self.ctx.map_utf8_spans_to_utf16(map); + } + pub fn write_program( &mut self, span: &Span, diff --git a/cli/tools/lint/linter.rs b/cli/tools/lint/linter.rs index 5d6f8452744847..1b7b999594a13e 100644 --- a/cli/tools/lint/linter.rs +++ b/cli/tools/lint/linter.rs @@ -1,32 +1,45 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
+use std::borrow::Cow; use std::collections::HashSet; use std::path::Path; +use std::path::PathBuf; +use std::sync::Arc; +use ::tokio_util::sync::CancellationToken; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_ast::ParsedSource; use deno_ast::SourceTextInfo; use deno_core::anyhow::Context; use deno_core::error::AnyError; +use deno_core::futures::FutureExt as _; +use deno_core::parking_lot::Mutex; use deno_graph::ModuleGraph; use deno_lint::diagnostic::LintDiagnostic; +use deno_lint::linter::ExternalLinterCb; +use deno_lint::linter::ExternalLinterResult; use deno_lint::linter::LintConfig as DenoLintConfig; use deno_lint::linter::LintFileOptions; use deno_lint::linter::Linter as DenoLintLinter; use deno_lint::linter::LinterOptions; use deno_path_util::fs::atomic_write_file_with_retries; +use deno_runtime::tokio_util; +use super::plugins; +use super::plugins::PluginHostProxy; use super::rules::FileOrPackageLintRule; use super::rules::PackageLintRule; use super::ConfiguredRules; use crate::sys::CliSys; use crate::util::fs::specifier_from_file_path; +use crate::util::text_encoding::Utf16Map; pub struct CliLinterOptions { pub configured_rules: ConfiguredRules, pub fix: bool, pub deno_lint_config: DenoLintConfig, + pub maybe_plugin_runner: Option>, } #[derive(Debug)] @@ -35,6 +48,7 @@ pub struct CliLinter { package_rules: Vec>, linter: DenoLintLinter, deno_lint_config: DenoLintConfig, + maybe_plugin_runner: Option>, } impl CliLinter { @@ -62,6 +76,7 @@ impl CliLinter { custom_ignore_diagnostic_directive: None, }), deno_lint_config: options.deno_lint_config, + maybe_plugin_runner: options.maybe_plugin_runner, } } @@ -84,10 +99,22 @@ impl CliLinter { pub fn lint_with_ast( &self, parsed_source: &ParsedSource, - ) -> Vec { - self - .linter - .lint_with_ast(parsed_source, self.deno_lint_config.clone()) + token: CancellationToken, + ) -> Result, AnyError> { + let external_linter_container = ExternalLinterContainer::new( + self.maybe_plugin_runner.clone(), + Some(token), + ); + + let d = self.linter.lint_with_ast( + parsed_source, + self.deno_lint_config.clone(), + external_linter_container.get_callback(), + ); + if let Some(err) = external_linter_container.take_error() { + return Err(err); + } + Ok(d) } pub fn lint_file( @@ -105,18 +132,34 @@ impl CliLinter { MediaType::from_specifier(&specifier) }; + let external_linter_container = + ExternalLinterContainer::new(self.maybe_plugin_runner.clone(), None); + if self.fix { - self.lint_file_and_fix(&specifier, media_type, source_code, file_path) + self.lint_file_and_fix( + &specifier, + media_type, + source_code, + file_path, + external_linter_container, + ) } else { - self + let (source, diagnostics) = self .linter .lint_file(LintFileOptions { specifier, media_type, source_code, config: self.deno_lint_config.clone(), + external_linter: external_linter_container.get_callback(), }) - .map_err(AnyError::from) + .map_err(AnyError::from)?; + + if let Some(err) = external_linter_container.take_error() { + return Err(err); + } + + Ok((source, diagnostics)) } } @@ -126,6 +169,7 @@ impl CliLinter { media_type: MediaType, source_code: String, file_path: &Path, + external_linter_container: ExternalLinterContainer, ) -> Result<(ParsedSource, Vec), deno_core::anyhow::Error> { // initial lint let (source, diagnostics) = self.linter.lint_file(LintFileOptions { @@ -133,8 +177,13 @@ impl CliLinter { media_type, source_code, config: self.deno_lint_config.clone(), + external_linter: external_linter_container.get_callback(), })?; + if let Some(err) = 
external_linter_container.take_error() { + return Err(err); + } + // Try applying fixes repeatedly until the file has none left or // a maximum number of iterations is reached. This is necessary // because lint fixes may overlap and so we can't always apply @@ -148,8 +197,9 @@ impl CliLinter { media_type, &self.linter, self.deno_lint_config.clone(), - source.text_info_lazy(), + &source, &diagnostics, + &external_linter_container, )?; match change { Some(change) => { @@ -165,7 +215,7 @@ impl CliLinter { log::warn!( concat!( "Reached maximum number of fix iterations for '{}'. There's ", - "probably a bug in Deno. Please fix this file manually.", + "probably a bug in the lint rule. Please fix this file manually.", ), specifier, ); @@ -193,23 +243,81 @@ fn apply_lint_fixes_and_relint( media_type: MediaType, linter: &DenoLintLinter, config: DenoLintConfig, - text_info: &SourceTextInfo, + original_source: &ParsedSource, diagnostics: &[LintDiagnostic], + external_linter_container: &ExternalLinterContainer, ) -> Result)>, AnyError> { + let text_info = original_source.text_info_lazy(); let Some(new_text) = apply_lint_fixes(text_info, diagnostics) else { return Ok(None); }; - linter - .lint_file(LintFileOptions { + + let lint_with_text = |new_text: String| { + let (source, diagnostics) = linter.lint_file(LintFileOptions { specifier: specifier.clone(), source_code: new_text, media_type, - config, - }) - .map(Some) - .context( - "An applied lint fix caused a syntax error. Please report this bug.", - ) + config: config.clone(), + external_linter: external_linter_container.get_callback(), + })?; + let mut new_diagnostics = source.diagnostics().clone(); + new_diagnostics.retain(|d| !original_source.diagnostics().contains(d)); + if let Some(diagnostic) = new_diagnostics.pop() { + return Err(AnyError::from(diagnostic)); + } + Ok((source, diagnostics)) + }; + + let (source, diagnostics) = match lint_with_text(new_text) { + Ok(result) => result, + Err(err) => { + let utf16_map = Utf16Map::new(text_info.text_str()); + // figure out which diagnostic caused a syntax error + let mut diagnostics = diagnostics.to_vec(); + while let Some(last_diagnostic) = diagnostics.pop() { + let Some(lint_fix) = last_diagnostic.details.fixes.first() else { + continue; + }; + let success = match apply_lint_fixes(text_info, &diagnostics) { + Some(new_text) => lint_with_text(new_text).is_ok(), + None => true, + }; + if success { + let mut changes_text = String::new(); + for change in &lint_fix.changes { + let utf8_start = + (change.range.start - text_info.range().start) as u32; + let utf8_end = (change.range.end - text_info.range().start) as u32; + let utf16_start = utf16_map + .utf8_to_utf16_offset(utf8_start.into()) + .unwrap_or(utf8_start.into()); + let utf16_end = utf16_map + .utf8_to_utf16_offset(utf8_end.into()) + .unwrap_or(utf8_end.into()); + changes_text.push_str(&format!( + "Range: [{}, {}]\n", + u32::from(utf16_start), + u32::from(utf16_end) + )); + changes_text.push_str(&format!("Text: {:?}\n\n", &change.new_text)); + } + return Err(err).context(format!( + "The '{}' rule caused a syntax error applying '{}'.\n\n{}", + last_diagnostic.details.code, lint_fix.description, changes_text + )); + } + } + return Err(err).context( + "A lint fix caused a syntax error. 
This is a bug in a lint rule.", + ); + } + }; + + if let Some(err) = external_linter_container.take_error() { + return Err(err); + } + + Ok(Some((source, diagnostics))) } fn apply_lint_fixes( @@ -258,3 +366,94 @@ fn apply_lint_fixes( deno_ast::apply_text_changes(text_info.text_str(), quick_fixes); Some(new_text) } + +fn run_plugins( + plugin_runner: Arc, + parsed_source: ParsedSource, + file_path: PathBuf, + maybe_token: Option, +) -> Result { + let source_text_info = parsed_source.text_info_lazy().clone(); + let plugin_info = plugin_runner + .get_plugin_rules() + .into_iter() + .map(Cow::from) + .collect(); + + let fut = async move { + let utf16_map = Utf16Map::new(parsed_source.text().as_ref()); + let serialized_ast = + plugin_runner.serialize_ast(&parsed_source, &utf16_map)?; + + plugins::run_rules_for_ast( + &plugin_runner, + &file_path, + serialized_ast, + source_text_info, + utf16_map, + maybe_token, + ) + .await + } + .boxed_local(); + + let plugin_diagnostics = tokio_util::create_and_run_current_thread(fut)?; + + Ok(ExternalLinterResult { + diagnostics: plugin_diagnostics, + rules: plugin_info, + }) +} + +struct ExternalLinterContainer { + cb: Option, + error: Option>>>, +} + +impl ExternalLinterContainer { + pub fn new( + maybe_plugin_runner: Option>, + maybe_token: Option, + ) -> Self { + let mut s = Self { + cb: None, + error: None, + }; + if let Some(plugin_runner) = maybe_plugin_runner { + s.error = Some(Arc::new(Mutex::new(None))); + let error_ = s.error.clone(); + let cb = Arc::new(move |parsed_source: ParsedSource| { + let token_ = maybe_token.clone(); + let file_path = + match deno_path_util::url_to_file_path(parsed_source.specifier()) { + Ok(path) => path, + Err(err) => { + *error_.as_ref().unwrap().lock() = Some(err.into()); + return None; + } + }; + + let r = + run_plugins(plugin_runner.clone(), parsed_source, file_path, token_); + + match r { + Ok(d) => Some(d), + Err(err) => { + *error_.as_ref().unwrap().lock() = Some(err); + None + } + } + }); + s.cb = Some(cb); + } + s + } + + pub fn get_callback(&self) -> Option { + self.cb.clone() + } + + pub fn take_error(&self) -> Option { + self.error.as_ref().and_then(|e| e.lock().take()) + } +} diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs index 49a5c6896bb081..74c46d4c189112 100644 --- a/cli/tools/lint/mod.rs +++ b/cli/tools/lint/mod.rs @@ -26,6 +26,7 @@ use deno_core::serde_json; use deno_core::unsync::future::LocalFutureExt; use deno_core::unsync::future::SharedLocal; use deno_graph::ModuleGraph; +use deno_lib::util::hash::FastInsecureHasher; use deno_lint::diagnostic::LintDiagnostic; use log::debug; use reporters::create_reporter; @@ -55,6 +56,7 @@ use crate::util::sync::AtomicFlag; mod ast_buffer; mod linter; +mod plugins; mod reporters; mod rules; @@ -62,6 +64,8 @@ mod rules; pub use ast_buffer::serialize_ast_to_buffer; pub use linter::CliLinter; pub use linter::CliLinterOptions; +pub use plugins::create_runner_and_load_plugins; +pub use plugins::PluginLogger; pub use rules::collect_no_slow_type_diagnostics; pub use rules::ConfiguredRules; pub use rules::LintRuleProvider; @@ -282,18 +286,52 @@ impl WorkspaceLinter { ) -> Result<(), AnyError> { self.file_count += paths.len(); + let exclude = lint_options.rules.exclude.clone(); + + let plugin_specifiers = lint_options.plugins.clone(); let lint_rules = self.lint_rule_provider.resolve_lint_rules_err_empty( lint_options.rules, member_dir.maybe_deno_json().map(|c| c.as_ref()), )?; - let maybe_incremental_cache = - lint_rules.incremental_cache_state().map(|state| 
{ - Arc::new(IncrementalCache::new( - self.caches.lint_incremental_cache_db(), - CacheDBHash::from_hashable(&state), - &paths, - )) - }); + let mut maybe_incremental_cache = None; + + // TODO(bartlomieju): how do we decide if plugins support incremental cache? + if lint_rules.supports_incremental_cache() { + let mut hasher = FastInsecureHasher::new_deno_versioned(); + hasher.write_hashable(lint_rules.incremental_cache_state()); + if !plugin_specifiers.is_empty() { + hasher.write_hashable(&plugin_specifiers); + } + let state_hash = hasher.finish(); + + maybe_incremental_cache = Some(Arc::new(IncrementalCache::new( + self.caches.lint_incremental_cache_db(), + CacheDBHash::new(state_hash), + &paths, + ))); + } + + #[allow(clippy::print_stdout)] + #[allow(clippy::print_stderr)] + fn logger_printer(msg: &str, is_err: bool) { + if is_err { + eprint!("{}", msg); + } else { + print!("{}", msg); + } + } + + let mut plugin_runner = None; + if !plugin_specifiers.is_empty() { + let logger = plugins::PluginLogger::new(logger_printer); + let runner = plugins::create_runner_and_load_plugins( + plugin_specifiers, + logger, + exclude, + ) + .await?; + plugin_runner = Some(Arc::new(runner)); + } let linter = Arc::new(CliLinter::new(CliLinterOptions { configured_rules: lint_rules, @@ -301,6 +339,7 @@ impl WorkspaceLinter { deno_lint_config: self .tsconfig_resolver .deno_lint_config(member_dir.dir_url())?, + maybe_plugin_runner: plugin_runner, })); let has_error = self.has_error.clone(); @@ -543,7 +582,8 @@ fn lint_stdin( .to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?; let deno_lint_config = tsconfig_resolver.deno_lint_config(start_dir.dir_url())?; - let lint_options = LintOptions::resolve(lint_config, &lint_flags); + let lint_options = + LintOptions::resolve(start_dir.dir_path(), lint_config, &lint_flags)?; let configured_rules = lint_rule_provider.resolve_lint_rules_err_empty( lint_options.rules, start_dir.maybe_deno_json().map(|c| c.as_ref()), @@ -561,6 +601,7 @@ fn lint_stdin( fix: false, configured_rules, deno_lint_config, + maybe_plugin_runner: None, }); let r = linter @@ -624,13 +665,24 @@ mod tests { use super::*; + #[derive(Serialize, Deserialize)] + struct RulesPattern { + r#type: String, + pattern: String, + } + + #[derive(Serialize, Deserialize)] + struct RulesEnum { + r#enum: Vec, + } + #[derive(Serialize, Deserialize)] struct RulesSchema { #[serde(rename = "$schema")] schema: String, - #[serde(rename = "enum")] - rules: Vec, + #[serde(rename = "oneOf")] + one_of: (RulesPattern, RulesEnum), } fn get_all_rules() -> Vec { @@ -661,25 +713,25 @@ mod tests { const UPDATE_ENV_VAR_NAME: &str = "UPDATE_EXPECTED"; + let rules_list = schema.one_of.1.r#enum; + if std::env::var(UPDATE_ENV_VAR_NAME).ok().is_none() { assert_eq!( - schema.rules, all_rules, + rules_list, all_rules, "Lint rules schema file not up to date. 
Run again with {}=1 to update the expected output", UPDATE_ENV_VAR_NAME ); return; } + let new_schema = RulesSchema { + schema: schema.schema, + one_of: (schema.one_of.0, RulesEnum { r#enum: all_rules }), + }; + std::fs::write( &rules_schema_path, - format!( - "{}\n", - serde_json::to_string_pretty(&RulesSchema { - schema: schema.schema, - rules: all_rules, - }) - .unwrap(), - ), + format!("{}\n", serde_json::to_string_pretty(&new_schema).unwrap(),), ) .unwrap(); } diff --git a/cli/tools/lint/plugins.rs b/cli/tools/lint/plugins.rs new file mode 100644 index 00000000000000..3d7acf4f242f04 --- /dev/null +++ b/cli/tools/lint/plugins.rs @@ -0,0 +1,543 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::path::Path; +use std::path::PathBuf; +use std::rc::Rc; +use std::sync::Arc; + +use ::tokio_util::sync::CancellationToken; +use deno_ast::ModuleSpecifier; +use deno_ast::ParsedSource; +use deno_ast::SourceTextInfo; +use deno_core::anyhow::bail; +use deno_core::error::AnyError; +use deno_core::error::CoreError; +use deno_core::error::JsError; +use deno_core::futures::FutureExt; +use deno_core::parking_lot::Mutex; +use deno_core::resolve_url_or_path; +use deno_core::v8; +use deno_core::PollEventLoopOptions; +use deno_lint::diagnostic::LintDiagnostic; +use deno_runtime::deno_permissions::Permissions; +use deno_runtime::deno_permissions::PermissionsContainer; +use deno_runtime::tokio_util; +use deno_runtime::worker::MainWorker; +use deno_runtime::WorkerExecutionMode; +use tokio::sync::mpsc::channel; +use tokio::sync::mpsc::Receiver; +use tokio::sync::mpsc::Sender; + +use crate::args::DenoSubcommand; +use crate::args::Flags; +use crate::args::LintFlags; +use crate::factory::CliFactory; +use crate::ops::lint::LintPluginContainer; +use crate::tools::lint::serialize_ast_to_buffer; +use crate::util::text_encoding::Utf16Map; + +#[derive(Debug)] +pub enum PluginHostRequest { + LoadPlugins { + specifiers: Vec, + exclude_rules: Option>, + }, + Run { + serialized_ast: Vec, + file_path: PathBuf, + source_text_info: SourceTextInfo, + utf16_map: Utf16Map, + maybe_token: Option, + }, +} + +pub enum PluginHostResponse { + // TODO: write to structs + LoadPlugin(Result, AnyError>), + Run(Result, AnyError>), +} + +impl std::fmt::Debug for PluginHostResponse { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::LoadPlugin(_arg0) => f.debug_tuple("LoadPlugin").finish(), + Self::Run(_arg0) => f.debug_tuple("Run").finish(), + } + } +} + +#[derive(Clone, Debug)] +pub struct PluginLogger { + print: fn(&str, bool), +} + +impl PluginLogger { + pub fn new(print: fn(&str, bool)) -> Self { + Self { print } + } + + pub fn log(&self, msg: &str) { + (self.print)(msg, false); + } + + pub fn error(&self, msg: &str) { + (self.print)(msg, true); + } +} + +macro_rules! v8_static_strings { + ($($ident:ident = $str:literal),* $(,)?) => { + $( + pub static $ident: deno_core::FastStaticString = deno_core::ascii_str!($str); + )* + }; +} + +v8_static_strings! 
{ + DEFAULT = "default", + INSTALL_PLUGINS = "installPlugins", + RUN_PLUGINS_FOR_FILE = "runPluginsForFile", +} + +#[derive(Debug)] +pub struct PluginHostProxy { + tx: Sender, + rx: Arc>>, + pub(crate) plugin_info: Arc>>, + #[allow(unused)] + join_handle: std::thread::JoinHandle>, +} + +impl PluginHostProxy { + pub fn get_plugin_rules(&self) -> Vec { + let infos = self.plugin_info.lock(); + + let mut all_names = vec![]; + + for info in infos.iter() { + all_names.extend_from_slice(&info.get_rules()); + } + + all_names + } +} + +pub struct PluginHost { + worker: MainWorker, + install_plugins_fn: Rc>, + run_plugins_for_file_fn: Rc>, + tx: Sender, + rx: Receiver, +} + +async fn create_plugin_runner_inner( + logger: PluginLogger, + rx_req: Receiver, + tx_res: Sender, +) -> Result { + let flags = Flags { + subcommand: DenoSubcommand::Lint(LintFlags::default()), + ..Default::default() + }; + let flags = Arc::new(flags); + let factory = CliFactory::from_flags(flags.clone()); + let cli_options = factory.cli_options()?; + let main_module = + resolve_url_or_path("./$deno$lint.mts", cli_options.initial_cwd()).unwrap(); + let perm_parser = factory.permission_desc_parser()?; + let permissions = Permissions::from_options( + perm_parser.as_ref(), + &cli_options.permissions_options(), + )?; + let permissions = PermissionsContainer::new(perm_parser.clone(), permissions); + // let npm_resolver = factory.npm_resolver().await?.clone(); + // let resolver = factory.resolver().await?.clone(); + let worker_factory = factory.create_cli_main_worker_factory().await?; + + let worker = worker_factory + .create_custom_worker( + // TODO(bartlomieju): add "lint" execution mode + WorkerExecutionMode::Run, + main_module.clone(), + permissions, + vec![crate::ops::lint::deno_lint_ext::init_ops(logger.clone())], + Default::default(), + ) + .await?; + + let mut worker = worker.into_main_worker(); + let runtime = &mut worker.js_runtime; + + let obj = runtime.execute_script("lint.js", "Deno[Deno.internal]")?; + + log::debug!("Lint plugins loaded, capturing default exports"); + let (install_plugins_fn, run_plugins_for_file_fn) = { + let scope = &mut runtime.handle_scope(); + let module_exports: v8::Local = + v8::Local::new(scope, obj).try_into().unwrap(); + + let install_plugins_fn_name = INSTALL_PLUGINS.v8_string(scope).unwrap(); + let install_plugins_fn_val = module_exports + .get(scope, install_plugins_fn_name.into()) + .unwrap(); + let install_plugins_fn: v8::Local = + install_plugins_fn_val.try_into().unwrap(); + + let run_plugins_for_file_fn_name = + RUN_PLUGINS_FOR_FILE.v8_string(scope).unwrap(); + let run_plugins_for_file_fn_val = module_exports + .get(scope, run_plugins_for_file_fn_name.into()) + .unwrap(); + let run_plugins_for_file_fn: v8::Local = + run_plugins_for_file_fn_val.try_into().unwrap(); + + ( + Rc::new(v8::Global::new(scope, install_plugins_fn)), + Rc::new(v8::Global::new(scope, run_plugins_for_file_fn)), + ) + }; + + Ok(PluginHost { + worker, + install_plugins_fn, + run_plugins_for_file_fn, + tx: tx_res, + rx: rx_req, + }) +} + +#[derive(Debug, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PluginInfo { + pub name: String, + pub rule_names: Vec, +} + +impl PluginInfo { + pub fn get_rules(&self) -> Vec { + let mut rules = Vec::with_capacity(self.rule_names.len()); + + for rule_name in &self.rule_names { + rules.push(format!("{}/{}", self.name, rule_name)); + } + + rules + } +} + +impl PluginHost { + fn create(logger: PluginLogger) -> Result { + let (tx_req, rx_req) = channel(10); + let 
(tx_res, rx_res) = channel(10); + + let logger_ = logger.clone(); + let join_handle = std::thread::spawn(move || { + let logger = logger_; + log::debug!("Lint PluginHost thread spawned"); + let start = std::time::Instant::now(); + let fut = async move { + let runner = + create_plugin_runner_inner(logger.clone(), rx_req, tx_res).await?; + log::debug!("Lint PluginHost running loop"); + runner.run_loop().await?; + log::debug!( + "Lint PluginHost thread finished, took {:?}", + std::time::Instant::now() - start + ); + Ok(()) + } + .boxed_local(); + tokio_util::create_and_run_current_thread(fut) + }); + + let proxy = PluginHostProxy { + tx: tx_req, + rx: Arc::new(tokio::sync::Mutex::new(rx_res)), + plugin_info: Arc::new(Mutex::new(vec![])), + join_handle, + }; + + Ok(proxy) + } + + async fn run_loop(mut self) -> Result<(), AnyError> { + log::debug!("Lint PluginHost is waiting for message"); + while let Some(req) = self.rx.recv().await { + log::debug!("Lint PluginHost has received a message"); + match req { + PluginHostRequest::LoadPlugins { + specifiers, + exclude_rules, + } => { + let r = self.load_plugins(specifiers, exclude_rules).await; + let _ = self.tx.send(PluginHostResponse::LoadPlugin(r)).await; + } + PluginHostRequest::Run { + serialized_ast, + file_path, + source_text_info, + utf16_map, + maybe_token, + } => { + let start = std::time::Instant::now(); + let r = match self.run_plugins( + &file_path, + serialized_ast, + source_text_info, + utf16_map, + maybe_token, + ) { + Ok(()) => Ok(self.take_diagnostics()), + Err(err) => Err(err), + }; + log::debug!( + "Running plugins lint rules took {:?}", + std::time::Instant::now() - start + ); + let _ = self.tx.send(PluginHostResponse::Run(r)).await; + } + } + } + log::debug!("Lint PluginHost run loop finished"); + Ok(()) + } + + fn take_diagnostics(&mut self) -> Vec<LintDiagnostic> { + let op_state = self.worker.js_runtime.op_state(); + let mut state = op_state.borrow_mut(); + let container = state.borrow_mut::<LintPluginContainer>(); + std::mem::take(&mut container.diagnostics) + } + + fn run_plugins( + &mut self, + file_path: &Path, + serialized_ast: Vec<u8>, + source_text_info: SourceTextInfo, + utf16_map: Utf16Map, + maybe_token: Option<CancellationToken>, + ) -> Result<(), AnyError> { + { + let state = self.worker.js_runtime.op_state(); + let mut state = state.borrow_mut(); + let container = state.borrow_mut::<LintPluginContainer>(); + container.set_info_for_file( + ModuleSpecifier::from_file_path(file_path).unwrap(), + source_text_info, + utf16_map, + ); + container.set_cancellation_token(maybe_token); + } + + let scope = &mut self.worker.js_runtime.handle_scope(); + let file_name_v8: v8::Local<v8::Value> = + v8::String::new(scope, &file_path.display().to_string()) + .unwrap() + .into(); + + let store = v8::ArrayBuffer::new_backing_store_from_vec(serialized_ast); + let ast_buf = + v8::ArrayBuffer::with_backing_store(scope, &store.make_shared()); + let ast_bin_v8: v8::Local<v8::Value> = + v8::Uint8Array::new(scope, ast_buf, 0, ast_buf.byte_length()) + .unwrap() + .into(); + let run_plugins_for_file = + v8::Local::new(scope, &*self.run_plugins_for_file_fn); + let undefined = v8::undefined(scope); + + let mut tc_scope = v8::TryCatch::new(scope); + let _run_plugins_result = run_plugins_for_file.call( + &mut tc_scope, + undefined.into(), + &[file_name_v8, ast_bin_v8], + ); + + if let Some(exception) = tc_scope.exception() { + let error = JsError::from_v8_exception(&mut tc_scope, exception); + let core_err = CoreError::Js(error); + return Err(core_err.into()); + } + drop(tc_scope); + Ok(()) + } + + async fn load_plugins( + &mut self,
plugin_specifiers: Vec<ModuleSpecifier>, + exclude: Option<Vec<String>>, + ) -> Result<Vec<PluginInfo>, AnyError> { + let mut load_futures = Vec::with_capacity(plugin_specifiers.len()); + for specifier in plugin_specifiers { + let mod_id = self + .worker + .js_runtime + .load_side_es_module(&specifier) + .await?; + let mod_future = + self.worker.js_runtime.mod_evaluate(mod_id).boxed_local(); + load_futures.push((mod_future, mod_id)); + } + + self + .worker + .js_runtime + .run_event_loop(PollEventLoopOptions::default()) + .await?; + + let mut plugin_handles = Vec::with_capacity(load_futures.len()); + + for (fut, mod_id) in load_futures { + fut.await?; + let module = self.worker.js_runtime.get_module_namespace(mod_id).unwrap(); + let scope = &mut self.worker.js_runtime.handle_scope(); + let module_local = v8::Local::new(scope, module); + let default_export_str = DEFAULT.v8_string(scope).unwrap(); + let default_export = + module_local.get(scope, default_export_str.into()).unwrap(); + let default_export_global = v8::Global::new(scope, default_export); + plugin_handles.push(default_export_global); + } + + let scope = &mut self.worker.js_runtime.handle_scope(); + let install_plugins_local = + v8::Local::new(scope, &*self.install_plugins_fn.clone()); + let exclude_v8: v8::Local<v8::Value> = + exclude.map_or(v8::null(scope).into(), |v| { + let elems = v + .iter() + .map(|item| v8::String::new(scope, item).unwrap().into()) + .collect::<Vec<_>>(); + + v8::Array::new_with_elements(scope, elems.as_slice()).into() + }); + + let undefined = v8::undefined(scope); + + let local_handles = { + let arr = v8::Array::new(scope, plugin_handles.len().try_into().unwrap()); + for (idx, plugin_handle) in plugin_handles.into_iter().enumerate() { + let handle = v8::Local::new(scope, plugin_handle); + arr + .set_index(scope, idx.try_into().unwrap(), handle) + .unwrap(); + } + arr + }; + let args = &[local_handles.into(), exclude_v8]; + + log::debug!("Installing lint plugins..."); + + let mut tc_scope = v8::TryCatch::new(scope); + let plugins_info_result = + install_plugins_local.call(&mut tc_scope, undefined.into(), args); + if let Some(exception) = tc_scope.exception() { + let error = JsError::from_v8_exception(&mut tc_scope, exception); + return Err(error.into()); + } + drop(tc_scope); + let plugins_info = plugins_info_result.unwrap(); + let infos: Vec<PluginInfo> = + deno_core::serde_v8::from_v8(scope, plugins_info)?; + log::debug!("Plugins installed: {}", infos.len()); + + Ok(infos) + } +} + +impl PluginHostProxy { + pub async fn load_plugins( + &self, + specifiers: Vec<ModuleSpecifier>, + exclude_rules: Option<Vec<String>>, + ) -> Result<(), AnyError> { + self + .tx + .send(PluginHostRequest::LoadPlugins { + specifiers, + exclude_rules, + }) + .await?; + let mut rx = self.rx.lock().await; + + if let Some(val) = rx.recv().await { + let PluginHostResponse::LoadPlugin(result) = val else { + unreachable!() + }; + let infos = result?; + *self.plugin_info.lock() = infos; + return Ok(()); + } + bail!("Plugin host has closed") + } + + pub async fn run_rules( + &self, + specifier: &Path, + serialized_ast: Vec<u8>, + source_text_info: SourceTextInfo, + utf16_map: Utf16Map, + maybe_token: Option<CancellationToken>, + ) -> Result<Vec<LintDiagnostic>, AnyError> { + self + .tx + .send(PluginHostRequest::Run { + serialized_ast, + file_path: specifier.to_path_buf(), + source_text_info, + utf16_map, + maybe_token, + }) + .await?; + let mut rx = self.rx.lock().await; + + if let Some(PluginHostResponse::Run(diagnostics_result)) = rx.recv().await { + return diagnostics_result; + } + bail!("Plugin host has closed") + } + + pub fn serialize_ast( + &self, + parsed_source:
&ParsedSource, + utf16_map: &Utf16Map, + ) -> Result, AnyError> { + let start = std::time::Instant::now(); + let r = serialize_ast_to_buffer(parsed_source, utf16_map); + log::debug!( + "Serializing an AST took {:?}", + std::time::Instant::now() - start + ); + Ok(r) + } +} + +pub async fn create_runner_and_load_plugins( + plugin_specifiers: Vec, + logger: PluginLogger, + exclude: Option>, +) -> Result { + let host_proxy = PluginHost::create(logger)?; + host_proxy.load_plugins(plugin_specifiers, exclude).await?; + Ok(host_proxy) +} + +pub async fn run_rules_for_ast( + host_proxy: &PluginHostProxy, + specifier: &Path, + serialized_ast: Vec, + source_text_info: SourceTextInfo, + utf16_map: Utf16Map, + maybe_token: Option, +) -> Result, AnyError> { + let d = host_proxy + .run_rules( + specifier, + serialized_ast, + source_text_info, + utf16_map, + maybe_token, + ) + .await?; + Ok(d) +} diff --git a/cli/tools/lint/reporters.rs b/cli/tools/lint/reporters.rs index 24e04e840f7dcc..2aa50b6de81262 100644 --- a/cli/tools/lint/reporters.rs +++ b/cli/tools/lint/reporters.rs @@ -2,9 +2,11 @@ use deno_ast::diagnostics::Diagnostic; use deno_core::error::AnyError; +use deno_core::error::CoreError; use deno_core::serde_json; use deno_lint::diagnostic::LintDiagnostic; use deno_runtime::colors; +use deno_runtime::fmt_errors::format_js_error; use log::info; use serde::Serialize; @@ -53,7 +55,19 @@ impl LintReporter for PrettyLintReporter { fn visit_error(&mut self, file_path: &str, err: &AnyError) { log::error!("Error linting: {file_path}"); - log::error!(" {err}"); + let text = + if let Some(CoreError::Js(js_error)) = err.downcast_ref::() { + format_js_error(js_error) + } else { + format!("{err:#}") + }; + for line in text.split('\n') { + if line.is_empty() { + log::error!(""); + } else { + log::error!(" {}", line); + } + } } fn close(&mut self, check_count: usize) { diff --git a/cli/tools/lint/rules/mod.rs b/cli/tools/lint/rules/mod.rs index f8c65428aca6ad..9f2cee24fa9b21 100644 --- a/cli/tools/lint/rules/mod.rs +++ b/cli/tools/lint/rules/mod.rs @@ -122,16 +122,16 @@ impl CliLintRule { #[derive(Debug)] pub struct ConfiguredRules { - pub all_rule_codes: HashSet<&'static str>, + pub all_rule_codes: HashSet>, pub rules: Vec, } impl ConfiguredRules { - pub fn incremental_cache_state(&self) -> Option { - if self.rules.iter().any(|r| !r.supports_incremental_cache()) { - return None; - } + pub fn supports_incremental_cache(&self) -> bool { + self.rules.iter().all(|r| r.supports_incremental_cache()) + } + pub fn incremental_cache_state(&self) -> impl std::hash::Hash { // use a hash of the rule names in order to bust the cache let mut codes = self.rules.iter().map(|r| r.code()).collect::>(); // ensure this is stable by sorting it @@ -195,7 +195,7 @@ impl LintRuleProvider { let all_rules = self.all_rules(); let mut all_rule_names = HashSet::with_capacity(all_rules.len()); for rule in &all_rules { - all_rule_names.insert(rule.code()); + all_rule_names.insert(rule.code().into()); } let rules = filtered_rules( all_rules.into_iter(), diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs index cb49a9de95817e..697f99aa175c3f 100644 --- a/cli/tools/test/mod.rs +++ b/cli/tools/test/mod.rs @@ -626,7 +626,7 @@ async fn configure_main_worker( permissions_container, vec![ ops::testing::deno_test::init_ops(worker_sender.sender), - ops::lint::deno_lint::init_ops(), + ops::lint::deno_lint_ext_for_test::init_ops(), ], Stdio { stdin: StdioPipe::inherit(), diff --git a/cli/tsc/dts/lib.deno.unstable.d.ts 
b/cli/tsc/dts/lib.deno.unstable.d.ts index 6c901b864c79fb..8113f75809db0d 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -1344,6 +1344,139 @@ declare namespace Deno { export {}; // only export exports } + /** + * @category Linter + * @experimental + */ + export namespace lint { + /** + * @category Linter + * @experimental + */ + export type Range = [number, number]; + + /** + * @category Linter + * @experimental + */ + export interface Node { + type: string; + range: Range; + [key: string]: unknown; + } + + /** + * @category Linter + * @experimental + */ + export interface FixData { + range: Range; + text?: string; + } + + /** + * @category Linter + * @experimental + */ + export interface Fixer { + insertTextAfter(node: Node, text: string): FixData; + insertTextAfterRange(range: Range, text: string): FixData; + insertTextBefore(node: Node, text: string): FixData; + insertTextBeforeRange(range: Range, text: string): FixData; + remove(node: Node): FixData; + removeRange(range: Range): FixData; + replaceText(node: Node, text: string): FixData; + replaceTextRange(range: Range, text: string): FixData; + } + + /** + * @category Linter + * @experimental + */ + export interface ReportData { + node?: Node; + range?: Range; + message: string; + hint?: string; + fix?(fixer: Fixer): FixData; + } + + /** + * @category Linter + * @experimental + */ + export interface RuleContext { + id: string; + report(data: ReportData): void; + } + + /** + * @category Linter + * @experimental + */ + export interface Rule { + create(ctx: RuleContext): Record<string, (node: Node) => void>; + destroy?(ctx: RuleContext): void; + } + + /** + * In your plugins file do something like + * + * ```ts + * export default { + * name: "my-plugin", + * rules: { + * "no-foo": { + * create(ctx) { + * return { + * VariableDeclaration(node) {} + * } + * } + * } + * } + * } satisfies Deno.lint.Plugin + * ``` + * @category Linter + * @experimental + */ + export interface Plugin { + name: string; + rules: Record<string, Rule>; + } + + /** + * @category Linter + * @experimental + */ + export interface Fix { + range: Range; + text?: string; + } + + /** + * @category Linter + * @experimental + */ + export interface Diagnostic { + id: string; + message: string; + hint?: string; + range: Range; + fix?: Fix; + } + + /** + * This API is a noop in `deno run`... + * @category Linter + * @experimental + */ + export function runPlugin( + plugin: Plugin, + fileName: string, + source: string, + ): Diagnostic[]; + } + export {}; // only export exports } diff --git a/cli/util/text_encoding.rs b/cli/util/text_encoding.rs index 4449202384e6a3..3a8044e13bde36 100644 --- a/cli/util/text_encoding.rs +++ b/cli/util/text_encoding.rs @@ -230,6 +230,52 @@ impl Utf16Map { column_index: col.into(), } } + + /// Convert a UTF-16 byte offset to UTF-8 byte offset + pub fn utf16_to_utf8_offset( + &self, + utf16_offset: TextSize, + ) -> Option<TextSize> { + if utf16_offset > self.text_content_length_utf16() { + return None; + } + let pos = self.position_utf16(utf16_offset); + let line_start_utf8 = self.utf8_offsets[pos.line_index]; + let col_utf8 = + self.utf16_to_utf8_col(pos.line_index as u32, pos.column_index as u32); + Some(line_start_utf8 + col_utf8) + } + + /// Convert a UTF-8 byte offset to UTF-16 byte offset + pub fn utf8_to_utf16_offset( + &self, + utf8_offset: TextSize, + ) -> Option<TextSize> { + if utf8_offset > *self.utf8_offsets.last()?
{ + return None; + } + let line = partition_point(&self.utf8_offsets, |&it| it <= utf8_offset) - 1; + let line_start_utf8 = self.utf8_offsets[line]; + let col_utf8 = utf8_offset - line_start_utf8; + let col_utf16 = self.utf8_to_utf16_col(line as u32, col_utf8); + Some(self.utf16_offsets[line] + TextSize::from(col_utf16)) + } + + fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> u32 { + let mut utf16_col = u32::from(col); + + if let Some(utf16_chars) = self.utf16_lines.get(&line) { + for c in utf16_chars { + if col > c.start { + utf16_col -= u32::from(c.len()) - c.len_utf16() as u32; + } else { + break; + } + } + } + + utf16_col + } } fn partition_point<T, P>(slice: &[T], mut predicate: P) -> usize @@ -490,4 +536,47 @@ const C: char = \"メ メ\"; assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15)); } + + #[test] + fn test_offset_out_of_range() { + let text = "hello"; + let map = Utf16Map::new(text); + assert_eq!(map.utf8_to_utf16_offset(TextSize::from(10)), None); + assert_eq!(map.utf16_to_utf8_offset(TextSize::from(10)), None); + } + + #[test] + fn test_offset_basic_ascii() { + let text = "hello\nworld"; + let map = Utf16Map::new(text); + + let utf8_offset = TextSize::from(7); + let utf16_offset = map.utf8_to_utf16_offset(utf8_offset).unwrap(); + assert_eq!(utf16_offset, TextSize::from(7)); + + let result = map.utf16_to_utf8_offset(utf16_offset).unwrap(); + assert_eq!(result, utf8_offset); + } + + #[test] + fn test_offset_emoji() { + let text = "hi 👋\nbye"; + let map = Utf16Map::new(text); + + let utf8_offset = TextSize::from(3); + let utf16_offset = map.utf8_to_utf16_offset(utf8_offset).unwrap(); + assert_eq!(utf16_offset, TextSize::from(3)); + + let utf8_offset_after = TextSize::from(7); + let utf16_offset_after = + map.utf8_to_utf16_offset(utf8_offset_after).unwrap(); + assert_eq!(utf16_offset_after, TextSize::from(5)); + + for (utf8_offset, _) in text.char_indices() { + let utf8_offset = TextSize::from(utf8_offset as u32); + let utf16_offset = map.utf8_to_utf16_offset(utf8_offset).unwrap(); + let reverse_utf8_offset = map.utf16_to_utf8_offset(utf16_offset).unwrap(); + assert_eq!(reverse_utf8_offset, utf8_offset); + } + } } diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index 190de549d1bca0..5c7fab6aecec38 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -94,6 +94,7 @@ import { bootstrap as bootstrapOtel } from "ext:deno_telemetry/telemetry.ts"; if (Symbol.metadata) { throw "V8 supports Symbol.metadata now, no need to shim it"; } + ObjectDefineProperties(Symbol, { dispose: { __proto__: null, @@ -533,7 +534,10 @@ const NOT_IMPORTED_OPS = [ "op_base64_encode", // Used in the lint API + "op_lint_report", + "op_lint_get_source", "op_lint_create_serialized_ast", + "op_is_cancelled", // Related to `Deno.test()` API "op_test_event_step_result_failed", @@ -575,11 +579,14 @@ const finalDenoNs = { internal: internalSymbol, [internalSymbol]: internals, ...denoNs, - // Deno.test and Deno.bench are noops here, but kept for compatibility; so - // that they don't cause errors when used outside of `deno test`/`deno bench` + // Deno.test, Deno.bench, Deno.lint are noops here, but kept for compatibility; so + // that they don't cause errors when used outside of `deno test`/`deno bench`/`deno lint` // contexts.
test: () => {}, bench: () => {}, + lint: { + runPlugin: () => {}, + }, }; ObjectDefineProperties(finalDenoNs, { diff --git a/tests/integration/js_unit_tests.rs b/tests/integration/js_unit_tests.rs index 9ecec8b426c844..4a61361837b576 100644 --- a/tests/integration/js_unit_tests.rs +++ b/tests/integration/js_unit_tests.rs @@ -165,10 +165,14 @@ fn js_unit_test(test: String) { let mut deno = deno .arg("-A") - .arg(util::tests_path().join("unit").join(format!("{test}.ts"))) - .piped_output() - .spawn() - .expect("failed to spawn script"); + .arg(util::tests_path().join("unit").join(format!("{test}.ts"))); + + // update the snapshots when `UPDATE=1` is set + if std::env::var_os("UPDATE") == Some("1".into()) { + deno = deno.arg("--").arg("--update"); + } + + let mut deno = deno.piped_output().spawn().expect("failed to spawn script"); let now = Instant::now(); let stdout = deno.stdout.take().unwrap(); diff --git a/tests/specs/lint/lint_plugin/__test__.jsonc b/tests/specs/lint/lint_plugin/__test__.jsonc new file mode 100644 index 00000000000000..228923b2ac8c0f --- /dev/null +++ b/tests/specs/lint/lint_plugin/__test__.jsonc @@ -0,0 +1,17 @@ +{ + "steps": [ + { + "args": "lint a.ts", + "output": "lint.out", + "exitCode": 1 + }, + { + "args": "lint -c deno_exclude.json a.ts", + "output": "lint_exclude.out" + }, + { + "args": "lint --fix a.ts", + "output": "lint_fixed.out" + } + ] +} diff --git a/tests/specs/lint/lint_plugin/a.ts b/tests/specs/lint/lint_plugin/a.ts new file mode 100644 index 00000000000000..0366a968a76b74 --- /dev/null +++ b/tests/specs/lint/lint_plugin/a.ts @@ -0,0 +1 @@ +const _a = "foo"; diff --git a/tests/specs/lint/lint_plugin/deno.json b/tests/specs/lint/lint_plugin/deno.json new file mode 100644 index 00000000000000..57b9dcb3647975 --- /dev/null +++ b/tests/specs/lint/lint_plugin/deno.json @@ -0,0 +1,5 @@ +{ + "lint": { + "plugins": ["./plugin.ts"] + } +} diff --git a/tests/specs/lint/lint_plugin/deno_exclude.json b/tests/specs/lint/lint_plugin/deno_exclude.json new file mode 100644 index 00000000000000..cce33a873602fe --- /dev/null +++ b/tests/specs/lint/lint_plugin/deno_exclude.json @@ -0,0 +1,10 @@ +{ + "lint": { + "plugins": ["./plugin.ts"], + "rules": { + "exclude": [ + "test-plugin/my-rule" + ] + } + } +} diff --git a/tests/specs/lint/lint_plugin/lint.out b/tests/specs/lint/lint_plugin/lint.out new file mode 100644 index 00000000000000..56166426a7fb2b --- /dev/null +++ b/tests/specs/lint/lint_plugin/lint.out @@ -0,0 +1,2 @@ +[WILDCARD]Found 1 problem (1 fixable via --fix) +Checked 1 file diff --git a/tests/specs/lint/lint_plugin/lint_exclude.out b/tests/specs/lint/lint_plugin/lint_exclude.out new file mode 100644 index 00000000000000..c05ac45a1e7e51 --- /dev/null +++ b/tests/specs/lint/lint_plugin/lint_exclude.out @@ -0,0 +1 @@ +Checked 1 file diff --git a/tests/specs/lint/lint_plugin/lint_fixed.out b/tests/specs/lint/lint_plugin/lint_fixed.out new file mode 100644 index 00000000000000..c05ac45a1e7e51 --- /dev/null +++ b/tests/specs/lint/lint_plugin/lint_fixed.out @@ -0,0 +1 @@ +Checked 1 file diff --git a/tests/specs/lint/lint_plugin/plugin.ts b/tests/specs/lint/lint_plugin/plugin.ts new file mode 100644 index 00000000000000..0a54d73212438a --- /dev/null +++ b/tests/specs/lint/lint_plugin/plugin.ts @@ -0,0 +1,22 @@ +export default { + name: "test-plugin", + rules: { + "my-rule": { + create(context) { + return { + Identifier(node) { + if (node.name === "_a") { + context.report({ + node, + message: "should be _b", + fix(fixer) { + return fixer.replaceText(node, "_b"); + }, +
}); + } + }, + }; + }, + }, + }, +}; diff --git a/tests/specs/lint/lint_plugin_fix_error/__test__.jsonc b/tests/specs/lint/lint_plugin_fix_error/__test__.jsonc new file mode 100644 index 00000000000000..57da106ff691fa --- /dev/null +++ b/tests/specs/lint/lint_plugin_fix_error/__test__.jsonc @@ -0,0 +1,6 @@ +{ + "tempDir": true, + "args": "lint --fix", + "output": "fix.out", + "exitCode": 1 +} diff --git a/tests/specs/lint/lint_plugin_fix_error/deno.json b/tests/specs/lint/lint_plugin_fix_error/deno.json new file mode 100644 index 00000000000000..57b9dcb3647975 --- /dev/null +++ b/tests/specs/lint/lint_plugin_fix_error/deno.json @@ -0,0 +1,5 @@ +{ + "lint": { + "plugins": ["./plugin.ts"] + } +} diff --git a/tests/specs/lint/lint_plugin_fix_error/fix.out b/tests/specs/lint/lint_plugin_fix_error/fix.out new file mode 100644 index 00000000000000..aed9d4df8abcf4 --- /dev/null +++ b/tests/specs/lint/lint_plugin_fix_error/fix.out @@ -0,0 +1,11 @@ +Error linting: [WILDLINE]main.ts + The 'test-plugin/my-rule' rule caused a syntax error applying 'Fix this test-plugin/my-rule problem'. + + Range: [14, 18] + Text: "garbage test test" + + : Expected a semicolon at file:///[WILDLINE]/main.ts:1:23 + + const value = garbage test test; + ~~~~ +Checked 2 files diff --git a/tests/specs/lint/lint_plugin_fix_error/main.ts b/tests/specs/lint/lint_plugin_fix_error/main.ts new file mode 100644 index 00000000000000..5a277eecbbcf4f --- /dev/null +++ b/tests/specs/lint/lint_plugin_fix_error/main.ts @@ -0,0 +1,2 @@ +const value = "𝄞"; +console.log(value); diff --git a/tests/specs/lint/lint_plugin_fix_error/plugin.ts b/tests/specs/lint/lint_plugin_fix_error/plugin.ts new file mode 100644 index 00000000000000..3df9f8655d869d --- /dev/null +++ b/tests/specs/lint/lint_plugin_fix_error/plugin.ts @@ -0,0 +1,20 @@ +export default { + name: "test-plugin", + rules: { + "my-rule": { + create(context) { + return { + VariableDeclarator(node) { + context.report({ + node: node.init, + message: 'should be equal to string "1"', + fix(fixer) { + return fixer.replaceText(node.init, "garbage test test"); + }, + }); + }, + }; + }, + }, + }, +}; diff --git a/tests/specs/lint/lint_plugin_infinite_edits/__test__.jsonc b/tests/specs/lint/lint_plugin_infinite_edits/__test__.jsonc new file mode 100644 index 00000000000000..57da106ff691fa --- /dev/null +++ b/tests/specs/lint/lint_plugin_infinite_edits/__test__.jsonc @@ -0,0 +1,6 @@ +{ + "tempDir": true, + "args": "lint --fix", + "output": "fix.out", + "exitCode": 1 +} diff --git a/tests/specs/lint/lint_plugin_infinite_edits/deno.json b/tests/specs/lint/lint_plugin_infinite_edits/deno.json new file mode 100644 index 00000000000000..57b9dcb3647975 --- /dev/null +++ b/tests/specs/lint/lint_plugin_infinite_edits/deno.json @@ -0,0 +1,5 @@ +{ + "lint": { + "plugins": ["./plugin.ts"] + } +} diff --git a/tests/specs/lint/lint_plugin_infinite_edits/fix.out b/tests/specs/lint/lint_plugin_infinite_edits/fix.out new file mode 100644 index 00000000000000..4dde757781a509 --- /dev/null +++ b/tests/specs/lint/lint_plugin_infinite_edits/fix.out @@ -0,0 +1,12 @@ +Reached maximum number of fix iterations for 'file:///[WILDLINE]/main.ts'. There's probably a bug in the lint rule. Please fix this file manually. 
+error[test-plugin/my-rule]: should be equal to string "1" + --> [WILDLINE]main.ts:1:15 + | +1 | const value = [WILDLINE]; + | [WILDLINE] + + docs: https://docs.deno.com/lint/rules/test-plugin/my-rule + + +Found 1 problem (1 fixable via --fix) +Checked 2 files diff --git a/tests/specs/lint/lint_plugin_infinite_edits/main.ts b/tests/specs/lint/lint_plugin_infinite_edits/main.ts new file mode 100644 index 00000000000000..5a277eecbbcf4f --- /dev/null +++ b/tests/specs/lint/lint_plugin_infinite_edits/main.ts @@ -0,0 +1,2 @@ +const value = "𝄞"; +console.log(value); diff --git a/tests/specs/lint/lint_plugin_infinite_edits/plugin.ts b/tests/specs/lint/lint_plugin_infinite_edits/plugin.ts new file mode 100644 index 00000000000000..5926fb868c1305 --- /dev/null +++ b/tests/specs/lint/lint_plugin_infinite_edits/plugin.ts @@ -0,0 +1,20 @@ +export default { + name: "test-plugin", + rules: { + "my-rule": { + create(context) { + return { + VariableDeclarator(node) { + context.report({ + node: node.init, + message: 'should be equal to string "1"', + fix(fixer) { + return fixer.replaceText(node.init, Date.now().toString()); + }, + }); + }, + }; + }, + }, + }, +}; diff --git a/tests/specs/lint/lint_plugin_utf16/__test__.jsonc b/tests/specs/lint/lint_plugin_utf16/__test__.jsonc new file mode 100644 index 00000000000000..e04db0eaf0483d --- /dev/null +++ b/tests/specs/lint/lint_plugin_utf16/__test__.jsonc @@ -0,0 +1,22 @@ +{ + "tests": { + "lint": { + "args": "lint", + "output": "lint.out", + "exitCode": 1 + }, + "fix": { + "tempDir": true, + "steps": [{ + "args": "lint --fix", + "output": "fix.out" + }, { + "args": [ + "eval", + "console.log(Deno.readTextFileSync('main.ts').trim())" + ], + "output": "fixed.out" + }] + } + } +} diff --git a/tests/specs/lint/lint_plugin_utf16/deno.json b/tests/specs/lint/lint_plugin_utf16/deno.json new file mode 100644 index 00000000000000..57b9dcb3647975 --- /dev/null +++ b/tests/specs/lint/lint_plugin_utf16/deno.json @@ -0,0 +1,5 @@ +{ + "lint": { + "plugins": ["./plugin.ts"] + } +} diff --git a/tests/specs/lint/lint_plugin_utf16/fix.out b/tests/specs/lint/lint_plugin_utf16/fix.out new file mode 100644 index 00000000000000..158c556c2968ff --- /dev/null +++ b/tests/specs/lint/lint_plugin_utf16/fix.out @@ -0,0 +1 @@ +Checked 2 files diff --git a/tests/specs/lint/lint_plugin_utf16/fixed.out b/tests/specs/lint/lint_plugin_utf16/fixed.out new file mode 100644 index 00000000000000..46538595af2352 --- /dev/null +++ b/tests/specs/lint/lint_plugin_utf16/fixed.out @@ -0,0 +1,2 @@ +const value = "1"; +console.log(value); diff --git a/tests/specs/lint/lint_plugin_utf16/lint.out b/tests/specs/lint/lint_plugin_utf16/lint.out new file mode 100644 index 00000000000000..cf78dcf9f3b31e --- /dev/null +++ b/tests/specs/lint/lint_plugin_utf16/lint.out @@ -0,0 +1,11 @@ +error[test-plugin/my-rule]: should be equal to string "1" + --> [WILDLINE]main.ts:1:15 + | +1 | const value = "𝄞"; + | ^^^ + + docs: https://docs.deno.com/lint/rules/test-plugin/my-rule + + +Found 1 problem (1 fixable via --fix) +Checked 2 files diff --git a/tests/specs/lint/lint_plugin_utf16/main.ts b/tests/specs/lint/lint_plugin_utf16/main.ts new file mode 100644 index 00000000000000..5a277eecbbcf4f --- /dev/null +++ b/tests/specs/lint/lint_plugin_utf16/main.ts @@ -0,0 +1,2 @@ +const value = "𝄞"; +console.log(value); diff --git a/tests/specs/lint/lint_plugin_utf16/plugin.ts b/tests/specs/lint/lint_plugin_utf16/plugin.ts new file mode 100644 index 00000000000000..40894f5e0e67dd --- /dev/null +++ 
b/tests/specs/lint/lint_plugin_utf16/plugin.ts @@ -0,0 +1,22 @@ +export default { + name: "test-plugin", + rules: { + "my-rule": { + create(context) { + return { + VariableDeclarator(node) { + if (node.init.type !== "Literal" || node.init.value !== "1") { + context.report({ + node: node.init, + message: 'should be equal to string "1"', + fix(fixer) { + return fixer.replaceText(node.init, '"1"'); + }, + }); + } + }, + }; + }, + }, + }, +}; diff --git a/tests/unit/__snapshots__/lint_plugin_test.ts.snap b/tests/unit/__snapshots__/lint_plugin_test.ts.snap index 337fcecc8f1e7d..d2a0d2aa8ce001 100644 --- a/tests/unit/__snapshots__/lint_plugin_test.ts.snap +++ b/tests/unit/__snapshots__/lint_plugin_test.ts.snap @@ -4,8 +4,8 @@ snapshot[`Plugin - Program 1`] = ` { body: [], range: [ - 1, - 1, + 0, + 0, ], sourceType: "script", type: "Program", @@ -17,13 +17,13 @@ snapshot[`Plugin - ImportDeclaration 1`] = ` attributes: [], importKind: "value", range: [ - 1, - 14, + 0, + 13, ], source: { range: [ - 8, - 13, + 7, + 12, ], raw: '"foo"', type: "Literal", @@ -39,13 +39,13 @@ snapshot[`Plugin - ImportDeclaration 2`] = ` attributes: [], importKind: "value", range: [ - 1, - 23, + 0, + 22, ], source: { range: [ - 17, - 22, + 16, + 21, ], raw: '"foo"', type: "Literal", @@ -57,15 +57,15 @@ snapshot[`Plugin - ImportDeclaration 2`] = ` name: "foo", optional: false, range: [ - 8, - 11, + 7, + 10, ], type: "Identifier", typeAnnotation: null, }, range: [ - 8, - 11, + 7, + 10, ], type: "ImportDefaultSpecifier", }, @@ -79,13 +79,13 @@ snapshot[`Plugin - ImportDeclaration 3`] = ` attributes: [], importKind: "value", range: [ - 1, - 28, + 0, + 27, ], source: { range: [ - 22, - 27, + 21, + 26, ], raw: '"foo"', type: "Literal", @@ -97,15 +97,15 @@ snapshot[`Plugin - ImportDeclaration 3`] = ` name: "foo", optional: false, range: [ - 13, - 16, + 12, + 15, ], type: "Identifier", typeAnnotation: null, }, range: [ - 8, - 16, + 7, + 15, ], type: "ImportNamespaceSpecifier", }, @@ -119,13 +119,13 @@ snapshot[`Plugin - ImportDeclaration 4`] = ` attributes: [], importKind: "value", range: [ - 1, - 39, + 0, + 38, ], source: { range: [ - 33, - 38, + 32, + 37, ], raw: '"foo"', type: "Literal", @@ -138,8 +138,8 @@ snapshot[`Plugin - ImportDeclaration 4`] = ` name: "foo", optional: false, range: [ - 10, - 13, + 9, + 12, ], type: "Identifier", typeAnnotation: null, @@ -148,15 +148,15 @@ snapshot[`Plugin - ImportDeclaration 4`] = ` name: "foo", optional: false, range: [ - 10, - 13, + 9, + 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 10, - 13, + 9, + 12, ], type: "ImportSpecifier", }, @@ -166,8 +166,8 @@ snapshot[`Plugin - ImportDeclaration 4`] = ` name: "bar", optional: false, range: [ - 15, - 18, + 14, + 17, ], type: "Identifier", typeAnnotation: null, @@ -176,15 +176,15 @@ snapshot[`Plugin - ImportDeclaration 4`] = ` name: "baz", optional: false, range: [ - 22, - 25, + 21, + 24, ], type: "Identifier", typeAnnotation: null, }, range: [ - 15, - 25, + 14, + 24, ], type: "ImportSpecifier", }, @@ -201,21 +201,21 @@ snapshot[`Plugin - ImportDeclaration 5`] = ` name: "type", optional: false, range: [ - 30, - 34, + 29, + 33, ], type: "Identifier", typeAnnotation: null, }, range: [ - 30, - 42, + 29, + 41, ], type: "ImportAttribute", value: { range: [ - 36, - 42, + 35, + 41, ], raw: '"json"', type: "Literal", @@ -225,13 +225,13 @@ snapshot[`Plugin - ImportDeclaration 5`] = ` ], importKind: "value", range: [ - 1, - 45, + 0, + 44, ], source: { range: [ - 17, - 22, + 16, + 21, ], raw: '"foo"', type: "Literal", @@ -243,15 +243,15 
@@ snapshot[`Plugin - ImportDeclaration 5`] = ` name: "foo", optional: false, range: [ - 8, - 11, + 7, + 10, ], type: "Identifier", typeAnnotation: null, }, range: [ - 8, - 11, + 7, + 10, ], type: "ImportDefaultSpecifier", }, @@ -264,13 +264,13 @@ snapshot[`Plugin - ExportNamedDeclaration 1`] = ` { attributes: [], range: [ - 1, - 27, + 0, + 26, ], source: { range: [ - 21, - 26, + 20, + 25, ], raw: '"foo"', type: "Literal", @@ -283,8 +283,8 @@ snapshot[`Plugin - ExportNamedDeclaration 1`] = ` name: "foo", optional: false, range: [ - 10, - 13, + 9, + 12, ], type: "Identifier", typeAnnotation: null, @@ -293,15 +293,15 @@ snapshot[`Plugin - ExportNamedDeclaration 1`] = ` name: "foo", optional: false, range: [ - 10, - 13, + 9, + 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 10, - 13, + 9, + 12, ], type: "ExportSpecifier", }, @@ -314,13 +314,13 @@ snapshot[`Plugin - ExportNamedDeclaration 2`] = ` { attributes: [], range: [ - 1, - 34, + 0, + 33, ], source: { range: [ - 28, - 33, + 27, + 32, ], raw: '"foo"', type: "Literal", @@ -333,8 +333,8 @@ snapshot[`Plugin - ExportNamedDeclaration 2`] = ` name: "baz", optional: false, range: [ - 17, - 20, + 16, + 19, ], type: "Identifier", typeAnnotation: null, @@ -343,15 +343,15 @@ snapshot[`Plugin - ExportNamedDeclaration 2`] = ` name: "bar", optional: false, range: [ - 10, - 13, + 9, + 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 10, - 20, + 9, + 19, ], type: "ExportSpecifier", }, @@ -368,21 +368,21 @@ snapshot[`Plugin - ExportNamedDeclaration 3`] = ` name: "type", optional: false, range: [ - 34, - 38, + 33, + 37, ], type: "Identifier", typeAnnotation: null, }, range: [ - 34, - 46, + 33, + 45, ], type: "ImportAttribute", value: { range: [ - 40, - 46, + 39, + 45, ], raw: '"json"', type: "Literal", @@ -391,13 +391,13 @@ snapshot[`Plugin - ExportNamedDeclaration 3`] = ` }, ], range: [ - 1, - 49, + 0, + 48, ], source: { range: [ - 21, - 26, + 20, + 25, ], raw: '"foo"', type: "Literal", @@ -410,8 +410,8 @@ snapshot[`Plugin - ExportNamedDeclaration 3`] = ` name: "foo", optional: false, range: [ - 10, - 13, + 9, + 12, ], type: "Identifier", typeAnnotation: null, @@ -420,15 +420,15 @@ snapshot[`Plugin - ExportNamedDeclaration 3`] = ` name: "foo", optional: false, range: [ - 10, - 13, + 9, + 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 10, - 13, + 9, + 12, ], type: "ExportSpecifier", }, @@ -444,8 +444,8 @@ snapshot[`Plugin - ExportDefaultDeclaration 1`] = ` body: { body: [], range: [ - 31, - 33, + 30, + 32, ], type: "BlockStatement", }, @@ -455,16 +455,16 @@ snapshot[`Plugin - ExportDefaultDeclaration 1`] = ` name: "foo", optional: false, range: [ - 25, - 28, + 24, + 27, ], type: "Identifier", typeAnnotation: null, }, params: [], range: [ - 16, - 33, + 15, + 32, ], returnType: null, type: "FunctionDeclaration", @@ -472,8 +472,8 @@ snapshot[`Plugin - ExportDefaultDeclaration 1`] = ` }, exportKind: "value", range: [ - 1, - 33, + 0, + 32, ], type: "ExportDefaultDeclaration", } @@ -486,8 +486,8 @@ snapshot[`Plugin - ExportDefaultDeclaration 2`] = ` body: { body: [], range: [ - 28, - 30, + 27, + 29, ], type: "BlockStatement", }, @@ -496,8 +496,8 @@ snapshot[`Plugin - ExportDefaultDeclaration 2`] = ` id: null, params: [], range: [ - 16, - 30, + 15, + 29, ], returnType: null, type: "FunctionDeclaration", @@ -505,8 +505,8 @@ snapshot[`Plugin - ExportDefaultDeclaration 2`] = ` }, exportKind: "value", range: [ - 1, - 30, + 0, + 29, ], type: "ExportDefaultDeclaration", } @@ -519,8 +519,8 @@ snapshot[`Plugin - 
ExportDefaultDeclaration 3`] = ` body: { body: [], range: [ - 16, - 28, + 15, + 27, ], type: "ClassBody", }, @@ -529,24 +529,24 @@ snapshot[`Plugin - ExportDefaultDeclaration 3`] = ` name: "Foo", optional: false, range: [ - 22, - 25, + 21, + 24, ], type: "Identifier", typeAnnotation: null, }, implements: [], range: [ - 16, - 28, + 15, + 27, ], superClass: null, type: "ClassDeclaration", }, exportKind: "value", range: [ - 1, - 28, + 0, + 27, ], type: "ExportDefaultDeclaration", } @@ -559,8 +559,8 @@ snapshot[`Plugin - ExportDefaultDeclaration 4`] = ` body: { body: [], range: [ - 16, - 24, + 15, + 23, ], type: "ClassBody", }, @@ -568,16 +568,16 @@ snapshot[`Plugin - ExportDefaultDeclaration 4`] = ` id: null, implements: [], range: [ - 16, - 24, + 15, + 23, ], superClass: null, type: "ClassDeclaration", }, exportKind: "value", range: [ - 1, - 24, + 0, + 23, ], type: "ExportDefaultDeclaration", } @@ -589,16 +589,16 @@ snapshot[`Plugin - ExportDefaultDeclaration 5`] = ` name: "bar", optional: false, range: [ - 16, - 19, + 15, + 18, ], type: "Identifier", typeAnnotation: null, }, exportKind: "value", range: [ - 1, - 20, + 0, + 19, ], type: "ExportDefaultDeclaration", } @@ -610,8 +610,8 @@ snapshot[`Plugin - ExportDefaultDeclaration 6`] = ` body: { body: [], range: [ - 30, - 32, + 29, + 31, ], type: "TSInterfaceBody", }, @@ -621,23 +621,23 @@ snapshot[`Plugin - ExportDefaultDeclaration 6`] = ` name: "Foo", optional: false, range: [ - 26, - 29, + 25, + 28, ], type: "Identifier", typeAnnotation: null, }, range: [ - 16, - 32, + 15, + 31, ], type: "TSInterfaceDeclaration", typeParameters: [], }, exportKind: "type", range: [ - 1, - 32, + 0, + 31, ], type: "ExportDefaultDeclaration", } @@ -649,13 +649,13 @@ snapshot[`Plugin - ExportAllDeclaration 1`] = ` exportKind: "value", exported: null, range: [ - 1, - 21, + 0, + 20, ], source: { range: [ - 15, - 20, + 14, + 19, ], raw: '"foo"', type: "Literal", @@ -671,23 +671,23 @@ snapshot[`Plugin - ExportAllDeclaration 2`] = ` exportKind: "value", exported: { range: [ - 22, - 27, + 21, + 26, ], raw: '"foo"', type: "Literal", value: "foo", }, range: [ - 1, - 28, + 0, + 27, ], source: { name: "foo", optional: false, range: [ - 13, - 16, + 12, + 15, ], type: "Identifier", typeAnnotation: null, @@ -704,21 +704,21 @@ snapshot[`Plugin - ExportAllDeclaration 3`] = ` name: "type", optional: false, range: [ - 28, - 32, + 27, + 31, ], type: "Identifier", typeAnnotation: null, }, range: [ - 28, - 40, + 27, + 39, ], type: "ImportAttribute", value: { range: [ - 34, - 40, + 33, + 39, ], raw: '"json"', type: "Literal", @@ -729,13 +729,13 @@ snapshot[`Plugin - ExportAllDeclaration 3`] = ` exportKind: "value", exported: null, range: [ - 1, - 43, + 0, + 42, ], source: { range: [ - 15, - 20, + 14, + 19, ], raw: '"foo"', type: "Literal", @@ -751,15 +751,15 @@ snapshot[`Plugin - TSExportAssignment 1`] = ` name: "foo", optional: false, range: [ - 10, - 13, + 9, + 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 14, + 0, + 13, ], type: "TSExportAssignment", } @@ -771,15 +771,15 @@ snapshot[`Plugin - TSNamespaceExportDeclaration 1`] = ` name: "A", optional: false, range: [ + 20, 21, - 22, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 23, + 0, + 22, ], type: "TSNamespaceExportDeclaration", } @@ -791,8 +791,8 @@ snapshot[`Plugin - TSImportEqualsDeclaration 1`] = ` name: "a", optional: false, range: [ + 7, 8, - 9, ], type: "Identifier", typeAnnotation: null, @@ -802,15 +802,15 @@ snapshot[`Plugin - TSImportEqualsDeclaration 1`] = ` name: "b", 
optional: false, range: [ + 11, 12, - 13, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 13, + 0, + 12, ], type: "TSImportEqualsDeclaration", } @@ -822,8 +822,8 @@ snapshot[`Plugin - TSImportEqualsDeclaration 2`] = ` name: "a", optional: false, range: [ + 7, 8, - 9, ], type: "Identifier", typeAnnotation: null, @@ -832,22 +832,22 @@ snapshot[`Plugin - TSImportEqualsDeclaration 2`] = ` moduleReference: { expression: { range: [ - 20, - 25, + 19, + 24, ], raw: '"foo"', type: "Literal", value: "foo", }, range: [ - 12, - 26, + 11, + 25, ], type: "TSExternalModuleReference", }, range: [ - 1, - 26, + 0, + 25, ], type: "TSImportEqualsDeclaration", } @@ -861,22 +861,22 @@ snapshot[`Plugin - BlockStatement 1`] = ` name: "foo", optional: false, range: [ - 3, - 6, + 2, + 5, ], type: "Identifier", typeAnnotation: null, }, range: [ - 3, - 7, + 2, + 6, ], type: "ExpressionStatement", }, ], range: [ - 1, - 9, + 0, + 8, ], type: "BlockStatement", } @@ -886,8 +886,8 @@ snapshot[`Plugin - BreakStatement 1`] = ` { label: null, range: [ - 15, - 21, + 14, + 20, ], type: "BreakStatement", } @@ -899,15 +899,15 @@ snapshot[`Plugin - BreakStatement 2`] = ` name: "foo", optional: false, range: [ - 26, - 29, + 25, + 28, ], type: "Identifier", typeAnnotation: null, }, range: [ - 20, - 30, + 19, + 29, ], type: "BreakStatement", } @@ -917,8 +917,8 @@ snapshot[`Plugin - ContinueStatement 1`] = ` { label: null, range: [ - 1, - 10, + 0, + 9, ], type: "ContinueStatement", } @@ -930,15 +930,15 @@ snapshot[`Plugin - ContinueStatement 2`] = ` name: "foo", optional: false, range: [ - 10, - 13, + 9, + 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 14, + 0, + 13, ], type: "ContinueStatement", } @@ -947,8 +947,8 @@ snapshot[`Plugin - ContinueStatement 2`] = ` snapshot[`Plugin - DebuggerStatement 1`] = ` { range: [ - 1, - 10, + 0, + 9, ], type: "DebuggerStatement", } @@ -959,21 +959,21 @@ snapshot[`Plugin - DoWhileStatement 1`] = ` body: { body: [], range: [ - 4, - 6, + 3, + 5, ], type: "BlockStatement", }, range: [ - 1, - 19, + 0, + 18, ], test: { name: "foo", optional: false, range: [ - 14, - 17, + 13, + 16, ], type: "Identifier", typeAnnotation: null, @@ -988,15 +988,15 @@ snapshot[`Plugin - ExpressionStatement 1`] = ` name: "foo", optional: false, range: [ - 1, - 4, + 0, + 3, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 5, + 0, + 4, ], type: "ExpressionStatement", } @@ -1007,8 +1007,8 @@ snapshot[`Plugin - ForInStatement 1`] = ` body: { body: [], range: [ - 14, - 16, + 13, + 15, ], type: "BlockStatement", }, @@ -1016,22 +1016,22 @@ snapshot[`Plugin - ForInStatement 1`] = ` name: "a", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 16, + 0, + 15, ], right: { name: "b", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, @@ -1046,8 +1046,8 @@ snapshot[`Plugin - ForOfStatement 1`] = ` body: { body: [], range: [ - 14, - 16, + 13, + 15, ], type: "BlockStatement", }, @@ -1055,22 +1055,22 @@ snapshot[`Plugin - ForOfStatement 1`] = ` name: "a", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 16, + 0, + 15, ], right: { name: "b", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, @@ -1085,8 +1085,8 @@ snapshot[`Plugin - ForOfStatement 2`] = ` body: { body: [], range: [ - 20, - 22, + 19, + 21, ], type: "BlockStatement", }, @@ -1094,22 +1094,22 @@ snapshot[`Plugin - ForOfStatement 2`] = ` name: "a", 
optional: false, range: [ + 11, 12, - 13, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 22, + 0, + 21, ], right: { name: "b", optional: false, range: [ + 16, 17, - 18, ], type: "Identifier", typeAnnotation: null, @@ -1123,15 +1123,15 @@ snapshot[`Plugin - ForStatement 1`] = ` body: { body: [], range: [ - 10, - 12, + 9, + 11, ], type: "BlockStatement", }, init: null, range: [ - 1, - 12, + 0, + 11, ], test: null, type: "ForStatement", @@ -1144,8 +1144,8 @@ snapshot[`Plugin - ForStatement 2`] = ` body: { body: [], range: [ - 15, - 17, + 14, + 16, ], type: "BlockStatement", }, @@ -1153,22 +1153,22 @@ snapshot[`Plugin - ForStatement 2`] = ` name: "a", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 17, + 0, + 16, ], test: { name: "b", optional: false, range: [ + 8, 9, - 10, ], type: "Identifier", typeAnnotation: null, @@ -1178,8 +1178,8 @@ snapshot[`Plugin - ForStatement 2`] = ` name: "c", optional: false, range: [ + 11, 12, - 13, ], type: "Identifier", typeAnnotation: null, @@ -1193,21 +1193,21 @@ snapshot[`Plugin - IfStatement 1`] = ` consequent: { body: [], range: [ - 10, - 12, + 9, + 11, ], type: "BlockStatement", }, range: [ - 1, - 12, + 0, + 11, ], test: { name: "foo", optional: false, range: [ - 5, - 8, + 4, + 7, ], type: "Identifier", typeAnnotation: null, @@ -1221,29 +1221,29 @@ snapshot[`Plugin - IfStatement 2`] = ` alternate: { body: [], range: [ - 18, - 20, + 17, + 19, ], type: "BlockStatement", }, consequent: { body: [], range: [ - 10, - 12, + 9, + 11, ], type: "BlockStatement", }, range: [ - 1, - 20, + 0, + 19, ], test: { name: "foo", optional: false, range: [ - 5, - 8, + 4, + 7, ], type: "Identifier", typeAnnotation: null, @@ -1257,8 +1257,8 @@ snapshot[`Plugin - LabeledStatement 1`] = ` body: { body: [], range: [ - 6, - 8, + 5, + 7, ], type: "BlockStatement", }, @@ -1266,15 +1266,15 @@ snapshot[`Plugin - LabeledStatement 1`] = ` name: "foo", optional: false, range: [ - 1, - 4, + 0, + 3, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 8, + 0, + 7, ], type: "LabeledStatement", } @@ -1284,8 +1284,8 @@ snapshot[`Plugin - ReturnStatement 1`] = ` { argument: null, range: [ - 1, - 7, + 0, + 6, ], type: "ReturnStatement", } @@ -1297,15 +1297,15 @@ snapshot[`Plugin - ReturnStatement 2`] = ` name: "foo", optional: false, range: [ - 8, - 11, + 7, + 10, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 12, + 0, + 11, ], type: "ReturnStatement", } @@ -1317,15 +1317,15 @@ snapshot[`Plugin - SwitchStatement 1`] = ` { consequent: [], range: [ - 22, - 31, + 21, + 30, ], test: { name: "foo", optional: false, range: [ - 27, - 30, + 26, + 29, ], type: "Identifier", typeAnnotation: null, @@ -1337,22 +1337,22 @@ snapshot[`Plugin - SwitchStatement 1`] = ` { label: null, range: [ - 56, - 62, + 55, + 61, ], type: "BreakStatement", }, ], range: [ - 38, - 62, + 37, + 61, ], test: { name: "bar", optional: false, range: [ - 43, - 46, + 42, + 45, ], type: "Identifier", typeAnnotation: null, @@ -1364,15 +1364,15 @@ snapshot[`Plugin - SwitchStatement 1`] = ` { body: [], range: [ - 86, - 88, + 85, + 87, ], type: "BlockStatement", }, ], range: [ - 69, - 88, + 68, + 87, ], test: null, type: "SwitchCase", @@ -1382,15 +1382,15 @@ snapshot[`Plugin - SwitchStatement 1`] = ` name: "foo", optional: false, range: [ - 9, - 12, + 8, + 11, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 94, + 0, + 93, ], type: "SwitchStatement", } @@ -1402,15 +1402,15 @@ snapshot[`Plugin - ThrowStatement 1`] = ` name: 
"foo", optional: false, range: [ - 7, - 10, + 6, + 9, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 11, + 0, + 10, ], type: "ThrowStatement", } @@ -1421,8 +1421,8 @@ snapshot[`Plugin - TryStatement 1`] = ` block: { body: [], range: [ - 5, - 7, + 4, + 6, ], type: "BlockStatement", }, @@ -1431,21 +1431,21 @@ snapshot[`Plugin - TryStatement 1`] = ` body: { body: [], range: [ - 14, - 16, + 13, + 15, ], type: "BlockStatement", }, param: null, range: [ - 8, - 16, + 7, + 15, ], type: "CatchClause", }, range: [ - 1, - 16, + 0, + 15, ], type: "TryStatement", } @@ -1456,8 +1456,8 @@ snapshot[`Plugin - TryStatement 2`] = ` block: { body: [], range: [ - 5, - 7, + 4, + 6, ], type: "BlockStatement", }, @@ -1466,8 +1466,8 @@ snapshot[`Plugin - TryStatement 2`] = ` body: { body: [], range: [ - 18, - 20, + 17, + 19, ], type: "BlockStatement", }, @@ -1475,21 +1475,21 @@ snapshot[`Plugin - TryStatement 2`] = ` name: "e", optional: false, range: [ + 14, 15, - 16, ], type: "Identifier", typeAnnotation: null, }, range: [ - 8, - 20, + 7, + 19, ], type: "CatchClause", }, range: [ - 1, - 20, + 0, + 19, ], type: "TryStatement", } @@ -1500,23 +1500,23 @@ snapshot[`Plugin - TryStatement 3`] = ` block: { body: [], range: [ - 5, - 7, + 4, + 6, ], type: "BlockStatement", }, finalizer: { body: [], range: [ - 16, - 18, + 15, + 17, ], type: "BlockStatement", }, handler: null, range: [ - 1, - 18, + 0, + 17, ], type: "TryStatement", } @@ -1527,21 +1527,21 @@ snapshot[`Plugin - WhileStatement 1`] = ` body: { body: [], range: [ - 13, - 15, + 12, + 14, ], type: "BlockStatement", }, range: [ - 1, - 15, + 0, + 14, ], test: { name: "foo", optional: false, range: [ - 8, - 11, + 7, + 10, ], type: "Identifier", typeAnnotation: null, @@ -1555,22 +1555,22 @@ snapshot[`Plugin - WithStatement 1`] = ` body: { body: [], range: [ - 11, - 13, + 10, + 12, ], type: "BlockStatement", }, object: { elements: [], range: [ - 7, - 9, + 6, + 8, ], type: "ArrayExpression", }, range: [ - 1, - 13, + 0, + 12, ], type: "WithStatement", } @@ -1582,15 +1582,15 @@ snapshot[`Plugin - ArrayExpression 1`] = ` { elements: [], range: [ - 2, - 4, + 1, + 3, ], type: "ArrayExpression", }, ], range: [ - 1, - 9, + 0, + 8, ], type: "ArrayExpression", } @@ -1602,16 +1602,16 @@ snapshot[`Plugin - ArrowFunctionExpression 1`] = ` body: { body: [], range: [ - 7, - 9, + 6, + 8, ], type: "BlockStatement", }, generator: false, params: [], range: [ - 1, - 9, + 0, + 8, ], returnType: null, type: "ArrowFunctionExpression", @@ -1625,16 +1625,16 @@ snapshot[`Plugin - ArrowFunctionExpression 2`] = ` body: { body: [], range: [ - 13, - 15, + 12, + 14, ], type: "BlockStatement", }, generator: false, params: [], range: [ - 1, - 15, + 0, + 14, ], returnType: null, type: "ArrowFunctionExpression", @@ -1648,8 +1648,8 @@ snapshot[`Plugin - ArrowFunctionExpression 3`] = ` body: { body: [], range: [ - 34, - 36, + 33, + 35, ], type: "BlockStatement", }, @@ -1659,20 +1659,20 @@ snapshot[`Plugin - ArrowFunctionExpression 3`] = ` name: "a", optional: false, range: [ - 2, - 11, + 1, + 10, ], type: "Identifier", typeAnnotation: { range: [ - 3, - 11, + 2, + 10, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 5, - 11, + 4, + 10, ], type: "TSNumberKeyword", }, @@ -1683,34 +1683,34 @@ snapshot[`Plugin - ArrowFunctionExpression 3`] = ` name: "b", optional: false, range: [ + 15, 16, - 17, ], type: "Identifier", typeAnnotation: null, }, range: [ - 13, - 24, + 12, + 23, ], type: "RestElement", typeAnnotation: { range: [ - 17, - 24, + 16, + 23, ], type: "TSTypeAnnotation", 
typeAnnotation: { elementType: { range: [ - 19, - 22, + 18, + 21, ], type: "TSAnyKeyword", }, range: [ - 19, - 24, + 18, + 23, ], type: "TSArrayType", }, @@ -1718,19 +1718,19 @@ snapshot[`Plugin - ArrowFunctionExpression 3`] = ` }, ], range: [ - 1, - 36, + 0, + 35, ], returnType: { range: [ - 25, - 30, + 24, + 29, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 27, - 30, + 26, + 29, ], type: "TSAnyKeyword", }, @@ -1746,23 +1746,23 @@ snapshot[`Plugin - AssignmentExpression 1`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "=", range: [ - 1, - 6, + 0, + 5, ], right: { name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -1777,39 +1777,39 @@ snapshot[`Plugin - AssignmentExpression 2`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "=", range: [ - 1, - 12, + 0, + 11, ], right: { left: { name: "a", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, }, operator: "??=", range: [ - 5, - 12, + 4, + 11, ], right: { name: "b", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, @@ -1826,15 +1826,15 @@ snapshot[`Plugin - AwaitExpression 1`] = ` name: "foo", optional: false, range: [ - 7, - 10, + 6, + 9, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 10, + 0, + 9, ], type: "AwaitExpression", } @@ -1846,23 +1846,23 @@ snapshot[`Plugin - BinaryExpression 1`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: ">", range: [ - 1, - 6, + 0, + 5, ], right: { name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -1877,23 +1877,23 @@ snapshot[`Plugin - BinaryExpression 2`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: ">=", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -1908,23 +1908,23 @@ snapshot[`Plugin - BinaryExpression 3`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "<", range: [ - 1, - 6, + 0, + 5, ], right: { name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -1939,23 +1939,23 @@ snapshot[`Plugin - BinaryExpression 4`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "<=", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -1970,23 +1970,23 @@ snapshot[`Plugin - BinaryExpression 5`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "==", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -2001,23 +2001,23 @@ snapshot[`Plugin - BinaryExpression 6`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "===", range: [ - 1, - 8, + 0, + 7, ], right: { name: "b", optional: false, range: [ + 6, 7, - 8, ], type: "Identifier", typeAnnotation: null, @@ -2032,23 +2032,23 @@ snapshot[`Plugin - BinaryExpression 7`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: 
"!=", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -2063,23 +2063,23 @@ snapshot[`Plugin - BinaryExpression 8`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "!=", range: [ - 1, - 8, + 0, + 7, ], right: { name: "b", optional: false, range: [ + 6, 7, - 8, ], type: "Identifier", typeAnnotation: null, @@ -2094,23 +2094,23 @@ snapshot[`Plugin - BinaryExpression 9`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "<<", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -2125,23 +2125,23 @@ snapshot[`Plugin - BinaryExpression 10`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: ">>", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -2156,23 +2156,23 @@ snapshot[`Plugin - BinaryExpression 11`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: ">>>", range: [ - 1, - 8, + 0, + 7, ], right: { name: "b", optional: false, range: [ + 6, 7, - 8, ], type: "Identifier", typeAnnotation: null, @@ -2187,23 +2187,23 @@ snapshot[`Plugin - BinaryExpression 12`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "+", range: [ - 1, - 6, + 0, + 5, ], right: { name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -2218,23 +2218,23 @@ snapshot[`Plugin - BinaryExpression 13`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "-", range: [ - 1, - 6, + 0, + 5, ], right: { name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -2249,23 +2249,23 @@ snapshot[`Plugin - BinaryExpression 14`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "*", range: [ - 1, - 6, + 0, + 5, ], right: { name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -2280,23 +2280,23 @@ snapshot[`Plugin - BinaryExpression 15`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "/", range: [ - 1, - 6, + 0, + 5, ], right: { name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -2311,23 +2311,23 @@ snapshot[`Plugin - BinaryExpression 16`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "%", range: [ - 1, - 6, + 0, + 5, ], right: { name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -2342,23 +2342,23 @@ snapshot[`Plugin - BinaryExpression 17`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "|", range: [ - 1, - 6, + 0, + 5, ], right: { name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -2373,23 +2373,23 @@ snapshot[`Plugin - BinaryExpression 18`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "^", range: [ - 1, - 6, + 0, + 5, ], right: { name: 
"b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -2404,23 +2404,23 @@ snapshot[`Plugin - BinaryExpression 19`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "&", range: [ - 1, - 6, + 0, + 5, ], right: { name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -2435,23 +2435,23 @@ snapshot[`Plugin - BinaryExpression 20`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "in", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -2466,23 +2466,23 @@ snapshot[`Plugin - BinaryExpression 21`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "**", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -2498,16 +2498,16 @@ snapshot[`Plugin - CallExpression 1`] = ` name: "foo", optional: false, range: [ - 1, - 4, + 0, + 3, ], type: "Identifier", typeAnnotation: null, }, optional: false, range: [ - 1, - 6, + 0, + 5, ], type: "CallExpression", typeArguments: null, @@ -2521,8 +2521,8 @@ snapshot[`Plugin - CallExpression 2`] = ` name: "a", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -2532,15 +2532,15 @@ snapshot[`Plugin - CallExpression 2`] = ` name: "b", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 8, - 11, + 7, + 10, ], type: "SpreadElement", }, @@ -2549,16 +2549,16 @@ snapshot[`Plugin - CallExpression 2`] = ` name: "foo", optional: false, range: [ - 1, - 4, + 0, + 3, ], type: "Identifier", typeAnnotation: null, }, optional: false, range: [ - 1, - 13, + 0, + 12, ], type: "CallExpression", typeArguments: null, @@ -2572,16 +2572,16 @@ snapshot[`Plugin - CallExpression 3`] = ` name: "foo", optional: false, range: [ - 1, - 4, + 0, + 3, ], type: "Identifier", typeAnnotation: null, }, optional: true, range: [ - 1, - 8, + 0, + 7, ], type: "CallExpression", typeArguments: null, @@ -2595,24 +2595,24 @@ snapshot[`Plugin - CallExpression 4`] = ` name: "foo", optional: false, range: [ - 1, - 4, + 0, + 3, ], type: "Identifier", typeAnnotation: null, }, optional: false, range: [ - 1, - 9, + 0, + 8, ], type: "CallExpression", typeArguments: { params: [ { range: [ + 4, 5, - 6, ], type: "TSTypeReference", typeArguments: null, @@ -2620,8 +2620,8 @@ snapshot[`Plugin - CallExpression 4`] = ` name: "T", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, @@ -2629,8 +2629,8 @@ snapshot[`Plugin - CallExpression 4`] = ` }, ], range: [ - 4, - 7, + 3, + 6, ], type: "TSTypeParameterInstantiation", }, @@ -2645,8 +2645,8 @@ snapshot[`Plugin - ChainExpression 1`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, @@ -2656,21 +2656,21 @@ snapshot[`Plugin - ChainExpression 1`] = ` name: "b", optional: false, range: [ + 3, 4, - 5, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 5, + 0, + 4, ], type: "MemberExpression", }, range: [ - 1, - 5, + 0, + 4, ], type: "ChainExpression", } @@ -2682,8 +2682,8 @@ snapshot[`Plugin - ClassExpression 1`] = ` body: { body: [], range: [ - 5, - 13, + 4, + 12, ], type: "ClassBody", }, @@ -2691,8 +2691,8 @@ snapshot[`Plugin - ClassExpression 1`] = ` id: null, implements: 
[], range: [ - 5, - 13, + 4, + 12, ], superClass: null, type: "ClassExpression", @@ -2705,8 +2705,8 @@ snapshot[`Plugin - ClassExpression 2`] = ` body: { body: [], range: [ - 5, - 17, + 4, + 16, ], type: "ClassBody", }, @@ -2715,16 +2715,16 @@ snapshot[`Plugin - ClassExpression 2`] = ` name: "Foo", optional: false, range: [ - 11, - 14, + 10, + 13, ], type: "Identifier", typeAnnotation: null, }, implements: [], range: [ - 5, - 17, + 4, + 16, ], superClass: null, type: "ClassExpression", @@ -2737,8 +2737,8 @@ snapshot[`Plugin - ClassExpression 3`] = ` body: { body: [], range: [ - 5, - 29, + 4, + 28, ], type: "ClassBody", }, @@ -2747,23 +2747,23 @@ snapshot[`Plugin - ClassExpression 3`] = ` name: "Foo", optional: false, range: [ - 11, - 14, + 10, + 13, ], type: "Identifier", typeAnnotation: null, }, implements: [], range: [ - 5, - 29, + 4, + 28, ], superClass: { name: "Bar", optional: false, range: [ - 23, - 26, + 22, + 25, ], type: "Identifier", typeAnnotation: null, @@ -2778,8 +2778,8 @@ snapshot[`Plugin - ClassExpression 4`] = ` body: { body: [], range: [ - 5, - 50, + 4, + 49, ], type: "ClassBody", }, @@ -2788,8 +2788,8 @@ snapshot[`Plugin - ClassExpression 4`] = ` name: "Foo", optional: false, range: [ - 11, - 14, + 10, + 13, ], type: "Identifier", typeAnnotation: null, @@ -2800,15 +2800,15 @@ snapshot[`Plugin - ClassExpression 4`] = ` name: "Baz", optional: false, range: [ - 38, - 41, + 37, + 40, ], type: "Identifier", typeAnnotation: null, }, range: [ - 38, - 41, + 37, + 40, ], type: "TSClassImplements", typeArguments: null, @@ -2818,30 +2818,30 @@ snapshot[`Plugin - ClassExpression 4`] = ` name: "Baz2", optional: false, range: [ - 43, - 47, + 42, + 46, ], type: "Identifier", typeAnnotation: null, }, range: [ - 43, - 47, + 42, + 46, ], type: "TSClassImplements", typeArguments: null, }, ], range: [ - 5, - 50, + 4, + 49, ], superClass: { name: "Bar", optional: false, range: [ - 23, - 26, + 22, + 25, ], type: "Identifier", typeAnnotation: null, @@ -2856,8 +2856,8 @@ snapshot[`Plugin - ClassExpression 5`] = ` body: { body: [], range: [ - 5, - 20, + 4, + 19, ], type: "ClassBody", }, @@ -2866,16 +2866,16 @@ snapshot[`Plugin - ClassExpression 5`] = ` name: "Foo", optional: false, range: [ - 11, - 14, + 10, + 13, ], type: "Identifier", typeAnnotation: null, }, implements: [], range: [ - 5, - 20, + 4, + 19, ], superClass: null, type: "ClassExpression", @@ -2895,8 +2895,8 @@ snapshot[`Plugin - ClassExpression 6`] = ` name: "foo", optional: false, range: [ - 13, - 16, + 12, + 15, ], type: "Identifier", typeAnnotation: null, @@ -2905,8 +2905,8 @@ snapshot[`Plugin - ClassExpression 6`] = ` optional: false, override: false, range: [ - 13, - 21, + 12, + 20, ], static: false, type: "MethodDefinition", @@ -2915,8 +2915,8 @@ snapshot[`Plugin - ClassExpression 6`] = ` body: { body: [], range: [ - 19, - 21, + 18, + 20, ], type: "BlockStatement", }, @@ -2924,8 +2924,8 @@ snapshot[`Plugin - ClassExpression 6`] = ` id: null, params: [], range: [ - 13, - 21, + 12, + 20, ], returnType: null, type: "FunctionExpression", @@ -2934,8 +2934,8 @@ snapshot[`Plugin - ClassExpression 6`] = ` }, ], range: [ - 5, - 23, + 4, + 22, ], type: "ClassBody", }, @@ -2943,8 +2943,8 @@ snapshot[`Plugin - ClassExpression 6`] = ` id: null, implements: [], range: [ - 5, - 23, + 4, + 22, ], superClass: null, type: "ClassExpression", @@ -2963,8 +2963,8 @@ snapshot[`Plugin - ClassExpression 7`] = ` key: { name: "foo", range: [ - 13, - 17, + 12, + 16, ], type: "PrivateIdentifier", }, @@ -2972,8 +2972,8 @@ snapshot[`Plugin - 
ClassExpression 7`] = ` optional: false, override: false, range: [ - 13, - 22, + 12, + 21, ], static: false, type: "MethodDefinition", @@ -2982,8 +2982,8 @@ snapshot[`Plugin - ClassExpression 7`] = ` body: { body: [], range: [ - 20, - 22, + 19, + 21, ], type: "BlockStatement", }, @@ -2991,8 +2991,8 @@ snapshot[`Plugin - ClassExpression 7`] = ` id: null, params: [], range: [ - 13, - 22, + 12, + 21, ], returnType: null, type: "FunctionExpression", @@ -3001,8 +3001,8 @@ snapshot[`Plugin - ClassExpression 7`] = ` }, ], range: [ - 5, - 24, + 4, + 23, ], type: "ClassBody", }, @@ -3010,8 +3010,8 @@ snapshot[`Plugin - ClassExpression 7`] = ` id: null, implements: [], range: [ - 5, - 24, + 4, + 23, ], superClass: null, type: "ClassExpression", @@ -3032,8 +3032,8 @@ snapshot[`Plugin - ClassExpression 8`] = ` name: "foo", optional: false, range: [ - 13, - 16, + 12, + 15, ], type: "Identifier", typeAnnotation: null, @@ -3041,8 +3041,8 @@ snapshot[`Plugin - ClassExpression 8`] = ` optional: false, override: false, range: [ - 13, - 24, + 12, + 23, ], readonly: false, static: false, @@ -3051,8 +3051,8 @@ snapshot[`Plugin - ClassExpression 8`] = ` }, ], range: [ - 5, - 26, + 4, + 25, ], type: "ClassBody", }, @@ -3060,8 +3060,8 @@ snapshot[`Plugin - ClassExpression 8`] = ` id: null, implements: [], range: [ - 5, - 26, + 4, + 25, ], superClass: null, type: "ClassExpression", @@ -3082,8 +3082,8 @@ snapshot[`Plugin - ClassExpression 9`] = ` name: "foo", optional: false, range: [ - 13, - 16, + 12, + 15, ], type: "Identifier", typeAnnotation: null, @@ -3091,8 +3091,8 @@ snapshot[`Plugin - ClassExpression 9`] = ` optional: false, override: false, range: [ - 13, - 22, + 12, + 21, ], readonly: false, static: false, @@ -3101,8 +3101,8 @@ snapshot[`Plugin - ClassExpression 9`] = ` name: "bar", optional: false, range: [ - 19, - 22, + 18, + 21, ], type: "Identifier", typeAnnotation: null, @@ -3110,8 +3110,8 @@ snapshot[`Plugin - ClassExpression 9`] = ` }, ], range: [ - 5, - 24, + 4, + 23, ], type: "ClassBody", }, @@ -3119,8 +3119,8 @@ snapshot[`Plugin - ClassExpression 9`] = ` id: null, implements: [], range: [ - 5, - 24, + 4, + 23, ], superClass: null, type: "ClassExpression", @@ -3140,8 +3140,8 @@ snapshot[`Plugin - ClassExpression 10`] = ` name: "constructor", optional: false, range: [ - 13, - 24, + 12, + 23, ], type: "Identifier", typeAnnotation: null, @@ -3150,8 +3150,8 @@ snapshot[`Plugin - ClassExpression 10`] = ` optional: false, override: false, range: [ - 13, - 47, + 12, + 46, ], static: false, type: "MethodDefinition", @@ -3160,8 +3160,8 @@ snapshot[`Plugin - ClassExpression 10`] = ` body: { body: [], range: [ - 45, - 47, + 44, + 46, ], type: "BlockStatement", }, @@ -3176,28 +3176,28 @@ snapshot[`Plugin - ClassExpression 10`] = ` name: "foo", optional: false, range: [ - 32, - 35, + 31, + 34, ], type: "Identifier", typeAnnotation: { range: [ - 35, - 43, + 34, + 42, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 37, - 43, + 36, + 42, ], type: "TSStringKeyword", }, }, }, range: [ - 25, - 43, + 24, + 42, ], readonly: false, static: false, @@ -3205,8 +3205,8 @@ snapshot[`Plugin - ClassExpression 10`] = ` }, ], range: [ - 13, - 47, + 12, + 46, ], returnType: null, type: "FunctionExpression", @@ -3215,8 +3215,8 @@ snapshot[`Plugin - ClassExpression 10`] = ` }, ], range: [ - 5, - 49, + 4, + 48, ], type: "ClassBody", }, @@ -3224,8 +3224,8 @@ snapshot[`Plugin - ClassExpression 10`] = ` id: null, implements: [], range: [ - 5, - 49, + 4, + 48, ], superClass: null, type: "ClassExpression", @@ -3245,16 
+3245,16 @@ snapshot[`Plugin - ClassExpression 11`] = ` key: { name: "foo", range: [ - 13, - 17, + 12, + 16, ], type: "PrivateIdentifier", }, optional: false, override: false, range: [ - 13, - 31, + 12, + 30, ], readonly: false, static: false, @@ -3263,8 +3263,8 @@ snapshot[`Plugin - ClassExpression 11`] = ` name: "bar", optional: false, range: [ - 28, - 31, + 27, + 30, ], type: "Identifier", typeAnnotation: null, @@ -3272,8 +3272,8 @@ snapshot[`Plugin - ClassExpression 11`] = ` }, ], range: [ - 5, - 33, + 4, + 32, ], type: "ClassBody", }, @@ -3281,8 +3281,8 @@ snapshot[`Plugin - ClassExpression 11`] = ` id: null, implements: [], range: [ - 5, - 33, + 4, + 32, ], superClass: null, type: "ClassExpression", @@ -3303,8 +3303,8 @@ snapshot[`Plugin - ClassExpression 12`] = ` name: "foo", optional: false, range: [ - 20, - 23, + 19, + 22, ], type: "Identifier", typeAnnotation: null, @@ -3312,8 +3312,8 @@ snapshot[`Plugin - ClassExpression 12`] = ` optional: false, override: false, range: [ - 13, - 29, + 12, + 28, ], readonly: false, static: true, @@ -3322,8 +3322,8 @@ snapshot[`Plugin - ClassExpression 12`] = ` name: "bar", optional: false, range: [ - 26, - 29, + 25, + 28, ], type: "Identifier", typeAnnotation: null, @@ -3331,8 +3331,8 @@ snapshot[`Plugin - ClassExpression 12`] = ` }, ], range: [ - 5, - 31, + 4, + 30, ], type: "ClassBody", }, @@ -3340,8 +3340,8 @@ snapshot[`Plugin - ClassExpression 12`] = ` id: null, implements: [], range: [ - 5, - 31, + 4, + 30, ], superClass: null, type: "ClassExpression", @@ -3362,8 +3362,8 @@ snapshot[`Plugin - ClassExpression 13`] = ` name: "foo", optional: false, range: [ - 20, - 23, + 19, + 22, ], type: "Identifier", typeAnnotation: null, @@ -3371,8 +3371,8 @@ snapshot[`Plugin - ClassExpression 13`] = ` optional: false, override: false, range: [ - 13, - 24, + 12, + 23, ], readonly: false, static: true, @@ -3388,23 +3388,23 @@ snapshot[`Plugin - ClassExpression 13`] = ` name: "foo", optional: false, range: [ - 34, - 37, + 33, + 36, ], type: "Identifier", typeAnnotation: null, }, operator: "=", range: [ - 34, - 43, + 33, + 42, ], right: { name: "bar", optional: false, range: [ - 40, - 43, + 39, + 42, ], type: "Identifier", typeAnnotation: null, @@ -3412,28 +3412,28 @@ snapshot[`Plugin - ClassExpression 13`] = ` type: "AssignmentExpression", }, range: [ - 34, - 43, + 33, + 42, ], type: "ExpressionStatement", }, ], range: [ - 32, - 45, + 31, + 44, ], type: "BlockStatement", }, range: [ - 25, - 45, + 24, + 44, ], type: "StaticBlock", }, ], range: [ - 5, - 47, + 4, + 46, ], type: "ClassBody", }, @@ -3441,8 +3441,8 @@ snapshot[`Plugin - ClassExpression 13`] = ` id: null, implements: [], range: [ - 5, - 47, + 4, + 46, ], superClass: null, type: "ClassExpression", @@ -3455,8 +3455,8 @@ snapshot[`Plugin - ConditionalExpression 1`] = ` name: "c", optional: false, range: [ + 8, 9, - 10, ], type: "Identifier", typeAnnotation: null, @@ -3465,22 +3465,22 @@ snapshot[`Plugin - ConditionalExpression 1`] = ` name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 10, + 0, + 9, ], test: { name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, @@ -3495,8 +3495,8 @@ snapshot[`Plugin - FunctionExpression 1`] = ` body: { body: [], range: [ - 17, - 19, + 16, + 18, ], type: "BlockStatement", }, @@ -3504,8 +3504,8 @@ snapshot[`Plugin - FunctionExpression 1`] = ` id: null, params: [], range: [ - 5, - 19, + 4, + 18, ], returnType: null, type: "FunctionExpression", @@ -3519,8 +3519,8 @@ 
snapshot[`Plugin - FunctionExpression 2`] = ` body: { body: [], range: [ - 20, - 22, + 19, + 21, ], type: "BlockStatement", }, @@ -3529,16 +3529,16 @@ snapshot[`Plugin - FunctionExpression 2`] = ` name: "foo", optional: false, range: [ - 14, - 17, + 13, + 16, ], type: "Identifier", typeAnnotation: null, }, params: [], range: [ - 5, - 22, + 4, + 21, ], returnType: null, type: "FunctionExpression", @@ -3552,8 +3552,8 @@ snapshot[`Plugin - FunctionExpression 3`] = ` body: { body: [], range: [ - 45, - 47, + 44, + 46, ], type: "BlockStatement", }, @@ -3564,20 +3564,20 @@ snapshot[`Plugin - FunctionExpression 3`] = ` name: "a", optional: true, range: [ + 14, 15, - 16, ], type: "Identifier", typeAnnotation: { range: [ - 17, - 25, + 16, + 24, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 19, - 25, + 18, + 24, ], type: "TSNumberKeyword", }, @@ -3588,34 +3588,34 @@ snapshot[`Plugin - FunctionExpression 3`] = ` name: "b", optional: false, range: [ + 29, 30, - 31, ], type: "Identifier", typeAnnotation: null, }, range: [ - 27, - 38, + 26, + 37, ], type: "RestElement", typeAnnotation: { range: [ - 31, - 38, + 30, + 37, ], type: "TSTypeAnnotation", typeAnnotation: { elementType: { range: [ - 33, - 36, + 32, + 35, ], type: "TSAnyKeyword", }, range: [ - 33, - 38, + 32, + 37, ], type: "TSArrayType", }, @@ -3623,19 +3623,19 @@ snapshot[`Plugin - FunctionExpression 3`] = ` }, ], range: [ - 5, - 47, + 4, + 46, ], returnType: { range: [ - 39, - 44, + 38, + 43, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 41, - 44, + 40, + 43, ], type: "TSAnyKeyword", }, @@ -3651,8 +3651,8 @@ snapshot[`Plugin - FunctionExpression 4`] = ` body: { body: [], range: [ - 24, - 26, + 23, + 25, ], type: "BlockStatement", }, @@ -3660,8 +3660,8 @@ snapshot[`Plugin - FunctionExpression 4`] = ` id: null, params: [], range: [ - 5, - 26, + 4, + 25, ], returnType: null, type: "FunctionExpression", @@ -3674,8 +3674,8 @@ snapshot[`Plugin - Identifier 1`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, @@ -3692,8 +3692,8 @@ snapshot[`Plugin - ImportExpression 1`] = ` name: "with", optional: false, range: [ - 17, - 21, + 16, + 20, ], type: "Identifier", typeAnnotation: null, @@ -3701,8 +3701,8 @@ snapshot[`Plugin - ImportExpression 1`] = ` kind: "init", method: false, range: [ - 17, - 39, + 16, + 38, ], shorthand: false, type: "Property", @@ -3714,8 +3714,8 @@ snapshot[`Plugin - ImportExpression 1`] = ` name: "type", optional: false, range: [ - 25, - 29, + 24, + 28, ], type: "Identifier", typeAnnotation: null, @@ -3723,15 +3723,15 @@ snapshot[`Plugin - ImportExpression 1`] = ` kind: "init", method: false, range: [ - 25, - 37, + 24, + 36, ], shorthand: false, type: "Property", value: { range: [ - 31, - 37, + 30, + 36, ], raw: "'json'", type: "Literal", @@ -3740,27 +3740,27 @@ snapshot[`Plugin - ImportExpression 1`] = ` }, ], range: [ - 23, - 39, + 22, + 38, ], type: "ObjectExpression", }, }, ], range: [ - 15, - 41, + 14, + 40, ], type: "ObjectExpression", }, range: [ - 1, - 42, + 0, + 41, ], source: { range: [ - 8, - 13, + 7, + 12, ], raw: "'foo'", type: "Literal", @@ -3776,23 +3776,23 @@ snapshot[`Plugin - LogicalExpression 1`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "&&", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -3807,23 +3807,23 @@ snapshot[`Plugin - LogicalExpression 2`] = ` name: "a", optional: false, 
range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "||", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -3838,23 +3838,23 @@ snapshot[`Plugin - LogicalExpression 3`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, operator: "??", range: [ - 1, - 7, + 0, + 6, ], right: { name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -3870,8 +3870,8 @@ snapshot[`Plugin - MemberExpression 1`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, @@ -3881,15 +3881,15 @@ snapshot[`Plugin - MemberExpression 1`] = ` name: "b", optional: false, range: [ + 2, 3, - 4, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 4, + 0, + 3, ], type: "MemberExpression", } @@ -3902,8 +3902,8 @@ snapshot[`Plugin - MemberExpression 2`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, @@ -3911,16 +3911,16 @@ snapshot[`Plugin - MemberExpression 2`] = ` optional: false, property: { range: [ - 3, - 6, + 2, + 5, ], raw: "'b'", type: "Literal", value: "b", }, range: [ - 1, - 7, + 0, + 6, ], type: "MemberExpression", } @@ -3932,15 +3932,15 @@ snapshot[`Plugin - MetaProperty 1`] = ` name: "meta", optional: false, range: [ - 1, - 12, + 0, + 11, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 12, + 0, + 11, ], type: "MetaProperty", } @@ -3953,15 +3953,15 @@ snapshot[`Plugin - NewExpression 1`] = ` name: "Foo", optional: false, range: [ - 5, - 8, + 4, + 7, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 10, + 0, + 9, ], type: "NewExpression", typeArguments: null, @@ -3975,8 +3975,8 @@ snapshot[`Plugin - NewExpression 2`] = ` name: "a", optional: false, range: [ + 11, 12, - 13, ], type: "Identifier", typeAnnotation: null, @@ -3986,15 +3986,15 @@ snapshot[`Plugin - NewExpression 2`] = ` name: "b", optional: false, range: [ + 17, 18, - 19, ], type: "Identifier", typeAnnotation: null, }, range: [ - 15, - 18, + 14, + 17, ], type: "SpreadElement", }, @@ -4003,23 +4003,23 @@ snapshot[`Plugin - NewExpression 2`] = ` name: "Foo", optional: false, range: [ - 5, - 8, + 4, + 7, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 20, + 0, + 19, ], type: "NewExpression", typeArguments: { params: [ { range: [ + 8, 9, - 10, ], type: "TSTypeReference", typeArguments: null, @@ -4027,8 +4027,8 @@ snapshot[`Plugin - NewExpression 2`] = ` name: "T", optional: false, range: [ + 8, 9, - 10, ], type: "Identifier", typeAnnotation: null, @@ -4036,8 +4036,8 @@ snapshot[`Plugin - NewExpression 2`] = ` }, ], range: [ - 8, - 11, + 7, + 10, ], type: "TSTypeParameterInstantiation", }, @@ -4048,8 +4048,8 @@ snapshot[`Plugin - ObjectExpression 1`] = ` { properties: [], range: [ - 5, - 7, + 4, + 6, ], type: "ObjectExpression", } @@ -4064,8 +4064,8 @@ snapshot[`Plugin - ObjectExpression 2`] = ` name: "a", optional: false, range: [ + 6, 7, - 8, ], type: "Identifier", typeAnnotation: null, @@ -4073,8 +4073,8 @@ snapshot[`Plugin - ObjectExpression 2`] = ` kind: "init", method: false, range: [ + 6, 7, - 8, ], shorthand: true, type: "Property", @@ -4082,8 +4082,8 @@ snapshot[`Plugin - ObjectExpression 2`] = ` name: "a", optional: false, range: [ + 6, 7, - 8, ], type: "Identifier", typeAnnotation: null, @@ -4091,8 +4091,8 @@ snapshot[`Plugin - ObjectExpression 2`] = ` }, ], range: [ - 5, - 10, + 4, + 9, ], 
type: "ObjectExpression", } @@ -4107,8 +4107,8 @@ snapshot[`Plugin - ObjectExpression 3`] = ` name: "b", optional: false, range: [ + 6, 7, - 8, ], type: "Identifier", typeAnnotation: null, @@ -4116,8 +4116,8 @@ snapshot[`Plugin - ObjectExpression 3`] = ` kind: "init", method: false, range: [ - 7, - 11, + 6, + 10, ], shorthand: false, type: "Property", @@ -4125,8 +4125,8 @@ snapshot[`Plugin - ObjectExpression 3`] = ` name: "c", optional: false, range: [ + 9, 10, - 11, ], type: "Identifier", typeAnnotation: null, @@ -4138,8 +4138,8 @@ snapshot[`Plugin - ObjectExpression 3`] = ` name: "c", optional: false, range: [ + 13, 14, - 15, ], type: "Identifier", typeAnnotation: null, @@ -4147,8 +4147,8 @@ snapshot[`Plugin - ObjectExpression 3`] = ` kind: "init", method: false, range: [ - 13, - 19, + 12, + 18, ], shorthand: false, type: "Property", @@ -4156,8 +4156,8 @@ snapshot[`Plugin - ObjectExpression 3`] = ` name: "d", optional: false, range: [ + 17, 18, - 19, ], type: "Identifier", typeAnnotation: null, @@ -4165,8 +4165,8 @@ snapshot[`Plugin - ObjectExpression 3`] = ` }, ], range: [ - 5, - 21, + 4, + 20, ], type: "ObjectExpression", } @@ -4176,8 +4176,8 @@ snapshot[`Plugin - PrivateIdentifier 1`] = ` { name: "foo", range: [ - 13, - 17, + 12, + 16, ], type: "PrivateIdentifier", } @@ -4190,8 +4190,8 @@ snapshot[`Plugin - SequenceExpression 1`] = ` name: "a", optional: false, range: [ + 1, 2, - 3, ], type: "Identifier", typeAnnotation: null, @@ -4200,16 +4200,16 @@ snapshot[`Plugin - SequenceExpression 1`] = ` name: "b", optional: false, range: [ + 4, 5, - 6, ], type: "Identifier", typeAnnotation: null, }, ], range: [ - 2, - 6, + 1, + 5, ], type: "SequenceExpression", } @@ -4218,8 +4218,8 @@ snapshot[`Plugin - SequenceExpression 1`] = ` snapshot[`Plugin - Super 1`] = ` { range: [ - 41, - 46, + 40, + 45, ], type: "Super", } @@ -4233,8 +4233,8 @@ snapshot[`Plugin - TaggedTemplateExpression 1`] = ` name: "bar", optional: false, range: [ - 11, - 14, + 10, + 13, ], type: "Identifier", typeAnnotation: null, @@ -4244,8 +4244,8 @@ snapshot[`Plugin - TaggedTemplateExpression 1`] = ` { cooked: "foo ", range: [ - 5, - 9, + 4, + 8, ], raw: "foo ", tail: false, @@ -4254,8 +4254,8 @@ snapshot[`Plugin - TaggedTemplateExpression 1`] = ` { cooked: " baz", range: [ - 15, - 19, + 14, + 18, ], raw: " baz", tail: true, @@ -4263,21 +4263,21 @@ snapshot[`Plugin - TaggedTemplateExpression 1`] = ` }, ], range: [ - 4, - 20, + 3, + 19, ], type: "TemplateLiteral", }, range: [ - 1, - 20, + 0, + 19, ], tag: { name: "foo", optional: false, range: [ - 1, - 4, + 0, + 3, ], type: "Identifier", typeAnnotation: null, @@ -4294,8 +4294,8 @@ snapshot[`Plugin - TemplateLiteral 1`] = ` name: "bar", optional: false, range: [ - 8, - 11, + 7, + 10, ], type: "Identifier", typeAnnotation: null, @@ -4305,8 +4305,8 @@ snapshot[`Plugin - TemplateLiteral 1`] = ` { cooked: "foo ", range: [ - 2, - 6, + 1, + 5, ], raw: "foo ", tail: false, @@ -4315,8 +4315,8 @@ snapshot[`Plugin - TemplateLiteral 1`] = ` { cooked: " baz", range: [ - 12, - 16, + 11, + 15, ], raw: " baz", tail: true, @@ -4324,8 +4324,8 @@ snapshot[`Plugin - TemplateLiteral 1`] = ` }, ], range: [ - 1, - 17, + 0, + 16, ], type: "TemplateLiteral", } @@ -4334,8 +4334,8 @@ snapshot[`Plugin - TemplateLiteral 1`] = ` snapshot[`Plugin - ThisExpression 1`] = ` { range: [ - 1, - 5, + 0, + 4, ], type: "ThisExpression", } @@ -4347,21 +4347,21 @@ snapshot[`Plugin - TSAsExpression 1`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 
7, + 0, + 6, ], type: "TSAsExpression", typeAnnotation: { range: [ + 5, 6, - 7, ], type: "TSTypeReference", typeArguments: null, @@ -4369,8 +4369,8 @@ snapshot[`Plugin - TSAsExpression 1`] = ` name: "b", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -4385,21 +4385,21 @@ snapshot[`Plugin - TSAsExpression 2`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 11, + 0, + 10, ], type: "TSAsExpression", typeAnnotation: { range: [ - 1, - 11, + 0, + 10, ], type: "TSTypeReference", typeArguments: null, @@ -4407,8 +4407,8 @@ snapshot[`Plugin - TSAsExpression 2`] = ` name: "const", optional: false, range: [ - 1, - 11, + 0, + 10, ], type: "Identifier", typeAnnotation: null, @@ -4423,15 +4423,15 @@ snapshot[`Plugin - TSNonNullExpression 1`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 3, + 0, + 2, ], type: "TSNonNullExpression", } @@ -4443,21 +4443,21 @@ snapshot[`Plugin - TSSatisfiesExpression 1`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 14, + 0, + 13, ], type: "TSSatisfiesExpression", typeAnnotation: { range: [ + 12, 13, - 14, ], type: "TSTypeReference", typeArguments: null, @@ -4465,8 +4465,8 @@ snapshot[`Plugin - TSSatisfiesExpression 1`] = ` name: "b", optional: false, range: [ + 12, 13, - 14, ], type: "Identifier", typeAnnotation: null, @@ -4481,16 +4481,16 @@ snapshot[`Plugin - UnaryExpression 1`] = ` name: "a", optional: false, range: [ + 7, 8, - 9, ], type: "Identifier", typeAnnotation: null, }, operator: "typeof", range: [ - 1, - 9, + 0, + 8, ], type: "UnaryExpression", } @@ -4500,8 +4500,8 @@ snapshot[`Plugin - UnaryExpression 2`] = ` { argument: { range: [ + 5, 6, - 7, ], raw: "0", type: "Literal", @@ -4509,8 +4509,8 @@ snapshot[`Plugin - UnaryExpression 2`] = ` }, operator: "void", range: [ - 1, - 7, + 0, + 6, ], type: "UnaryExpression", } @@ -4522,16 +4522,16 @@ snapshot[`Plugin - UnaryExpression 3`] = ` name: "a", optional: false, range: [ + 1, 2, - 3, ], type: "Identifier", typeAnnotation: null, }, operator: "-", range: [ - 1, - 3, + 0, + 2, ], type: "UnaryExpression", } @@ -4543,16 +4543,16 @@ snapshot[`Plugin - UnaryExpression 4`] = ` name: "a", optional: false, range: [ + 1, 2, - 3, ], type: "Identifier", typeAnnotation: null, }, operator: "+", range: [ - 1, - 3, + 0, + 2, ], type: "UnaryExpression", } @@ -4564,8 +4564,8 @@ snapshot[`Plugin - UpdateExpression 1`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, @@ -4573,8 +4573,8 @@ snapshot[`Plugin - UpdateExpression 1`] = ` operator: "++", prefix: false, range: [ - 1, - 4, + 0, + 3, ], type: "UpdateExpression", } @@ -4586,8 +4586,8 @@ snapshot[`Plugin - UpdateExpression 2`] = ` name: "a", optional: false, range: [ + 2, 3, - 4, ], type: "Identifier", typeAnnotation: null, @@ -4595,8 +4595,8 @@ snapshot[`Plugin - UpdateExpression 2`] = ` operator: "++", prefix: true, range: [ - 1, - 4, + 0, + 3, ], type: "UpdateExpression", } @@ -4608,8 +4608,8 @@ snapshot[`Plugin - UpdateExpression 3`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, @@ -4617,8 +4617,8 @@ snapshot[`Plugin - UpdateExpression 3`] = ` operator: "--", prefix: false, range: [ - 1, - 4, + 0, + 3, ], type: "UpdateExpression", } @@ -4630,8 +4630,8 @@ snapshot[`Plugin - UpdateExpression 4`] = ` name: "a", optional: false, 
range: [ + 2, 3, - 4, ], type: "Identifier", typeAnnotation: null, @@ -4639,8 +4639,8 @@ snapshot[`Plugin - UpdateExpression 4`] = ` operator: "--", prefix: true, range: [ - 1, - 4, + 0, + 3, ], type: "UpdateExpression", } @@ -4652,16 +4652,16 @@ snapshot[`Plugin - YieldExpression 1`] = ` name: "bar", optional: false, range: [ - 25, - 28, + 24, + 27, ], type: "Identifier", typeAnnotation: null, }, delegate: false, range: [ - 19, - 28, + 18, + 27, ], type: "YieldExpression", } @@ -4670,8 +4670,8 @@ snapshot[`Plugin - YieldExpression 1`] = ` snapshot[`Plugin - Literal 1`] = ` { range: [ + 0, 1, - 2, ], raw: "1", type: "Literal", @@ -4682,8 +4682,8 @@ snapshot[`Plugin - Literal 1`] = ` snapshot[`Plugin - Literal 2`] = ` { range: [ - 1, - 6, + 0, + 5, ], raw: "'foo'", type: "Literal", @@ -4694,8 +4694,8 @@ snapshot[`Plugin - Literal 2`] = ` snapshot[`Plugin - Literal 3`] = ` { range: [ - 1, - 6, + 0, + 5, ], raw: '"foo"', type: "Literal", @@ -4706,8 +4706,8 @@ snapshot[`Plugin - Literal 3`] = ` snapshot[`Plugin - Literal 4`] = ` { range: [ - 1, - 5, + 0, + 4, ], raw: "true", type: "Literal", @@ -4718,8 +4718,8 @@ snapshot[`Plugin - Literal 4`] = ` snapshot[`Plugin - Literal 5`] = ` { range: [ - 1, - 6, + 0, + 5, ], raw: "false", type: "Literal", @@ -4730,8 +4730,8 @@ snapshot[`Plugin - Literal 5`] = ` snapshot[`Plugin - Literal 6`] = ` { range: [ - 1, - 5, + 0, + 4, ], raw: "null", type: "Literal", @@ -4743,8 +4743,8 @@ snapshot[`Plugin - Literal 7`] = ` { bigint: "1", range: [ - 1, - 3, + 0, + 2, ], raw: "1n", type: "Literal", @@ -4755,8 +4755,8 @@ snapshot[`Plugin - Literal 7`] = ` snapshot[`Plugin - Literal 8`] = ` { range: [ - 1, - 7, + 0, + 6, ], raw: "/foo/g", regex: { @@ -4777,22 +4777,22 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "div", range: [ - 2, - 5, + 1, + 4, ], type: "JSXIdentifier", }, range: [ - 1, - 8, + 0, + 7, ], selfClosing: true, type: "JSXOpeningElement", typeArguments: null, }, range: [ - 1, - 8, + 0, + 7, ], type: "JSXElement", } @@ -4805,14 +4805,14 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "div", range: [ - 8, - 11, + 7, + 10, ], type: "JSXIdentifier", }, range: [ - 6, - 12, + 5, + 11, ], type: "JSXClosingElement", }, @@ -4821,22 +4821,22 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "div", range: [ - 2, - 5, + 1, + 4, ], type: "JSXIdentifier", }, range: [ - 1, - 6, + 0, + 5, ], selfClosing: false, type: "JSXOpeningElement", typeArguments: null, }, range: [ - 1, - 12, + 0, + 11, ], type: "JSXElement", } @@ -4849,14 +4849,14 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "div", range: [ - 10, - 13, + 9, + 12, ], type: "JSXIdentifier", }, range: [ - 8, - 14, + 7, + 13, ], type: "JSXClosingElement", }, @@ -4866,14 +4866,14 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "a", range: [ + 5, 6, - 7, ], type: "JSXIdentifier", }, range: [ + 5, 6, - 7, ], type: "JSXAttribute", value: null, @@ -4882,22 +4882,22 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "div", range: [ - 2, - 5, + 1, + 4, ], type: "JSXIdentifier", }, range: [ - 1, - 8, + 0, + 7, ], selfClosing: false, type: "JSXOpeningElement", typeArguments: null, }, range: [ - 1, - 14, + 0, + 13, ], type: "JSXElement", } @@ -4913,20 +4913,20 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + 
JSXAttr name: { name: "a", range: [ + 5, 6, - 7, ], type: "JSXIdentifier", }, range: [ - 6, - 11, + 5, + 10, ], type: "JSXAttribute", value: { range: [ - 8, - 11, + 7, + 10, ], raw: '"b"', type: "Literal", @@ -4937,22 +4937,22 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "div", range: [ - 2, - 5, + 1, + 4, ], type: "JSXIdentifier", }, range: [ - 1, - 14, + 0, + 13, ], selfClosing: true, type: "JSXOpeningElement", typeArguments: null, }, range: [ - 1, - 14, + 0, + 13, ], type: "JSXElement", } @@ -4968,29 +4968,29 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "a", range: [ + 5, 6, - 7, ], type: "JSXIdentifier", }, range: [ - 6, - 11, + 5, + 10, ], type: "JSXAttribute", value: { expression: { range: [ + 8, 9, - 10, ], raw: "2", type: "Literal", value: 2, }, range: [ - 8, - 11, + 7, + 10, ], type: "JSXExpressionContainer", }, @@ -4999,22 +4999,22 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "div", range: [ - 2, - 5, + 1, + 4, ], type: "JSXIdentifier", }, range: [ - 1, - 14, + 0, + 13, ], selfClosing: true, type: "JSXOpeningElement", typeArguments: null, }, range: [ - 1, - 14, + 0, + 13, ], type: "JSXElement", } @@ -5025,8 +5025,8 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr children: [ { range: [ - 6, - 9, + 5, + 8, ], raw: "foo", type: "JSXText", @@ -5035,16 +5035,16 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr { expression: { range: [ + 9, 10, - 11, ], raw: "2", type: "Literal", value: 2, }, range: [ - 9, - 12, + 8, + 11, ], type: "JSXExpressionContainer", }, @@ -5053,14 +5053,14 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "div", range: [ - 14, - 17, + 13, + 16, ], type: "JSXIdentifier", }, range: [ - 12, - 18, + 11, + 17, ], type: "JSXClosingElement", }, @@ -5069,22 +5069,22 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "div", range: [ - 2, - 5, + 1, + 4, ], type: "JSXIdentifier", }, range: [ - 1, - 6, + 0, + 5, ], selfClosing: false, type: "JSXOpeningElement", typeArguments: null, }, range: [ - 1, - 18, + 0, + 17, ], type: "JSXElement", } @@ -5100,36 +5100,36 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr object: { name: "a", range: [ + 1, 2, - 3, ], type: "JSXIdentifier", }, property: { name: "b", range: [ + 3, 4, - 5, ], type: "JSXIdentifier", }, range: [ - 2, - 5, + 1, + 4, ], type: "JSXMemberExpression", }, range: [ - 1, - 8, + 0, + 7, ], selfClosing: true, type: "JSXOpeningElement", typeArguments: null, }, range: [ - 1, - 8, + 0, + 7, ], type: "JSXElement", } @@ -5146,43 +5146,43 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "b", range: [ + 7, 8, - 9, ], type: "JSXIdentifier", }, namespace: { name: "a", range: [ + 5, 6, - 7, ], type: "JSXIdentifier", }, range: [ - 6, - 9, + 5, + 8, ], type: "JSXNamespacedName", }, range: [ - 6, - 13, + 5, + 12, ], type: "JSXAttribute", value: { expression: { range: [ + 10, 11, - 12, ], raw: "2", type: "Literal", value: 2, }, range: [ - 10, - 13, + 9, + 12, ], type: "JSXExpressionContainer", }, @@ -5191,22 +5191,22 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "div", range: [ - 2, - 5, + 1, + 4, ], type: "JSXIdentifier", }, range: [ - 1, - 16, + 0, + 15, ], selfClosing: true, type: 
"JSXOpeningElement", typeArguments: null, }, range: [ - 1, - 16, + 0, + 15, ], type: "JSXElement", } @@ -5221,22 +5221,22 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "Foo", range: [ - 2, - 5, + 1, + 4, ], type: "JSXIdentifier", }, range: [ - 1, - 8, + 0, + 7, ], selfClosing: true, type: "JSXOpeningElement", typeArguments: null, }, range: [ - 1, - 8, + 0, + 7, ], type: "JSXElement", } @@ -5251,14 +5251,14 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: { name: "Foo", range: [ - 2, - 5, + 1, + 4, ], type: "JSXIdentifier", }, range: [ - 1, - 11, + 0, + 10, ], selfClosing: true, type: "JSXOpeningElement", @@ -5266,8 +5266,8 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr params: [ { range: [ + 5, 6, - 7, ], type: "TSTypeReference", typeArguments: null, @@ -5275,8 +5275,8 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr name: "T", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, @@ -5284,15 +5284,15 @@ snapshot[`Plugin - JSXElement + JSXOpeningElement + JSXClosingElement + JSXAttr }, ], range: [ - 5, - 8, + 4, + 7, ], type: "TSTypeParameterInstantiation", }, }, range: [ - 1, - 11, + 0, + 10, ], type: "JSXElement", } @@ -5303,21 +5303,21 @@ snapshot[`Plugin - JSXFragment + JSXOpeningFragment + JSXClosingFragment 1`] = ` children: [], closingFragment: { range: [ - 3, - 6, + 2, + 5, ], type: "JSXClosingFragment", }, openingFragment: { range: [ - 1, - 3, + 0, + 2, ], type: "JSXOpeningFragment", }, range: [ - 1, - 6, + 0, + 5, ], type: "JSXFragment", } @@ -5328,8 +5328,8 @@ snapshot[`Plugin - JSXFragment + JSXOpeningFragment + JSXClosingFragment 2`] = ` children: [ { range: [ - 3, - 6, + 2, + 5, ], raw: "foo", type: "JSXText", @@ -5338,37 +5338,37 @@ snapshot[`Plugin - JSXFragment + JSXOpeningFragment + JSXClosingFragment 2`] = ` { expression: { range: [ + 6, 7, - 8, ], raw: "2", type: "Literal", value: 2, }, range: [ - 6, - 9, + 5, + 8, ], type: "JSXExpressionContainer", }, ], closingFragment: { range: [ - 9, - 12, + 8, + 11, ], type: "JSXClosingFragment", }, openingFragment: { range: [ - 1, - 3, + 0, + 2, ], type: "JSXOpeningFragment", }, range: [ - 1, - 12, + 0, + 11, ], type: "JSXFragment", } @@ -5380,21 +5380,21 @@ snapshot[`Plugin - TSAsExpression 3`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 9, + 0, + 8, ], type: "TSAsExpression", typeAnnotation: { range: [ - 6, - 9, + 5, + 8, ], type: "TSAnyKeyword", }, @@ -5405,22 +5405,22 @@ snapshot[`Plugin - TSAsExpression 4`] = ` { expression: { range: [ - 1, - 6, + 0, + 5, ], raw: '"foo"', type: "Literal", value: "foo", }, range: [ - 1, - 15, + 0, + 14, ], type: "TSAsExpression", typeAnnotation: { range: [ - 1, - 15, + 0, + 14, ], type: "TSTypeReference", typeArguments: null, @@ -5428,8 +5428,8 @@ snapshot[`Plugin - TSAsExpression 4`] = ` name: "const", optional: false, range: [ - 1, - 15, + 0, + 14, ], type: "Identifier", typeAnnotation: null, @@ -5443,8 +5443,8 @@ snapshot[`Plugin - TSEnumDeclaration 1`] = ` body: { members: [], range: [ - 1, - 12, + 0, + 11, ], type: "TSEnumBody", }, @@ -5454,15 +5454,15 @@ snapshot[`Plugin - TSEnumDeclaration 1`] = ` name: "Foo", optional: false, range: [ - 6, - 9, + 5, + 8, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 12, + 0, + 11, ], type: "TSEnumDeclaration", } @@ -5473,8 +5473,8 @@ snapshot[`Plugin - TSEnumDeclaration 2`] 
= ` body: { members: [], range: [ - 1, - 18, + 0, + 17, ], type: "TSEnumBody", }, @@ -5484,15 +5484,15 @@ snapshot[`Plugin - TSEnumDeclaration 2`] = ` name: "Foo", optional: false, range: [ - 12, - 15, + 11, + 14, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 18, + 0, + 17, ], type: "TSEnumDeclaration", } @@ -5507,16 +5507,16 @@ snapshot[`Plugin - TSEnumDeclaration 3`] = ` name: "A", optional: false, range: [ + 11, 12, - 13, ], type: "Identifier", typeAnnotation: null, }, initializer: null, range: [ + 11, 12, - 13, ], type: "TSEnumMember", }, @@ -5525,23 +5525,23 @@ snapshot[`Plugin - TSEnumDeclaration 3`] = ` name: "B", optional: false, range: [ + 14, 15, - 16, ], type: "Identifier", typeAnnotation: null, }, initializer: null, range: [ + 14, 15, - 16, ], type: "TSEnumMember", }, ], range: [ - 1, - 18, + 0, + 17, ], type: "TSEnumBody", }, @@ -5551,15 +5551,15 @@ snapshot[`Plugin - TSEnumDeclaration 3`] = ` name: "Foo", optional: false, range: [ - 6, - 9, + 5, + 8, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 18, + 0, + 17, ], type: "TSEnumDeclaration", } @@ -5572,8 +5572,8 @@ snapshot[`Plugin - TSEnumDeclaration 4`] = ` { id: { range: [ - 12, - 17, + 11, + 16, ], raw: '"a-b"', type: "Literal", @@ -5581,15 +5581,15 @@ snapshot[`Plugin - TSEnumDeclaration 4`] = ` }, initializer: null, range: [ - 12, - 17, + 11, + 16, ], type: "TSEnumMember", }, ], range: [ - 1, - 19, + 0, + 18, ], type: "TSEnumBody", }, @@ -5599,15 +5599,15 @@ snapshot[`Plugin - TSEnumDeclaration 4`] = ` name: "Foo", optional: false, range: [ - 6, - 9, + 5, + 8, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 19, + 0, + 18, ], type: "TSEnumDeclaration", } @@ -5622,24 +5622,24 @@ snapshot[`Plugin - TSEnumDeclaration 5`] = ` name: "A", optional: false, range: [ + 11, 12, - 13, ], type: "Identifier", typeAnnotation: null, }, initializer: { range: [ + 15, 16, - 17, ], raw: "1", type: "Literal", value: 1, }, range: [ - 12, - 17, + 11, + 16, ], type: "TSEnumMember", }, @@ -5648,24 +5648,24 @@ snapshot[`Plugin - TSEnumDeclaration 5`] = ` name: "B", optional: false, range: [ + 18, 19, - 20, ], type: "Identifier", typeAnnotation: null, }, initializer: { range: [ + 22, 23, - 24, ], raw: "2", type: "Literal", value: 2, }, range: [ - 19, - 24, + 18, + 23, ], type: "TSEnumMember", }, @@ -5674,8 +5674,8 @@ snapshot[`Plugin - TSEnumDeclaration 5`] = ` name: "C", optional: false, range: [ + 25, 26, - 27, ], type: "Identifier", typeAnnotation: null, @@ -5685,23 +5685,23 @@ snapshot[`Plugin - TSEnumDeclaration 5`] = ` name: "A", optional: false, range: [ + 29, 30, - 31, ], type: "Identifier", typeAnnotation: null, }, operator: "|", range: [ - 30, - 35, + 29, + 34, ], right: { name: "B", optional: false, range: [ + 33, 34, - 35, ], type: "Identifier", typeAnnotation: null, @@ -5709,15 +5709,15 @@ snapshot[`Plugin - TSEnumDeclaration 5`] = ` type: "BinaryExpression", }, range: [ - 26, - 35, + 25, + 34, ], type: "TSEnumMember", }, ], range: [ - 1, - 37, + 0, + 36, ], type: "TSEnumBody", }, @@ -5727,15 +5727,15 @@ snapshot[`Plugin - TSEnumDeclaration 5`] = ` name: "Foo", optional: false, range: [ - 6, - 9, + 5, + 8, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 37, + 0, + 36, ], type: "TSEnumDeclaration", } @@ -5746,8 +5746,8 @@ snapshot[`Plugin - TSInterface 1`] = ` body: { body: [], range: [ - 13, - 15, + 12, + 14, ], type: "TSInterfaceBody", }, @@ -5757,15 +5757,15 @@ snapshot[`Plugin - TSInterface 1`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: 
"Identifier", typeAnnotation: null, }, range: [ - 1, - 15, + 0, + 14, ], type: "TSInterface", typeParameters: [], @@ -5777,8 +5777,8 @@ snapshot[`Plugin - TSInterface 2`] = ` body: { body: [], range: [ - 16, - 18, + 15, + 17, ], type: "TSInterfaceBody", }, @@ -5794,23 +5794,23 @@ snapshot[`Plugin - TSInterface 2`] = ` name: "T", optional: false, range: [ + 12, 13, - 14, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ + 12, 13, - 14, ], type: "TSTypeParameter", }, ], range: [ - 12, - 15, + 11, + 14, ], type: "TSTypeParameterDeclaration", }, @@ -5818,15 +5818,15 @@ snapshot[`Plugin - TSInterface 2`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 18, + 0, + 17, ], type: "TSInterface", typeParameters: [], @@ -5838,8 +5838,8 @@ snapshot[`Plugin - TSInterface 3`] = ` body: { body: [], range: [ - 36, - 38, + 35, + 37, ], type: "TSInterfaceBody", }, @@ -5849,15 +5849,15 @@ snapshot[`Plugin - TSInterface 3`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 38, + 0, + 37, ], type: "TSInterface", typeParameters: [ @@ -5866,23 +5866,23 @@ snapshot[`Plugin - TSInterface 3`] = ` name: "Foo", optional: false, range: [ - 21, - 24, + 20, + 23, ], type: "Identifier", typeAnnotation: null, }, range: [ - 21, - 27, + 20, + 26, ], type: "TSInterfaceHeritage", typeArguments: { params: [ { range: [ + 24, 25, - 26, ], type: "TSTypeReference", typeArguments: null, @@ -5890,8 +5890,8 @@ snapshot[`Plugin - TSInterface 3`] = ` name: "T", optional: false, range: [ + 24, 25, - 26, ], type: "Identifier", typeAnnotation: null, @@ -5899,8 +5899,8 @@ snapshot[`Plugin - TSInterface 3`] = ` }, ], range: [ - 24, - 27, + 23, + 26, ], type: "TSTypeParameterInstantiation", }, @@ -5910,23 +5910,23 @@ snapshot[`Plugin - TSInterface 3`] = ` name: "Bar", optional: false, range: [ - 29, - 32, + 28, + 31, ], type: "Identifier", typeAnnotation: null, }, range: [ - 29, - 35, + 28, + 34, ], type: "TSInterfaceHeritage", typeArguments: { params: [ { range: [ + 32, 33, - 34, ], type: "TSTypeReference", typeArguments: null, @@ -5934,8 +5934,8 @@ snapshot[`Plugin - TSInterface 3`] = ` name: "T", optional: false, range: [ + 32, 33, - 34, ], type: "Identifier", typeAnnotation: null, @@ -5943,9 +5943,9 @@ snapshot[`Plugin - TSInterface 3`] = ` }, ], range: [ - 32, - 35, - ], + 31, + 34, + ], type: "TSTypeParameterInstantiation", }, }, @@ -5963,30 +5963,30 @@ snapshot[`Plugin - TSInterface 4`] = ` name: "foo", optional: false, range: [ - 15, - 18, + 14, + 17, ], type: "Identifier", typeAnnotation: null, }, optional: false, range: [ - 15, - 24, + 14, + 23, ], readonly: false, static: false, type: "TSPropertySignature", typeAnnotation: { range: [ - 18, - 23, + 17, + 22, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 20, - 23, + 19, + 22, ], type: "TSAnyKeyword", }, @@ -5998,30 +5998,30 @@ snapshot[`Plugin - TSInterface 4`] = ` name: "bar", optional: false, range: [ - 25, - 28, + 24, + 27, ], type: "Identifier", typeAnnotation: null, }, optional: true, range: [ - 25, - 34, + 24, + 33, ], readonly: false, static: false, type: "TSPropertySignature", typeAnnotation: { range: [ - 29, - 34, + 28, + 33, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 31, - 34, + 30, + 33, ], type: "TSAnyKeyword", }, @@ -6029,8 +6029,8 @@ snapshot[`Plugin - TSInterface 4`] = ` }, ], range: [ - 13, - 36, + 12, + 35, ], type: "TSInterfaceBody", }, @@ -6040,15 +6040,15 @@ snapshot[`Plugin - 
TSInterface 4`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 36, + 0, + 35, ], type: "TSInterface", typeParameters: [], @@ -6065,20 +6065,20 @@ snapshot[`Plugin - TSInterface 5`] = ` name: "key", optional: false, range: [ - 25, - 36, + 24, + 35, ], type: "Identifier", typeAnnotation: { range: [ - 28, - 36, + 27, + 35, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 30, - 36, + 29, + 35, ], type: "TSStringKeyword", }, @@ -6086,21 +6086,21 @@ snapshot[`Plugin - TSInterface 5`] = ` }, ], range: [ - 15, - 42, + 14, + 41, ], readonly: true, type: "TSIndexSignature", typeAnnotation: { range: [ - 37, - 42, + 36, + 41, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 39, - 42, + 38, + 41, ], type: "TSAnyKeyword", }, @@ -6108,8 +6108,8 @@ snapshot[`Plugin - TSInterface 5`] = ` }, ], range: [ - 13, - 44, + 12, + 43, ], type: "TSInterfaceBody", }, @@ -6119,15 +6119,15 @@ snapshot[`Plugin - TSInterface 5`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 44, + 0, + 43, ], type: "TSInterface", typeParameters: [], @@ -6144,30 +6144,30 @@ snapshot[`Plugin - TSInterface 6`] = ` name: "a", optional: false, range: [ + 23, 24, - 25, ], type: "Identifier", typeAnnotation: null, }, optional: false, range: [ - 15, - 30, + 14, + 29, ], readonly: true, static: false, type: "TSPropertySignature", typeAnnotation: { range: [ - 25, - 30, + 24, + 29, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 27, - 30, + 26, + 29, ], type: "TSAnyKeyword", }, @@ -6175,8 +6175,8 @@ snapshot[`Plugin - TSInterface 6`] = ` }, ], range: [ - 13, - 32, + 12, + 31, ], type: "TSInterfaceBody", }, @@ -6186,15 +6186,15 @@ snapshot[`Plugin - TSInterface 6`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 32, + 0, + 31, ], type: "TSInterface", typeParameters: [], @@ -6211,20 +6211,20 @@ snapshot[`Plugin - TSInterface 7`] = ` name: "a", optional: false, range: [ + 18, 19, - 20, ], type: "Identifier", typeAnnotation: { range: [ - 20, - 23, + 19, + 22, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ + 21, 22, - 23, ], type: "TSTypeReference", typeArguments: null, @@ -6232,8 +6232,8 @@ snapshot[`Plugin - TSInterface 7`] = ` name: "T", optional: false, range: [ + 21, 22, - 23, ], type: "Identifier", typeAnnotation: null, @@ -6243,19 +6243,19 @@ snapshot[`Plugin - TSInterface 7`] = ` }, ], range: [ - 15, - 27, + 14, + 26, ], returnType: { range: [ - 24, - 27, + 23, + 26, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ + 25, 26, - 27, ], type: "TSTypeReference", typeArguments: null, @@ -6263,8 +6263,8 @@ snapshot[`Plugin - TSInterface 7`] = ` name: "T", optional: false, range: [ + 25, 26, - 27, ], type: "Identifier", typeAnnotation: null, @@ -6283,31 +6283,31 @@ snapshot[`Plugin - TSInterface 7`] = ` name: "T", optional: false, range: [ + 15, 16, - 17, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ + 15, 16, - 17, ], type: "TSTypeParameter", }, ], range: [ - 15, - 18, + 14, + 17, ], type: "TSTypeParameterDeclaration", }, }, ], range: [ - 13, - 29, + 12, + 28, ], type: "TSInterfaceBody", }, @@ -6317,15 +6317,15 @@ snapshot[`Plugin - TSInterface 7`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 29, + 0, + 28, ], type: "TSInterface", typeParameters: [], @@ -6342,20 +6342,20 @@ snapshot[`Plugin - 
TSInterface 8`] = ` name: "a", optional: false, range: [ + 22, 23, - 24, ], type: "Identifier", typeAnnotation: { range: [ - 24, - 27, + 23, + 26, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ + 25, 26, - 27, ], type: "TSTypeReference", typeArguments: null, @@ -6363,8 +6363,8 @@ snapshot[`Plugin - TSInterface 8`] = ` name: "T", optional: false, range: [ + 25, 26, - 27, ], type: "Identifier", typeAnnotation: null, @@ -6374,19 +6374,19 @@ snapshot[`Plugin - TSInterface 8`] = ` }, ], range: [ - 15, - 31, + 14, + 30, ], returnType: { range: [ - 28, - 31, + 27, + 30, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ + 29, 30, - 31, ], type: "TSTypeReference", typeArguments: null, @@ -6394,8 +6394,8 @@ snapshot[`Plugin - TSInterface 8`] = ` name: "T", optional: false, range: [ + 29, 30, - 31, ], type: "Identifier", typeAnnotation: null, @@ -6414,31 +6414,31 @@ snapshot[`Plugin - TSInterface 8`] = ` name: "T", optional: false, range: [ + 19, 20, - 21, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ + 19, 20, - 21, ], type: "TSTypeParameter", }, ], range: [ - 19, - 22, + 18, + 21, ], type: "TSTypeParameterDeclaration", }, }, ], range: [ - 13, - 33, + 12, + 32, ], type: "TSInterfaceBody", }, @@ -6448,15 +6448,15 @@ snapshot[`Plugin - TSInterface 8`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 33, + 0, + 32, ], type: "TSInterface", typeParameters: [], @@ -6473,24 +6473,24 @@ snapshot[`Plugin - TSInterface 9`] = ` name: "a", optional: false, range: [ + 14, 15, - 16, ], type: "Identifier", typeAnnotation: null, }, optional: false, range: [ - 15, - 36, + 14, + 35, ], readonly: false, static: false, type: "TSPropertySignature", typeAnnotation: { range: [ - 16, - 36, + 15, + 35, ], type: "TSTypeAnnotation", typeAnnotation: { @@ -6499,20 +6499,20 @@ snapshot[`Plugin - TSInterface 9`] = ` name: "a", optional: false, range: [ + 25, 26, - 27, ], type: "Identifier", typeAnnotation: { range: [ - 27, - 30, + 26, + 29, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ + 28, 29, - 30, ], type: "TSTypeReference", typeArguments: null, @@ -6520,8 +6520,8 @@ snapshot[`Plugin - TSInterface 9`] = ` name: "T", optional: false, range: [ + 28, 29, - 30, ], type: "Identifier", typeAnnotation: null, @@ -6531,19 +6531,19 @@ snapshot[`Plugin - TSInterface 9`] = ` }, ], range: [ - 18, - 36, + 17, + 35, ], returnType: { range: [ - 32, - 36, + 31, + 35, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ + 34, 35, - 36, ], type: "TSTypeReference", typeArguments: null, @@ -6551,8 +6551,8 @@ snapshot[`Plugin - TSInterface 9`] = ` name: "T", optional: false, range: [ + 34, 35, - 36, ], type: "Identifier", typeAnnotation: null, @@ -6571,23 +6571,23 @@ snapshot[`Plugin - TSInterface 9`] = ` name: "T", optional: false, range: [ + 22, 23, - 24, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ + 22, 23, - 24, ], type: "TSTypeParameter", }, ], range: [ - 22, - 25, + 21, + 24, ], type: "TSTypeParameterDeclaration", }, @@ -6596,8 +6596,8 @@ snapshot[`Plugin - TSInterface 9`] = ` }, ], range: [ - 13, - 38, + 12, + 37, ], type: "TSInterfaceBody", }, @@ -6607,15 +6607,15 @@ snapshot[`Plugin - TSInterface 9`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 38, + 0, + 37, ], type: "TSInterface", typeParameters: [], @@ -6632,8 +6632,8 @@ snapshot[`Plugin - TSInterface 10`] = ` name: "a", optional: false, range: [ + 18, 19, - 20, ], 
type: "Identifier", typeAnnotation: null, @@ -6641,20 +6641,20 @@ snapshot[`Plugin - TSInterface 10`] = ` kind: "getter", optional: false, range: [ - 15, - 30, + 14, + 29, ], readonly: false, returnType: { range: [ - 22, - 30, + 21, + 29, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 24, - 30, + 23, + 29, ], type: "TSStringKeyword", }, @@ -6664,8 +6664,8 @@ snapshot[`Plugin - TSInterface 10`] = ` }, ], range: [ - 13, - 32, + 12, + 31, ], type: "TSInterfaceBody", }, @@ -6675,15 +6675,15 @@ snapshot[`Plugin - TSInterface 10`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 32, + 0, + 31, ], type: "TSInterface", typeParameters: [], @@ -6700,8 +6700,8 @@ snapshot[`Plugin - TSInterface 11`] = ` name: "a", optional: false, range: [ + 18, 19, - 20, ], type: "Identifier", typeAnnotation: null, @@ -6713,20 +6713,20 @@ snapshot[`Plugin - TSInterface 11`] = ` name: "v", optional: false, range: [ + 20, 21, - 22, ], type: "Identifier", typeAnnotation: { range: [ - 22, - 30, + 21, + 29, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 24, - 30, + 23, + 29, ], type: "TSStringKeyword", }, @@ -6734,8 +6734,8 @@ snapshot[`Plugin - TSInterface 11`] = ` }, ], range: [ - 15, - 31, + 14, + 30, ], readonly: false, static: false, @@ -6743,8 +6743,8 @@ snapshot[`Plugin - TSInterface 11`] = ` }, ], range: [ - 13, - 33, + 12, + 32, ], type: "TSInterfaceBody", }, @@ -6754,15 +6754,15 @@ snapshot[`Plugin - TSInterface 11`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 33, + 0, + 32, ], type: "TSInterface", typeParameters: [], @@ -6779,8 +6779,8 @@ snapshot[`Plugin - TSInterface 12`] = ` name: "a", optional: false, range: [ + 14, 15, - 16, ], type: "Identifier", typeAnnotation: null, @@ -6792,20 +6792,20 @@ snapshot[`Plugin - TSInterface 12`] = ` name: "arg", optional: true, range: [ - 20, - 23, + 19, + 22, ], type: "Identifier", typeAnnotation: { range: [ - 24, - 29, + 23, + 28, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 26, - 29, + 25, + 28, ], type: "TSAnyKeyword", }, @@ -6816,34 +6816,34 @@ snapshot[`Plugin - TSInterface 12`] = ` name: "args", optional: false, range: [ - 34, - 38, + 33, + 37, ], type: "Identifier", typeAnnotation: null, }, range: [ - 31, - 45, + 30, + 44, ], type: "RestElement", typeAnnotation: { range: [ - 38, - 45, + 37, + 44, ], type: "TSTypeAnnotation", typeAnnotation: { elementType: { range: [ - 40, - 43, + 39, + 42, ], type: "TSAnyKeyword", }, range: [ - 40, - 45, + 39, + 44, ], type: "TSArrayType", }, @@ -6851,20 +6851,20 @@ snapshot[`Plugin - TSInterface 12`] = ` }, ], range: [ - 15, - 51, + 14, + 50, ], readonly: false, returnType: { range: [ - 46, - 51, + 45, + 50, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 48, - 51, + 47, + 50, ], type: "TSAnyKeyword", }, @@ -6882,31 +6882,31 @@ snapshot[`Plugin - TSInterface 12`] = ` name: "T", optional: false, range: [ + 16, 17, - 18, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ + 16, 17, - 18, ], type: "TSTypeParameter", }, ], range: [ - 16, - 19, + 15, + 18, ], type: "TSTypeParameterDeclaration", }, }, ], range: [ - 13, - 53, + 12, + 52, ], type: "TSInterfaceBody", }, @@ -6916,15 +6916,15 @@ snapshot[`Plugin - TSInterface 12`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 53, + 0, + 52, ], type: "TSInterface", typeParameters: [], @@ -6936,20 +6936,20 @@ 
snapshot[`Plugin - TSSatisfiesExpression 2`] = ` expression: { properties: [], range: [ - 11, - 13, + 10, + 12, ], type: "ObjectExpression", }, range: [ - 11, - 25, + 10, + 24, ], type: "TSSatisfiesExpression", typeAnnotation: { range: [ + 23, 24, - 25, ], type: "TSTypeReference", typeArguments: null, @@ -6957,8 +6957,8 @@ snapshot[`Plugin - TSSatisfiesExpression 2`] = ` name: "A", optional: false, range: [ + 23, 24, - 25, ], type: "Identifier", typeAnnotation: null, @@ -6974,21 +6974,21 @@ snapshot[`Plugin - TSTypeAliasDeclaration 1`] = ` name: "A", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 13, + 0, + 12, ], type: "TSTypeAliasDeclaration", typeAnnotation: { range: [ - 10, - 13, + 9, + 12, ], type: "TSAnyKeyword", }, @@ -7003,21 +7003,21 @@ snapshot[`Plugin - TSTypeAliasDeclaration 2`] = ` name: "A", optional: false, range: [ + 5, 6, - 7, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 16, + 0, + 15, ], type: "TSTypeAliasDeclaration", typeAnnotation: { range: [ - 13, - 16, + 12, + 15, ], type: "TSAnyKeyword", }, @@ -7032,23 +7032,23 @@ snapshot[`Plugin - TSTypeAliasDeclaration 2`] = ` name: "T", optional: false, range: [ + 7, 8, - 9, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ + 7, 8, - 9, ], type: "TSTypeParameter", }, ], range: [ - 7, - 10, + 6, + 9, ], type: "TSTypeParameterDeclaration", }, @@ -7062,21 +7062,21 @@ snapshot[`Plugin - TSTypeAliasDeclaration 3`] = ` name: "A", optional: false, range: [ + 13, 14, - 15, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 24, + 0, + 23, ], type: "TSTypeAliasDeclaration", typeAnnotation: { range: [ - 21, - 24, + 20, + 23, ], type: "TSAnyKeyword", }, @@ -7091,23 +7091,23 @@ snapshot[`Plugin - TSTypeAliasDeclaration 3`] = ` name: "T", optional: false, range: [ + 15, 16, - 17, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ + 15, 16, - 17, ], type: "TSTypeParameter", }, ], range: [ - 15, - 18, + 14, + 17, ], type: "TSTypeParameterDeclaration", }, @@ -7120,15 +7120,15 @@ snapshot[`Plugin - TSNonNullExpression 2`] = ` name: "a", optional: false, range: [ + 0, 1, - 2, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 3, + 0, + 2, ], type: "TSNonNullExpression", } @@ -7137,15 +7137,15 @@ snapshot[`Plugin - TSNonNullExpression 2`] = ` snapshot[`Plugin - TSUnionType 1`] = ` { range: [ - 10, - 15, + 9, + 14, ], type: "TSUnionType", types: [ { range: [ + 9, 10, - 11, ], type: "TSTypeReference", typeArguments: null, @@ -7153,8 +7153,8 @@ snapshot[`Plugin - TSUnionType 1`] = ` name: "B", optional: false, range: [ + 9, 10, - 11, ], type: "Identifier", typeAnnotation: null, @@ -7162,8 +7162,8 @@ snapshot[`Plugin - TSUnionType 1`] = ` }, { range: [ + 13, 14, - 15, ], type: "TSTypeReference", typeArguments: null, @@ -7171,8 +7171,8 @@ snapshot[`Plugin - TSUnionType 1`] = ` name: "C", optional: false, range: [ + 13, 14, - 15, ], type: "Identifier", typeAnnotation: null, @@ -7185,15 +7185,15 @@ snapshot[`Plugin - TSUnionType 1`] = ` snapshot[`Plugin - TSIntersectionType 1`] = ` { range: [ - 10, - 15, + 9, + 14, ], type: "TSIntersectionType", types: [ { range: [ + 9, 10, - 11, ], type: "TSTypeReference", typeArguments: null, @@ -7201,8 +7201,8 @@ snapshot[`Plugin - TSIntersectionType 1`] = ` name: "B", optional: false, range: [ + 9, 10, - 11, ], type: "Identifier", typeAnnotation: null, @@ -7210,8 +7210,8 @@ snapshot[`Plugin - TSIntersectionType 1`] = ` }, { range: [ + 13, 14, - 15, ], type: "TSTypeReference", 
typeArguments: null, @@ -7219,8 +7219,8 @@ snapshot[`Plugin - TSIntersectionType 1`] = ` name: "C", optional: false, range: [ + 13, 14, - 15, ], type: "Identifier", typeAnnotation: null, @@ -7235,8 +7235,8 @@ snapshot[`Plugin - TSModuleDeclaration 1`] = ` body: { body: [], range: [ - 10, - 12, + 9, + 11, ], type: "TSModuleBlock", }, @@ -7246,15 +7246,15 @@ snapshot[`Plugin - TSModuleDeclaration 1`] = ` name: "A", optional: false, range: [ + 7, 8, - 9, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 12, + 0, + 11, ], type: "TSModuleDeclaration", } @@ -7274,27 +7274,27 @@ snapshot[`Plugin - TSModuleDeclaration 2`] = ` name: "A", optional: false, range: [ + 35, 36, - 37, ], type: "Identifier", typeAnnotation: null, }, params: [], range: [ - 27, - 45, + 26, + 44, ], returnType: { range: [ - 39, - 45, + 38, + 44, ], type: "TSTypeAnnotation", typeAnnotation: { range: [ - 41, - 45, + 40, + 44, ], type: "TSVoidKeyword", }, @@ -7303,15 +7303,15 @@ snapshot[`Plugin - TSModuleDeclaration 2`] = ` typeParameters: null, }, range: [ - 20, - 45, + 19, + 44, ], type: "ExportNamedDeclaration", }, ], range: [ - 18, - 47, + 17, + 46, ], type: "TSModuleBlock", }, @@ -7321,15 +7321,15 @@ snapshot[`Plugin - TSModuleDeclaration 2`] = ` name: "A", optional: false, range: [ + 15, 16, - 17, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 47, + 0, + 46, ], type: "TSModuleDeclaration", } @@ -7340,8 +7340,8 @@ snapshot[`Plugin - TSModuleDeclaration + TSModuleBlock 1`] = ` body: { body: [], range: [ - 10, - 12, + 9, + 11, ], type: "TSModuleBlock", }, @@ -7351,15 +7351,15 @@ snapshot[`Plugin - TSModuleDeclaration + TSModuleBlock 1`] = ` name: "A", optional: false, range: [ + 7, 8, - 9, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 12, + 0, + 11, ], type: "TSModuleDeclaration", } @@ -7373,8 +7373,8 @@ snapshot[`Plugin - TSModuleDeclaration + TSModuleBlock 2`] = ` body: { body: [], range: [ - 27, - 29, + 26, + 28, ], type: "TSModuleBlock", }, @@ -7384,22 +7384,22 @@ snapshot[`Plugin - TSModuleDeclaration + TSModuleBlock 2`] = ` name: "B", optional: false, range: [ + 24, 25, - 26, ], type: "Identifier", typeAnnotation: null, }, range: [ - 15, - 29, + 14, + 28, ], type: "TSModuleDeclaration", }, ], range: [ - 13, - 31, + 12, + 30, ], type: "TSModuleBlock", }, @@ -7409,15 +7409,15 @@ snapshot[`Plugin - TSModuleDeclaration + TSModuleBlock 2`] = ` name: "A", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 1, - 31, + 0, + 30, ], type: "TSModuleDeclaration", } @@ -7429,22 +7429,22 @@ snapshot[`Plugin - TSQualifiedName 1`] = ` name: "a", optional: false, range: [ + 9, 10, - 11, ], type: "Identifier", typeAnnotation: null, }, range: [ - 10, - 13, + 9, + 12, ], right: { name: "b", optional: false, range: [ + 11, 12, - 13, ], type: "Identifier", typeAnnotation: null, @@ -7462,39 +7462,39 @@ snapshot[`Plugin - TSTypeLiteral 1`] = ` name: "a", optional: false, range: [ + 11, 12, - 13, ], type: "Identifier", typeAnnotation: null, }, optional: false, range: [ - 12, - 16, + 11, + 15, ], readonly: false, static: false, type: "TSPropertySignature", typeAnnotation: { range: [ - 13, - 16, + 12, + 15, ], type: "TSTypeAnnotation", typeAnnotation: { literal: { range: [ + 14, 15, - 16, ], raw: "1", type: "Literal", value: 1, }, range: [ + 14, 15, - 16, ], type: "TSLiteralType", }, @@ -7502,8 +7502,8 @@ snapshot[`Plugin - TSTypeLiteral 1`] = ` }, ], range: [ - 10, - 18, + 9, + 17, ], type: "TSTypeLiteral", } @@ -7512,14 +7512,14 @@ 
snapshot[`Plugin - TSTypeLiteral 1`] = ` snapshot[`Plugin - TSOptionalType 1`] = ` { range: [ - 11, - 18, + 10, + 17, ], type: "TSOptionalType", typeAnnotation: { range: [ - 11, - 17, + 10, + 16, ], type: "TSNumberKeyword", }, @@ -7529,21 +7529,21 @@ snapshot[`Plugin - TSOptionalType 1`] = ` snapshot[`Plugin - TSRestType 1`] = ` { range: [ - 11, - 22, + 10, + 21, ], type: "TSRestType", typeAnnotation: { elementType: { range: [ - 14, - 20, + 13, + 19, ], type: "TSNumberKeyword", }, range: [ - 14, - 22, + 13, + 21, ], type: "TSArrayType", }, @@ -7554,8 +7554,8 @@ snapshot[`Plugin - TSConditionalType 1`] = ` { checkType: { range: [ + 9, 10, - 11, ], type: "TSTypeReference", typeArguments: null, @@ -7563,8 +7563,8 @@ snapshot[`Plugin - TSConditionalType 1`] = ` name: "B", optional: false, range: [ + 9, 10, - 11, ], type: "Identifier", typeAnnotation: null, @@ -7572,8 +7572,8 @@ snapshot[`Plugin - TSConditionalType 1`] = ` }, extendsType: { range: [ + 19, 20, - 21, ], type: "TSTypeReference", typeArguments: null, @@ -7581,8 +7581,8 @@ snapshot[`Plugin - TSConditionalType 1`] = ` name: "C", optional: false, range: [ + 19, 20, - 21, ], type: "Identifier", typeAnnotation: null, @@ -7590,19 +7590,19 @@ snapshot[`Plugin - TSConditionalType 1`] = ` }, falseType: { range: [ - 33, - 39, + 32, + 38, ], type: "TSStringKeyword", }, range: [ - 10, - 39, + 9, + 38, ], trueType: { range: [ - 24, - 30, + 23, + 29, ], type: "TSNumberKeyword", }, @@ -7613,8 +7613,8 @@ snapshot[`Plugin - TSConditionalType 1`] = ` snapshot[`Plugin - TSInferType 1`] = ` { range: [ - 29, - 39, + 28, + 38, ], type: "TSInferType", typeParameter: { @@ -7626,16 +7626,16 @@ snapshot[`Plugin - TSInferType 1`] = ` name: "Item", optional: false, range: [ - 35, - 39, + 34, + 38, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ - 35, - 39, + 34, + 38, ], type: "TSTypeParameter", }, @@ -7646,14 +7646,14 @@ snapshot[`Plugin - TSTypeOperator 1`] = ` { operator: "keyof", range: [ - 10, - 17, + 9, + 16, ], type: "TSTypeOperator", typeAnnotation: { range: [ + 15, 16, - 17, ], type: "TSTypeReference", typeArguments: null, @@ -7661,8 +7661,8 @@ snapshot[`Plugin - TSTypeOperator 1`] = ` name: "B", optional: false, range: [ + 15, 16, - 17, ], type: "Identifier", typeAnnotation: null, @@ -7675,14 +7675,14 @@ snapshot[`Plugin - TSTypeOperator 2`] = ` { operator: "unique", range: [ - 21, - 34, + 20, + 33, ], type: "TSTypeOperator", typeAnnotation: { range: [ - 28, - 34, + 27, + 33, ], type: "TSSymbolKeyword", }, @@ -7693,15 +7693,15 @@ snapshot[`Plugin - TSTypeOperator 3`] = ` { operator: "readonly", range: [ - 10, - 21, + 9, + 20, ], type: "TSTypeOperator", typeAnnotation: { elementTypes: [], range: [ - 19, - 21, + 18, + 20, ], type: "TSTupleType", }, @@ -7713,15 +7713,15 @@ snapshot[`Plugin - TSMappedType 1`] = ` nameType: null, optional: undefined, range: [ - 13, - 41, + 12, + 40, ], readonly: undefined, type: "TSMappedType", typeAnnotation: { range: [ - 31, - 38, + 30, + 37, ], type: "TSBooleanKeyword", }, @@ -7730,14 +7730,14 @@ snapshot[`Plugin - TSMappedType 1`] = ` constraint: { operator: "keyof", range: [ - 21, - 28, + 20, + 27, ], type: "TSTypeOperator", typeAnnotation: { range: [ + 26, 27, - 28, ], type: "TSTypeReference", typeArguments: null, @@ -7745,8 +7745,8 @@ snapshot[`Plugin - TSMappedType 1`] = ` name: "T", optional: false, range: [ + 26, 27, - 28, ], type: "Identifier", typeAnnotation: null, @@ -7759,16 +7759,16 @@ snapshot[`Plugin - TSMappedType 1`] = ` name: "P", optional: false, range: [ + 15, 16, - 17, ], type: 
"Identifier", typeAnnotation: null, }, out: false, range: [ - 16, - 28, + 15, + 27, ], type: "TSTypeParameter", }, @@ -7780,16 +7780,16 @@ snapshot[`Plugin - TSMappedType 2`] = ` nameType: null, optional: undefined, range: [ - 13, - 45, + 12, + 44, ], readonly: true, type: "TSMappedType", typeAnnotation: { elementTypes: [], range: [ - 40, - 42, + 39, + 41, ], type: "TSTupleType", }, @@ -7798,14 +7798,14 @@ snapshot[`Plugin - TSMappedType 2`] = ` constraint: { operator: "keyof", range: [ - 30, - 37, + 29, + 36, ], type: "TSTypeOperator", typeAnnotation: { range: [ + 35, 36, - 37, ], type: "TSTypeReference", typeArguments: null, @@ -7813,8 +7813,8 @@ snapshot[`Plugin - TSMappedType 2`] = ` name: "T", optional: false, range: [ + 35, 36, - 37, ], type: "Identifier", typeAnnotation: null, @@ -7827,16 +7827,16 @@ snapshot[`Plugin - TSMappedType 2`] = ` name: "P", optional: false, range: [ + 24, 25, - 26, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ - 25, - 37, + 24, + 36, ], type: "TSTypeParameter", }, @@ -7848,16 +7848,16 @@ snapshot[`Plugin - TSMappedType 3`] = ` nameType: null, optional: undefined, range: [ - 13, - 46, + 12, + 45, ], readonly: "-", type: "TSMappedType", typeAnnotation: { elementTypes: [], range: [ - 41, - 43, + 40, + 42, ], type: "TSTupleType", }, @@ -7866,14 +7866,14 @@ snapshot[`Plugin - TSMappedType 3`] = ` constraint: { operator: "keyof", range: [ - 31, - 38, + 30, + 37, ], type: "TSTypeOperator", typeAnnotation: { range: [ + 36, 37, - 38, ], type: "TSTypeReference", typeArguments: null, @@ -7881,8 +7881,8 @@ snapshot[`Plugin - TSMappedType 3`] = ` name: "T", optional: false, range: [ + 36, 37, - 38, ], type: "Identifier", typeAnnotation: null, @@ -7895,16 +7895,16 @@ snapshot[`Plugin - TSMappedType 3`] = ` name: "P", optional: false, range: [ + 25, 26, - 27, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ - 26, - 38, + 25, + 37, ], type: "TSTypeParameter", }, @@ -7916,16 +7916,16 @@ snapshot[`Plugin - TSMappedType 4`] = ` nameType: null, optional: undefined, range: [ - 13, - 46, + 12, + 45, ], readonly: "+", type: "TSMappedType", typeAnnotation: { elementTypes: [], range: [ - 41, - 43, + 40, + 42, ], type: "TSTupleType", }, @@ -7934,14 +7934,14 @@ snapshot[`Plugin - TSMappedType 4`] = ` constraint: { operator: "keyof", range: [ - 31, - 38, + 30, + 37, ], type: "TSTypeOperator", typeAnnotation: { range: [ + 36, 37, - 38, ], type: "TSTypeReference", typeArguments: null, @@ -7949,8 +7949,8 @@ snapshot[`Plugin - TSMappedType 4`] = ` name: "T", optional: false, range: [ + 36, 37, - 38, ], type: "Identifier", typeAnnotation: null, @@ -7963,16 +7963,16 @@ snapshot[`Plugin - TSMappedType 4`] = ` name: "P", optional: false, range: [ + 25, 26, - 27, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ - 26, - 38, + 25, + 37, ], type: "TSTypeParameter", }, @@ -7984,15 +7984,15 @@ snapshot[`Plugin - TSMappedType 5`] = ` nameType: null, optional: true, range: [ - 13, - 42, + 12, + 41, ], readonly: undefined, type: "TSMappedType", typeAnnotation: { range: [ - 32, - 39, + 31, + 38, ], type: "TSBooleanKeyword", }, @@ -8001,14 +8001,14 @@ snapshot[`Plugin - TSMappedType 5`] = ` constraint: { operator: "keyof", range: [ - 21, - 28, + 20, + 27, ], type: "TSTypeOperator", typeAnnotation: { range: [ + 26, 27, - 28, ], type: "TSTypeReference", typeArguments: null, @@ -8016,8 +8016,8 @@ snapshot[`Plugin - TSMappedType 5`] = ` name: "T", optional: false, range: [ + 26, 27, - 28, ], type: "Identifier", typeAnnotation: null, @@ -8030,16 
+8030,16 @@ snapshot[`Plugin - TSMappedType 5`] = ` name: "P", optional: false, range: [ + 15, 16, - 17, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ - 16, - 28, + 15, + 27, ], type: "TSTypeParameter", }, @@ -8051,15 +8051,15 @@ snapshot[`Plugin - TSMappedType 6`] = ` nameType: null, optional: "-", range: [ - 13, - 43, + 12, + 42, ], readonly: undefined, type: "TSMappedType", typeAnnotation: { range: [ - 33, - 40, + 32, + 39, ], type: "TSBooleanKeyword", }, @@ -8068,14 +8068,14 @@ snapshot[`Plugin - TSMappedType 6`] = ` constraint: { operator: "keyof", range: [ - 21, - 28, + 20, + 27, ], type: "TSTypeOperator", typeAnnotation: { range: [ + 26, 27, - 28, ], type: "TSTypeReference", typeArguments: null, @@ -8083,8 +8083,8 @@ snapshot[`Plugin - TSMappedType 6`] = ` name: "T", optional: false, range: [ + 26, 27, - 28, ], type: "Identifier", typeAnnotation: null, @@ -8097,16 +8097,16 @@ snapshot[`Plugin - TSMappedType 6`] = ` name: "P", optional: false, range: [ + 15, 16, - 17, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ - 16, - 28, + 15, + 27, ], type: "TSTypeParameter", }, @@ -8118,15 +8118,15 @@ snapshot[`Plugin - TSMappedType 7`] = ` nameType: null, optional: "+", range: [ - 13, - 43, + 12, + 42, ], readonly: undefined, type: "TSMappedType", typeAnnotation: { range: [ - 33, - 40, + 32, + 39, ], type: "TSBooleanKeyword", }, @@ -8135,14 +8135,14 @@ snapshot[`Plugin - TSMappedType 7`] = ` constraint: { operator: "keyof", range: [ - 21, - 28, + 20, + 27, ], type: "TSTypeOperator", typeAnnotation: { range: [ + 26, 27, - 28, ], type: "TSTypeReference", typeArguments: null, @@ -8150,8 +8150,8 @@ snapshot[`Plugin - TSMappedType 7`] = ` name: "T", optional: false, range: [ + 26, 27, - 28, ], type: "Identifier", typeAnnotation: null, @@ -8164,16 +8164,16 @@ snapshot[`Plugin - TSMappedType 7`] = ` name: "P", optional: false, range: [ + 15, 16, - 17, ], type: "Identifier", typeAnnotation: null, }, out: false, range: [ - 16, - 28, + 15, + 27, ], type: "TSTypeParameter", }, @@ -8184,16 +8184,16 @@ snapshot[`Plugin - TSLiteralType 1`] = ` { literal: { range: [ - 10, - 14, + 9, + 13, ], raw: "true", type: "Literal", value: true, }, range: [ - 10, - 14, + 9, + 13, ], type: "TSLiteralType", } @@ -8203,16 +8203,16 @@ snapshot[`Plugin - TSLiteralType 2`] = ` { literal: { range: [ - 10, - 15, + 9, + 14, ], raw: "false", type: "Literal", value: false, }, range: [ - 10, - 15, + 9, + 14, ], type: "TSLiteralType", } @@ -8222,16 +8222,16 @@ snapshot[`Plugin - TSLiteralType 3`] = ` { literal: { range: [ + 9, 10, - 11, ], raw: "1", type: "Literal", value: 1, }, range: [ + 9, 10, - 11, ], type: "TSLiteralType", } @@ -8241,16 +8241,16 @@ snapshot[`Plugin - TSLiteralType 4`] = ` { literal: { range: [ - 10, - 15, + 9, + 14, ], raw: '"foo"', type: "Literal", value: "foo", }, range: [ - 10, - 15, + 9, + 14, ], type: "TSLiteralType", } @@ -8262,8 +8262,8 @@ snapshot[`Plugin - TSTemplateLiteralType 1`] = ` { cooked: "a ", range: [ - 11, - 13, + 10, + 12, ], raw: "a ", tail: false, @@ -8272,8 +8272,8 @@ snapshot[`Plugin - TSTemplateLiteralType 1`] = ` { cooked: "", range: [ - 22, - 22, + 21, + 21, ], raw: "", tail: true, @@ -8281,15 +8281,15 @@ snapshot[`Plugin - TSTemplateLiteralType 1`] = ` }, ], range: [ - 10, - 23, + 9, + 22, ], type: "TSTemplateLiteralType", types: [ { range: [ - 15, - 21, + 14, + 20, ], type: "TSStringKeyword", }, @@ -8302,15 +8302,15 @@ snapshot[`Plugin - TSTupleType + TSArrayType 1`] = ` elementTypes: [ { range: [ - 11, - 17, + 10, + 16, ], type: 
"TSNumberKeyword", }, ], range: [ - 10, - 18, + 9, + 17, ], type: "TSTupleType", } @@ -8322,8 +8322,8 @@ snapshot[`Plugin - TSTupleType + TSArrayType 2`] = ` { elementType: { range: [ - 14, - 20, + 13, + 19, ], type: "TSNumberKeyword", }, @@ -8331,22 +8331,22 @@ snapshot[`Plugin - TSTupleType + TSArrayType 2`] = ` name: "x", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 11, - 20, + 10, + 19, ], type: "TSNamedTupleMember", }, ], range: [ - 10, - 21, + 9, + 20, ], type: "TSTupleType", } @@ -8358,8 +8358,8 @@ snapshot[`Plugin - TSTupleType + TSArrayType 3`] = ` { elementType: { range: [ - 14, - 20, + 13, + 19, ], type: "TSNumberKeyword", }, @@ -8367,22 +8367,22 @@ snapshot[`Plugin - TSTupleType + TSArrayType 3`] = ` name: "x", optional: false, range: [ + 10, 11, - 12, ], type: "Identifier", typeAnnotation: null, }, range: [ - 11, - 20, + 10, + 19, ], type: "TSNamedTupleMember", }, ], range: [ - 10, - 21, + 9, + 20, ], type: "TSTupleType", } @@ -8395,14 +8395,14 @@ snapshot[`Plugin - TSTupleType + TSArrayType 4`] = ` elementType: { elementType: { range: [ - 17, - 23, + 16, + 22, ], type: "TSNumberKeyword", }, range: [ - 17, - 25, + 16, + 24, ], type: "TSArrayType", }, @@ -8411,29 +8411,29 @@ snapshot[`Plugin - TSTupleType + TSArrayType 4`] = ` name: "x", optional: false, range: [ + 13, 14, - 15, ], type: "Identifier", typeAnnotation: null, }, range: [ - 11, - 16, + 10, + 15, ], type: "RestElement", typeAnnotation: null, }, range: [ - 11, - 25, + 10, + 24, ], type: "TSNamedTupleMember", }, ], range: [ - 10, - 26, + 9, + 25, ], type: "TSTupleType", } @@ -8443,14 +8443,14 @@ snapshot[`Plugin - TSArrayType 1`] = ` { elementType: { range: [ - 10, - 16, + 9, + 15, ], type: "TSNumberKeyword", }, range: [ - 10, - 18, + 9, + 17, ], type: "TSArrayType", } @@ -8462,15 +8462,15 @@ snapshot[`Plugin - TSTypeQuery 1`] = ` name: "B", optional: false, range: [ + 16, 17, - 18, ], type: "Identifier", typeAnnotation: null, }, range: [ - 10, - 18, + 9, + 17, ], type: "TSTypeQuery", typeArguments: null, @@ -8480,8 +8480,8 @@ snapshot[`Plugin - TSTypeQuery 1`] = ` snapshot[`Plugin - TS keywords 1`] = ` { range: [ - 10, - 13, + 9, + 12, ], type: "TSAnyKeyword", } @@ -8490,8 +8490,8 @@ snapshot[`Plugin - TS keywords 1`] = ` snapshot[`Plugin - TS keywords 2`] = ` { range: [ - 10, - 16, + 9, + 15, ], type: "TSBigIntKeyword", } @@ -8500,8 +8500,8 @@ snapshot[`Plugin - TS keywords 2`] = ` snapshot[`Plugin - TS keywords 3`] = ` { range: [ - 10, - 17, + 9, + 16, ], type: "TSBooleanKeyword", } @@ -8510,8 +8510,8 @@ snapshot[`Plugin - TS keywords 3`] = ` snapshot[`Plugin - TS keywords 4`] = ` { range: [ - 10, - 19, + 9, + 18, ], type: "TSIntrinsicKeyword", } @@ -8520,8 +8520,8 @@ snapshot[`Plugin - TS keywords 4`] = ` snapshot[`Plugin - TS keywords 5`] = ` { range: [ - 10, - 15, + 9, + 14, ], type: "TSNeverKeyword", } @@ -8530,8 +8530,8 @@ snapshot[`Plugin - TS keywords 5`] = ` snapshot[`Plugin - TS keywords 6`] = ` { range: [ - 10, - 14, + 9, + 13, ], type: "TSNullKeyword", } @@ -8540,8 +8540,8 @@ snapshot[`Plugin - TS keywords 6`] = ` snapshot[`Plugin - TS keywords 7`] = ` { range: [ - 10, - 16, + 9, + 15, ], type: "TSNumberKeyword", } @@ -8550,8 +8550,8 @@ snapshot[`Plugin - TS keywords 7`] = ` snapshot[`Plugin - TS keywords 8`] = ` { range: [ - 10, - 16, + 9, + 15, ], type: "TSObjectKeyword", } @@ -8560,8 +8560,8 @@ snapshot[`Plugin - TS keywords 8`] = ` snapshot[`Plugin - TS keywords 9`] = ` { range: [ - 10, - 16, + 9, + 15, ], type: "TSStringKeyword", } @@ -8570,8 
+8570,8 @@ snapshot[`Plugin - TS keywords 9`] = ` snapshot[`Plugin - TS keywords 10`] = ` { range: [ - 10, - 16, + 9, + 15, ], type: "TSSymbolKeyword", } @@ -8580,8 +8580,8 @@ snapshot[`Plugin - TS keywords 10`] = ` snapshot[`Plugin - TS keywords 11`] = ` { range: [ - 10, - 19, + 9, + 18, ], type: "TSUndefinedKeyword", } @@ -8590,8 +8590,8 @@ snapshot[`Plugin - TS keywords 11`] = ` snapshot[`Plugin - TS keywords 12`] = ` { range: [ - 10, - 17, + 9, + 16, ], type: "TSUnknownKeyword", } @@ -8600,8 +8600,8 @@ snapshot[`Plugin - TS keywords 12`] = ` snapshot[`Plugin - TS keywords 13`] = ` { range: [ - 10, - 14, + 9, + 13, ], type: "TSVoidKeyword", } diff --git a/tests/unit/lint_plugin_test.ts b/tests/unit/lint_plugin_test.ts index 6c71501c461191..4f660be29f7e49 100644 --- a/tests/unit/lint_plugin_test.ts +++ b/tests/unit/lint_plugin_test.ts @@ -3,45 +3,14 @@ import { assertEquals } from "./test_util.ts"; import { assertSnapshot } from "@std/testing/snapshot"; -// TODO(@marvinhagemeister) Remove once we land "official" types -export interface LintReportData { - // deno-lint-ignore no-explicit-any - node: any; - message: string; -} -// TODO(@marvinhagemeister) Remove once we land "official" types -interface LintContext { - id: string; -} // TODO(@marvinhagemeister) Remove once we land "official" types // deno-lint-ignore no-explicit-any type LintVisitor = Record void>; -// TODO(@marvinhagemeister) Remove once we land "official" types -interface LintRule { - create(ctx: LintContext): LintVisitor; - destroy?(): void; -} - -// TODO(@marvinhagemeister) Remove once we land "official" types -interface LintPlugin { - name: string; - rules: Record; -} - -function runLintPlugin(plugin: LintPlugin, fileName: string, source: string) { - // deno-lint-ignore no-explicit-any - return (Deno as any)[(Deno as any).internal].runLintPlugin( - plugin, - fileName, - source, - ); -} - function testPlugin( source: string, - rule: LintRule, -) { + rule: Deno.lint.Rule, +): Deno.lint.Diagnostic[] { const plugin = { name: "test-plugin", rules: { @@ -49,7 +18,11 @@ function testPlugin( }, }; - return runLintPlugin(plugin, "source.tsx", source); + return Deno.lint.runPlugin( + plugin, + "source.tsx", + source, + ); } interface VisitResult { diff --git a/tests/unit/ops_test.ts b/tests/unit/ops_test.ts index 60f67cdca67ae8..9998ad6d80685b 100644 --- a/tests/unit/ops_test.ts +++ b/tests/unit/ops_test.ts @@ -1,6 +1,6 @@ // Copyright 2018-2025 the Deno authors. MIT license. -const EXPECTED_OP_COUNT = 13; +const EXPECTED_OP_COUNT = 14; Deno.test(function checkExposedOps() { // @ts-ignore TS doesn't allow to index with symbol From 408d581fc13fa1279aeafc52d08d35a03fd5cae2 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 5 Feb 2025 14:03:45 -0500 Subject: [PATCH 07/17] chore: fix lockfile on main (#27978) Every time I run `cargo test` or whatever the lockfile changes to this. 
--- Cargo.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b701073a76db85..5e41e795c57e75 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4877,6 +4877,12 @@ dependencies = [ "spin", ] +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "lcms2" version = "6.1.0" @@ -4900,12 +4906,6 @@ dependencies = [ "pkg-config", ] -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "libc" version = "0.2.168" From bc8554878eef0a4181c36e07c271423bec72827f Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Wed, 5 Feb 2025 23:08:10 +0000 Subject: [PATCH 08/17] fix(check): support sloppy imports with "compilerOptions.rootDirs" (#27973) --- Cargo.lock | 10 +- Cargo.toml | 3 +- cli/factory.rs | 26 +- cli/graph_util.rs | 13 +- cli/lib/standalone/binary.rs | 2 +- cli/lsp/analysis.rs | 7 +- cli/lsp/config.rs | 103 +- cli/lsp/diagnostics.rs | 25 +- cli/lsp/resolver.rs | 9 +- cli/resolver.rs | 10 +- cli/rt/run.rs | 38 +- cli/standalone/binary.rs | 2 +- cli/tools/info.rs | 13 +- cli/tools/lint/mod.rs | 4 +- cli/tools/lint/rules/mod.rs | 13 +- cli/tools/lint/rules/no_sloppy_imports.rs | 74 +- cli/tools/registry/mod.rs | 1 - cli/tools/registry/unfurl.rs | 83 +- resolvers/deno/Cargo.toml | 5 + resolvers/deno/factory.rs | 68 +- resolvers/deno/lib.rs | 56 +- resolvers/deno/sloppy_imports.rs | 582 ---- resolvers/deno/workspace.rs | 2812 +++++++++++++++++ resolvers/node/resolution.rs | 9 - .../__test__.jsonc | 4 + .../deno.json | 6 + .../subdir/mod.ts | 3 + .../subdir_types/import.ts | 1 + 28 files changed, 3043 insertions(+), 939 deletions(-) delete mode 100644 resolvers/deno/sloppy_imports.rs create mode 100644 resolvers/deno/workspace.rs create mode 100644 tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/__test__.jsonc create mode 100644 tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/deno.json create mode 100644 tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/subdir/mod.ts create mode 100644 tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/subdir_types/import.ts diff --git a/Cargo.lock b/Cargo.lock index 5e41e795c57e75..cef572f6869505 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1579,9 +1579,9 @@ dependencies = [ [[package]] name = "deno_config" -version = "0.47.1" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f7883c48549bab8e446a58c64ee3d106a13052d2ff5e864de765a60260cb02b" +checksum = "6c486df63f7fa0f2142c7eba286c7be87a3cd8c93f66f744fb5853a77cf4347b" dependencies = [ "boxed_error", "capacity_builder 0.5.0", @@ -2382,10 +2382,14 @@ dependencies = [ "deno_semver", "deno_terminal 0.2.0", "futures", + "import_map", + "indexmap 2.3.0", "log", "node_resolver", "once_cell", "parking_lot", + "serde", + "serde_json", "sys_traits", "test_server", "thiserror 2.0.3", @@ -8033,6 +8037,8 @@ dependencies = [ "getrandom", "libc", "parking_lot", + "serde", + "serde_json", "windows-sys 0.59.0", ] diff --git a/Cargo.toml b/Cargo.toml index 5af9ac3a88980d..99a2e0812dfa9e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -54,7 +54,7 @@ deno_ast = { version = "=0.44.0", features = ["transpiling"] } deno_core = { version = "0.333.0" } deno_bench_util 
= { version = "0.183.0", path = "./bench_util" } -deno_config = { version = "=0.47.1", features = ["workspace"] } +deno_config = { version = "=0.48.0", features = ["workspace"] } deno_lockfile = "=0.24.0" deno_media_type = { version = "=0.2.5", features = ["module_specifier"] } deno_npm = "=0.27.2" @@ -155,6 +155,7 @@ hyper = { version = "1.6.0", features = ["full"] } hyper-rustls = { version = "0.27.2", default-features = false, features = ["http1", "http2", "tls12", "ring"] } hyper-util = { version = "0.1.10", features = ["tokio", "client", "client-legacy", "server", "server-auto"] } hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] } +import_map = { version = "0.21.0", features = ["ext"] } indexmap = { version = "2", features = ["serde"] } ipnet = "2.3" jsonc-parser = { version = "=0.26.2", features = ["serde"] } diff --git a/cli/factory.rs b/cli/factory.rs index bd9b58073f654a..1ae4954f69ae30 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -9,7 +9,6 @@ use std::sync::Arc; use deno_cache_dir::npm::NpmCacheDir; use deno_config::workspace::Workspace; use deno_config::workspace::WorkspaceDirectory; -use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures::FutureExt; @@ -38,6 +37,7 @@ use deno_resolver::factory::ResolverFactoryOptions; use deno_resolver::factory::SpecifiedImportMapProvider; use deno_resolver::npm::managed::NpmResolutionCell; use deno_resolver::npm::DenoInNpmPackageChecker; +use deno_resolver::workspace::WorkspaceResolver; use deno_runtime::deno_fs; use deno_runtime::deno_fs::RealFs; use deno_runtime::deno_permissions::Permissions; @@ -97,7 +97,6 @@ use crate::resolver::CliDenoResolver; use crate::resolver::CliNpmGraphResolver; use crate::resolver::CliNpmReqResolver; use crate::resolver::CliResolver; -use crate::resolver::CliSloppyImportsResolver; use crate::resolver::FoundPackageJsonDepFlag; use crate::standalone::binary::DenoCompileBinaryWriter; use crate::sys::CliSys; @@ -160,7 +159,8 @@ struct CliSpecifiedImportMapProvider { impl SpecifiedImportMapProvider for CliSpecifiedImportMapProvider { async fn get( &self, - ) -> Result, AnyError> { + ) -> Result, AnyError> + { async fn resolve_import_map_value_from_specifier( specifier: &Url, file_fetcher: &CliFileFetcher, @@ -189,7 +189,7 @@ impl SpecifiedImportMapProvider for CliSpecifiedImportMapProvider { .with_context(|| { format!("Unable to load '{}' import map", specifier) })?; - Ok(Some(deno_config::workspace::SpecifiedImportMap { + Ok(Some(deno_resolver::workspace::SpecifiedImportMap { base_url: specifier, value, })) @@ -199,7 +199,7 @@ impl SpecifiedImportMapProvider for CliSpecifiedImportMapProvider { self.workspace_external_import_map_loader.get_or_load()? 
{ let path_url = deno_path_util::url_from_file_path(&import_map.path)?; - Ok(Some(deno_config::workspace::SpecifiedImportMap { + Ok(Some(deno_resolver::workspace::SpecifiedImportMap { base_url: path_url, value: import_map.value.clone(), })) @@ -646,7 +646,6 @@ impl CliFactory { ResolverFactoryOptions { conditions_from_resolution_mode: Default::default(), node_resolution_cache: Some(Arc::new(NodeResolutionThreadLocalCache)), - no_sloppy_imports_cache: false, npm_system_info: self.flags.subcommand.npm_system_info(), specified_import_map: Some(Box::new(CliSpecifiedImportMapProvider { cli_options: self.cli_options()?.clone(), @@ -663,7 +662,7 @@ impl CliFactory { DenoSubcommand::Publish(_) => { // the node_modules directory is not published to jsr, so resolve // dependencies via the package.json rather than using node resolution - Some(deno_config::workspace::PackageJsonDepResolution::Enabled) + Some(deno_resolver::workspace::PackageJsonDepResolution::Enabled) } _ => None, }, @@ -672,12 +671,6 @@ impl CliFactory { }) } - pub fn sloppy_imports_resolver( - &self, - ) -> Result>, AnyError> { - self.resolver_factory()?.sloppy_imports_resolver() - } - pub fn workspace(&self) -> Result<&Arc, AnyError> { Ok(&self.workspace_directory()?.workspace) } @@ -790,10 +783,9 @@ impl CliFactory { } pub async fn lint_rule_provider(&self) -> Result { - Ok(LintRuleProvider::new( - self.sloppy_imports_resolver()?.cloned(), - Some(self.workspace_resolver().await?.clone()), - )) + Ok(LintRuleProvider::new(Some( + self.workspace_resolver().await?.clone(), + ))) } pub async fn node_resolver(&self) -> Result<&Arc, AnyError> { diff --git a/cli/graph_util.rs b/cli/graph_util.rs index 6a640b7cb01f19..861fa664bff05f 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -34,8 +34,7 @@ use deno_graph::SpecifierError; use deno_graph::WorkspaceFastCheckOption; use deno_path_util::url_to_file_path; use deno_resolver::npm::DenoInNpmPackageChecker; -use deno_resolver::sloppy_imports::SloppyImportsCachedFs; -use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; +use deno_resolver::workspace::sloppy_imports_resolve; use deno_runtime::deno_node; use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::jsr::JsrDepPackageReq; @@ -62,7 +61,6 @@ use crate::npm::CliNpmResolver; use crate::resolver::CliCjsTracker; use crate::resolver::CliNpmGraphResolver; use crate::resolver::CliResolver; -use crate::resolver::CliSloppyImportsResolver; use crate::sys::CliSys; use crate::tools::check; use crate::tools::check::CheckError; @@ -949,11 +947,14 @@ pub fn maybe_additional_sloppy_imports_message( sys: &CliSys, specifier: &ModuleSpecifier, ) -> Option { + let (resolved, sloppy_reason) = sloppy_imports_resolve( + specifier, + deno_resolver::workspace::ResolutionKind::Execution, + sys.clone(), + )?; Some(format!( "{} {}", - CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(sys.clone())) - .resolve(specifier, SloppyImportsResolutionKind::Execution)? 
- .as_suggestion_message(), + sloppy_reason.suggestion_message_for_specifier(&resolved), RUN_WITH_SLOPPY_IMPORTS_MSG )) } diff --git a/cli/lib/standalone/binary.rs b/cli/lib/standalone/binary.rs index ae02197bf47a4a..516d120a2001a2 100644 --- a/cli/lib/standalone/binary.rs +++ b/cli/lib/standalone/binary.rs @@ -3,8 +3,8 @@ use std::borrow::Cow; use std::collections::BTreeMap; -use deno_config::workspace::PackageJsonDepResolution; use deno_media_type::MediaType; +use deno_resolver::workspace::PackageJsonDepResolution; use deno_runtime::deno_permissions::PermissionsOptions; use deno_runtime::deno_telemetry::OtelConfig; use deno_semver::Version; diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 16a815e08ff7bb..6d62a68febe9c1 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -9,7 +9,6 @@ use std::path::Path; use deno_ast::SourceRange; use deno_ast::SourceRangedForSpanned; use deno_ast::SourceTextInfo; -use deno_config::workspace::MappedResolution; use deno_core::error::AnyError; use deno_core::serde::Deserialize; use deno_core::serde::Serialize; @@ -20,6 +19,7 @@ use deno_error::JsErrorBox; use deno_lint::diagnostic::LintDiagnosticRange; use deno_path_util::url_to_file_path; use deno_resolver::npm::managed::NpmResolutionCell; +use deno_resolver::workspace::MappedResolution; use deno_runtime::deno_node::PathClean; use deno_semver::jsr::JsrPackageNvReference; use deno_semver::jsr::JsrPackageReqReference; @@ -1348,11 +1348,10 @@ impl CodeActionCollection { let npm_ref = if let Ok(resolution) = workspace_resolver.resolve( &dep_key, document.specifier(), - deno_config::workspace::ResolutionKind::Execution, + deno_resolver::workspace::ResolutionKind::Execution, ) { let specifier = match resolution { - MappedResolution::Normal { specifier, .. } - | MappedResolution::ImportMap { specifier, .. } => specifier, + MappedResolution::Normal { specifier, .. 
} => specifier, _ => { return None; } diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index b76c54c7ed6b5f..2e02d839e4ad19 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -21,16 +21,12 @@ use deno_config::deno_json::TsConfig; use deno_config::deno_json::TsConfigWithIgnoredOptions; use deno_config::glob::FilePatterns; use deno_config::glob::PathOrPatternSet; -use deno_config::workspace::CreateResolverOptions; -use deno_config::workspace::PackageJsonDepResolution; -use deno_config::workspace::SpecifiedImportMap; use deno_config::workspace::VendorEnablement; use deno_config::workspace::Workspace; use deno_config::workspace::WorkspaceCache; use deno_config::workspace::WorkspaceDirectory; use deno_config::workspace::WorkspaceDirectoryEmptyOptions; use deno_config::workspace::WorkspaceDiscoverOptions; -use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; @@ -49,7 +45,12 @@ use deno_npm::npm_rc::ResolvedNpmRc; use deno_package_json::PackageJsonCache; use deno_path_util::url_to_file_path; use deno_resolver::npmrc::discover_npmrc_from_workspace; -use deno_resolver::sloppy_imports::SloppyImportsCachedFs; +use deno_resolver::workspace::CreateResolverOptions; +use deno_resolver::workspace::FsCacheOptions; +use deno_resolver::workspace::PackageJsonDepResolution; +use deno_resolver::workspace::SloppyImportsOptions; +use deno_resolver::workspace::SpecifiedImportMap; +use deno_resolver::workspace::WorkspaceResolver; use deno_runtime::deno_node::PackageJson; use indexmap::IndexSet; use lsp_types::ClientCapabilities; @@ -65,7 +66,6 @@ use crate::args::LintFlags; use crate::args::LintOptions; use crate::file_fetcher::CliFileFetcher; use crate::lsp::logging::lsp_warn; -use crate::resolver::CliSloppyImportsResolver; use crate::sys::CliSys; use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinterOptions; @@ -1206,7 +1206,6 @@ pub struct ConfigData { pub lockfile: Option>, pub npmrc: Option>, pub resolver: Arc>, - pub sloppy_imports_resolver: Option>, pub import_map_from_settings: Option, pub unstable: BTreeSet, watched_files: HashMap, @@ -1569,35 +1568,52 @@ impl ConfigData { None } }; - let resolver = member_dir + let unstable = member_dir .workspace - .create_resolver( - CliSys::default(), - CreateResolverOptions { - pkg_json_dep_resolution, - specified_import_map, + .unstable_features() + .iter() + .chain(settings.unstable.as_deref()) + .cloned() + .collect::>(); + let unstable_sloppy_imports = std::env::var("DENO_UNSTABLE_SLOPPY_IMPORTS") + .is_ok() + || unstable.contains("sloppy-imports"); + let resolver = WorkspaceResolver::from_workspace( + &member_dir.workspace, + CliSys::default(), + CreateResolverOptions { + pkg_json_dep_resolution, + specified_import_map, + sloppy_imports_options: if unstable_sloppy_imports { + SloppyImportsOptions::Enabled + } else { + SloppyImportsOptions::Disabled }, + fs_cache_options: FsCacheOptions::Disabled, + }, + ) + .inspect_err(|err| { + lsp_warn!( + " Failed to load resolver: {}", + err // will contain the specifier + ); + }) + .ok() + .unwrap_or_else(|| { + // create a dummy resolver + WorkspaceResolver::new_raw( + scope.clone(), + None, + member_dir.workspace.resolver_jsr_pkgs().collect(), + member_dir.workspace.package_jsons().cloned().collect(), + pkg_json_dep_resolution, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + CliSys::default(), ) - .inspect_err(|err| { - lsp_warn!( - " Failed to load resolver: {}", - 
err // will contain the specifier - ); - }) - .ok() - .unwrap_or_else(|| { - // create a dummy resolver - WorkspaceResolver::new_raw( - scope.clone(), - None, - member_dir.workspace.resolver_jsr_pkgs().collect(), - member_dir.workspace.package_jsons().cloned().collect(), - pkg_json_dep_resolution, - Default::default(), - Default::default(), - CliSys::default(), - ) - }); + }); if !resolver.diagnostics().is_empty() { lsp_warn!( " Resolver diagnostics:\n{}", @@ -1609,26 +1625,8 @@ impl ConfigData { .join("\n") ); } - let unstable = member_dir - .workspace - .unstable_features() - .iter() - .chain(settings.unstable.as_deref()) - .cloned() - .collect::>(); - let unstable_sloppy_imports = std::env::var("DENO_UNSTABLE_SLOPPY_IMPORTS") - .is_ok() - || unstable.contains("sloppy-imports"); - let sloppy_imports_resolver = unstable_sloppy_imports.then(|| { - Arc::new(CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new_without_stat_cache(CliSys::default()), - )) - }); let resolver = Arc::new(resolver); - let lint_rule_provider = LintRuleProvider::new( - sloppy_imports_resolver.clone(), - Some(resolver.clone()), - ); + let lint_rule_provider = LintRuleProvider::new(Some(resolver.clone())); let lint_options = LintOptions::resolve( member_dir.dir_path(), @@ -1676,7 +1674,6 @@ impl ConfigData { canonicalized_scope, member_dir, resolver, - sloppy_imports_resolver, fmt_config, lint_config, test_config, diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 7283f2cf8f684a..3adbefd88843ab 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -27,9 +27,7 @@ use deno_graph::Resolution; use deno_graph::ResolutionError; use deno_graph::SpecifierError; use deno_lint::linter::LintConfig as DenoLintConfig; -use deno_resolver::sloppy_imports::SloppyImportsCachedFs; -use deno_resolver::sloppy_imports::SloppyImportsResolution; -use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; +use deno_resolver::workspace::sloppy_imports_resolve; use deno_runtime::deno_node; use deno_runtime::tokio_util::create_basic_runtime; use deno_semver::jsr::JsrPackageReqReference; @@ -64,7 +62,6 @@ use crate::graph_util; use crate::graph_util::enhanced_resolution_error_message; use crate::lsp::logging::lsp_warn; use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams; -use crate::resolver::CliSloppyImportsResolver; use crate::sys::CliSys; use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinterOptions; @@ -1013,7 +1010,7 @@ fn generate_lint_diagnostics( Arc::new(LintConfig::new_with_base(PathBuf::from("/"))), Arc::new(CliLinter::new(CliLinterOptions { configured_rules: { - let lint_rule_provider = LintRuleProvider::new(None, None); + let lint_rule_provider = LintRuleProvider::new(None); lint_rule_provider.resolve_lint_rules(Default::default(), None) }, fix: false, @@ -1443,14 +1440,14 @@ impl DenoDiagnostic { pub fn to_lsp_diagnostic(&self, range: &lsp::Range) -> lsp::Diagnostic { fn no_local_message( specifier: &ModuleSpecifier, - maybe_sloppy_resolution: Option<&SloppyImportsResolution>, + suggestion_message: Option, ) -> String { let mut message = format!( "Unable to load a local module: {}\n", to_percent_decoded_str(specifier.as_ref()) ); - if let Some(res) = maybe_sloppy_resolution { - message.push_str(&res.as_suggestion_message()); + if let Some(suggestion_message) = suggestion_message { + message.push_str(&suggestion_message); message.push('.'); } else { message.push_str("Please check the file path."); @@ -1467,17 +1464,15 @@ impl DenoDiagnostic { 
Self::NotInstalledJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("JSR package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))), Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("npm package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))), Self::NoLocal(specifier) => { - let maybe_sloppy_resolution = CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new(CliSys::default()) - ).resolve(specifier, SloppyImportsResolutionKind::Execution); - let data = maybe_sloppy_resolution.as_ref().map(|res| { + let sloppy_resolution = sloppy_imports_resolve(specifier, deno_resolver::workspace::ResolutionKind::Execution, CliSys::default()); + let data = sloppy_resolution.as_ref().map(|(resolved, sloppy_reason)| { json!({ "specifier": specifier, - "to": res.as_specifier(), - "message": res.as_quick_fix_message(), + "to": resolved, + "message": sloppy_reason.quick_fix_message_for_specifier(resolved), }) }); - (lsp::DiagnosticSeverity::ERROR, no_local_message(specifier, maybe_sloppy_resolution.as_ref()), data) + (lsp::DiagnosticSeverity::ERROR, no_local_message(specifier, sloppy_resolution.as_ref().map(|(resolved, sloppy_reason)| sloppy_reason.suggestion_message_for_specifier(resolved))), data) }, Self::Redirect { from, to} => (lsp::DiagnosticSeverity::INFORMATION, format!("The import of \"{from}\" was redirected to \"{to}\"."), Some(json!({ "specifier": from, "redirect": to }))), Self::ResolutionError(err) => { diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index 81673b693b8a20..074372aab32994 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -12,8 +12,6 @@ use deno_ast::MediaType; use deno_cache_dir::npm::NpmCacheDir; use deno_cache_dir::HttpCache; use deno_config::deno_json::JsxImportSourceConfig; -use deno_config::workspace::PackageJsonDepResolution; -use deno_config::workspace::WorkspaceResolver; use deno_core::parking_lot::Mutex; use deno_core::url::Url; use deno_graph::GraphImport; @@ -29,6 +27,8 @@ use deno_resolver::npm::CreateInNpmPkgCheckerOptions; use deno_resolver::npm::DenoInNpmPackageChecker; use deno_resolver::npm::NpmReqResolverOptions; use deno_resolver::npmrc::create_default_npmrc; +use deno_resolver::workspace::PackageJsonDepResolution; +use deno_resolver::workspace::WorkspaceResolver; use deno_resolver::DenoResolverOptions; use deno_resolver::NodeAndNpmReqResolver; use deno_semver::jsr::JsrPackageReqReference; @@ -844,9 +844,6 @@ impl<'a> ResolverFactory<'a> { } _ => None, }, - sloppy_imports_resolver: self - .config_data - .and_then(|d| d.sloppy_imports_resolver.clone()), workspace_resolver: self .config_data .map(|d| d.resolver.clone()) @@ -860,6 +857,8 @@ impl<'a> ResolverFactory<'a> { PackageJsonDepResolution::Disabled, Default::default(), Default::default(), + Default::default(), + Default::default(), self.sys.clone(), )) }), diff --git a/cli/resolver.rs b/cli/resolver.rs index 5dcdadb7ff579d..b9ca0eb3069dd8 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -4,8 +4,6 @@ use std::sync::Arc; use async_trait::async_trait; use dashmap::DashSet; -use deno_config::workspace::MappedResolutionDiagnostic; -use deno_config::workspace::MappedResolutionError; use deno_core::ModuleSpecifier; use deno_error::JsErrorBox; use deno_graph::source::ResolveError; @@ -14,8 +12,8 @@ use deno_graph::NpmLoadError; use deno_graph::NpmResolvePkgReqsResult; use deno_npm::resolution::NpmResolutionError; use 
deno_resolver::npm::DenoInNpmPackageChecker; -use deno_resolver::sloppy_imports::SloppyImportsCachedFs; -use deno_resolver::sloppy_imports::SloppyImportsResolver; +use deno_resolver::workspace::MappedResolutionDiagnostic; +use deno_resolver::workspace::MappedResolutionError; use deno_runtime::colors; use deno_runtime::deno_node::is_builtin_node_module; use deno_semver::package::PackageReq; @@ -35,14 +33,10 @@ pub type CliCjsTracker = deno_resolver::cjs::CjsTracker; pub type CliIsCjsResolver = deno_resolver::cjs::IsCjsResolver; -pub type CliSloppyImportsCachedFs = SloppyImportsCachedFs; -pub type CliSloppyImportsResolver = - SloppyImportsResolver; pub type CliDenoResolver = deno_resolver::DenoResolver< DenoInNpmPackageChecker, DenoIsBuiltInNodeModuleChecker, CliNpmResolver, - CliSloppyImportsCachedFs, CliSys, >; pub type CliNpmReqResolver = deno_resolver::npm::NpmReqResolver< diff --git a/cli/rt/run.rs b/cli/rt/run.rs index 3087682ef9fc06..1eca838cba643b 100644 --- a/cli/rt/run.rs +++ b/cli/rt/run.rs @@ -7,9 +7,7 @@ use std::sync::Arc; use std::sync::OnceLock; use deno_cache_dir::npm::NpmCacheDir; -use deno_config::workspace::MappedResolution; use deno_config::workspace::ResolverWorkspaceJsrPackage; -use deno_config::workspace::WorkspaceResolver; use deno_core::error::AnyError; use deno_core::error::ModuleLoaderError; use deno_core::futures::future::LocalBoxFuture; @@ -59,9 +57,9 @@ use deno_resolver::npm::NpmReqResolver; use deno_resolver::npm::NpmReqResolverOptions; use deno_resolver::npm::NpmResolver; use deno_resolver::npm::NpmResolverCreateOptions; -use deno_resolver::sloppy_imports::SloppyImportsCachedFs; -use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; -use deno_resolver::sloppy_imports::SloppyImportsResolver; +use deno_resolver::workspace::MappedResolution; +use deno_resolver::workspace::SloppyImportsOptions; +use deno_resolver::workspace::WorkspaceResolver; use deno_runtime::code_cache::CodeCache; use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::create_host_defined_options; @@ -107,8 +105,6 @@ struct SharedModuleLoaderState { npm_module_loader: Arc, npm_registry_permission_checker: NpmRegistryReadPermissionChecker, npm_req_resolver: Arc, - sloppy_imports_resolver: - Option>>, vfs: Arc, workspace_resolver: WorkspaceResolver, } @@ -210,7 +206,7 @@ impl ModuleLoader for EmbeddedModuleLoader { let mapped_resolution = self.shared.workspace_resolver.resolve( raw_specifier, &referrer, - deno_config::workspace::ResolutionKind::Execution, + deno_resolver::workspace::ResolutionKind::Execution, ); match mapped_resolution { @@ -289,8 +285,7 @@ impl ModuleLoader for EmbeddedModuleLoader { ) } }, - Ok(MappedResolution::Normal { specifier, .. }) - | Ok(MappedResolution::ImportMap { specifier, .. }) => { + Ok(MappedResolution::Normal { specifier, .. 
}) => { if let Ok(reference) = NpmPackageReqReference::from_specifier(&specifier) { @@ -322,18 +317,6 @@ impl ModuleLoader for EmbeddedModuleLoader { } } - // do sloppy imports resolution if enabled - let specifier = if let Some(sloppy_imports_resolver) = - &self.shared.sloppy_imports_resolver - { - sloppy_imports_resolver - .resolve(&specifier, SloppyImportsResolutionKind::Execution) - .map(|s| s.into_specifier()) - .unwrap_or(specifier) - } else { - specifier - }; - Ok( self .shared @@ -832,10 +815,6 @@ pub async fn run( pkg_json_resolver.clone(), sys.clone(), )); - let sloppy_imports_resolver = - metadata.unstable_config.sloppy_imports.then(|| { - SloppyImportsResolver::new(SloppyImportsCachedFs::new(sys.clone())) - }); let workspace_resolver = { let import_map = match metadata.workspace_resolver.import_map { Some(import_map) => Some( @@ -883,6 +862,12 @@ pub async fn run( .collect(), pkg_jsons, metadata.workspace_resolver.pkg_json_resolution, + if metadata.unstable_config.sloppy_imports { + SloppyImportsOptions::Enabled + } else { + SloppyImportsOptions::Disabled + }, + Default::default(), Default::default(), Default::default(), sys.clone(), @@ -915,7 +900,6 @@ pub async fn run( )), npm_registry_permission_checker, npm_req_resolver, - sloppy_imports_resolver, vfs: vfs.clone(), workspace_resolver, }), diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index 4f0ce5a3c00f63..ccd0b641852108 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -16,7 +16,6 @@ use capacity_builder::BytesAppendable; use deno_ast::MediaType; use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; -use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; @@ -49,6 +48,7 @@ use deno_npm::resolution::SerializedNpmResolutionSnapshot; use deno_npm::NpmSystemInfo; use deno_path_util::url_from_directory_path; use deno_path_util::url_to_file_path; +use deno_resolver::workspace::WorkspaceResolver; use indexmap::IndexMap; use node_resolver::analyze::CjsAnalysis; use node_resolver::analyze::CjsCodeAnalyzer; diff --git a/cli/tools/info.rs b/cli/tools/info.rs index e6bf1baa36b5d2..f7368f90af224c 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -59,21 +59,18 @@ pub async fn info( let maybe_import_specifier = if let Ok(resolved) = resolver.resolve( &specifier, &cwd_url, - deno_config::workspace::ResolutionKind::Execution, + deno_resolver::workspace::ResolutionKind::Execution, ) { match resolved { - deno_config::workspace::MappedResolution::Normal { - specifier, .. - } - | deno_config::workspace::MappedResolution::ImportMap { + deno_resolver::workspace::MappedResolution::Normal { specifier, .. } - | deno_config::workspace::MappedResolution::WorkspaceJsrPackage { + | deno_resolver::workspace::MappedResolution::WorkspaceJsrPackage { specifier, .. } => Some(specifier), - deno_config::workspace::MappedResolution::WorkspaceNpmPackage { + deno_resolver::workspace::MappedResolution::WorkspaceNpmPackage { target_pkg_json, sub_path, .. @@ -88,7 +85,7 @@ pub async fn info( )? 
.into_url()?, ), - deno_config::workspace::MappedResolution::PackageJson { + deno_resolver::workspace::MappedResolution::PackageJson { alias, sub_path, dep_result, diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs index 74c46d4c189112..04734151a67839 100644 --- a/cli/tools/lint/mod.rs +++ b/cli/tools/lint/mod.rs @@ -499,7 +499,7 @@ fn collect_lint_files( #[allow(clippy::print_stdout)] pub fn print_rules_list(json: bool, maybe_rules_tags: Option>) { - let rule_provider = LintRuleProvider::new(None, None); + let rule_provider = LintRuleProvider::new(None); let mut all_rules = rule_provider.all_rules(); let configured_rules = rule_provider.resolve_lint_rules( LintRulesConfig { @@ -686,7 +686,7 @@ mod tests { } fn get_all_rules() -> Vec { - let rule_provider = LintRuleProvider::new(None, None); + let rule_provider = LintRuleProvider::new(None); let configured_rules = rule_provider.resolve_lint_rules(Default::default(), None); let mut all_rules = configured_rules diff --git a/cli/tools/lint/rules/mod.rs b/cli/tools/lint/rules/mod.rs index 9f2cee24fa9b21..99ce3331d3ec78 100644 --- a/cli/tools/lint/rules/mod.rs +++ b/cli/tools/lint/rules/mod.rs @@ -7,15 +7,14 @@ use std::sync::Arc; use deno_ast::ModuleSpecifier; use deno_config::deno_json::ConfigFile; use deno_config::deno_json::LintRulesConfig; -use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow::bail; use deno_core::error::AnyError; use deno_graph::ModuleGraph; use deno_lint::diagnostic::LintDiagnostic; use deno_lint::rules::LintRule; use deno_lint::tags; +use deno_resolver::workspace::WorkspaceResolver; -use crate::resolver::CliSloppyImportsResolver; use crate::sys::CliSys; mod no_sloppy_imports; @@ -141,19 +140,14 @@ impl ConfiguredRules { } pub struct LintRuleProvider { - sloppy_imports_resolver: Option>, workspace_resolver: Option>>, } impl LintRuleProvider { pub fn new( - sloppy_imports_resolver: Option>, workspace_resolver: Option>>, ) -> Self { - Self { - sloppy_imports_resolver, - workspace_resolver, - } + Self { workspace_resolver } } pub fn resolve_lint_rules_err_empty( @@ -172,7 +166,6 @@ impl LintRuleProvider { let deno_lint_rules = deno_lint::rules::get_all_rules(); let cli_lint_rules = vec![CliLintRule(CliLintRuleKind::Extended( Box::new(no_sloppy_imports::NoSloppyImportsRule::new( - self.sloppy_imports_resolver.clone(), self.workspace_resolver.clone(), )), ))]; @@ -274,7 +267,7 @@ mod test { include: None, tags: None, }; - let rules_provider = LintRuleProvider::new(None, None); + let rules_provider = LintRuleProvider::new(None); let rules = rules_provider.resolve_lint_rules(rules_config, None); let mut rule_names = rules .rules diff --git a/cli/tools/lint/rules/no_sloppy_imports.rs b/cli/tools/lint/rules/no_sloppy_imports.rs index 34eeef521d95ae..48b0e5015f8807 100644 --- a/cli/tools/lint/rules/no_sloppy_imports.rs +++ b/cli/tools/lint/rules/no_sloppy_imports.rs @@ -6,7 +6,6 @@ use std::collections::HashMap; use std::sync::Arc; use deno_ast::SourceRange; -use deno_config::workspace::WorkspaceResolver; use deno_error::JsErrorBox; use deno_graph::source::ResolutionKind; use deno_graph::source::ResolveError; @@ -17,31 +16,25 @@ use deno_lint::diagnostic::LintFix; use deno_lint::diagnostic::LintFixChange; use deno_lint::rules::LintRule; use deno_lint::tags; -use deno_resolver::sloppy_imports::SloppyImportsResolution; -use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; +use deno_resolver::workspace::SloppyImportsResolutionReason; +use deno_resolver::workspace::WorkspaceResolver; use 
text_lines::LineAndColumnIndex; use super::ExtendedLintRule; use crate::graph_util::CliJsrUrlProvider; -use crate::resolver::CliSloppyImportsResolver; use crate::sys::CliSys; #[derive(Debug)] pub struct NoSloppyImportsRule { - sloppy_imports_resolver: Option>, // None for making printing out the lint rules easy workspace_resolver: Option>>, } impl NoSloppyImportsRule { pub fn new( - sloppy_imports_resolver: Option>, workspace_resolver: Option>>, ) -> Self { - NoSloppyImportsRule { - sloppy_imports_resolver, - workspace_resolver, - } + NoSloppyImportsRule { workspace_resolver } } } @@ -54,7 +47,11 @@ impl ExtendedLintRule for NoSloppyImportsRule { // do sloppy import resolution because sloppy import // resolution requires knowing about the surrounding files // in addition to the current one - self.sloppy_imports_resolver.is_none() || self.workspace_resolver.is_none() + let Some(workspace_resolver) = &self.workspace_resolver else { + return true; + }; + !workspace_resolver.sloppy_imports_enabled() + && !workspace_resolver.has_compiler_options_root_dirs() } fn help_docs_url(&self) -> Cow<'static, str> { @@ -75,16 +72,12 @@ impl LintRule for NoSloppyImportsRule { let Some(workspace_resolver) = &self.workspace_resolver else { return; }; - let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver else { - return; - }; if context.specifier().scheme() != "file" { return; } let resolver = SloppyImportCaptureResolver { workspace_resolver, - sloppy_imports_resolver, captures: Default::default(), }; @@ -102,7 +95,9 @@ impl LintRule for NoSloppyImportsRule { maybe_npm_resolver: None, }); - for (referrer, sloppy_import) in resolver.captures.borrow_mut().drain() { + for (referrer, (specifier, sloppy_reason)) in + resolver.captures.borrow_mut().drain() + { let start_range = context.text_info().loc_to_source_pos(LineAndColumnIndex { line_index: referrer.range.start.line, @@ -126,10 +121,12 @@ impl LintRule for NoSloppyImportsRule { custom_docs_url: Some(DOCS_URL.to_string()), fixes: context .specifier() - .make_relative(sloppy_import.as_specifier()) + .make_relative(&specifier) .map(|relative| { vec![LintFix { - description: Cow::Owned(sloppy_import.as_quick_fix_message()), + description: Cow::Owned( + sloppy_reason.quick_fix_message_for_specifier(&specifier), + ), changes: vec![LintFixChange { new_text: Cow::Owned({ let relative = if relative.starts_with("../") { @@ -176,8 +173,9 @@ impl LintRule for NoSloppyImportsRule { #[derive(Debug)] struct SloppyImportCaptureResolver<'a> { workspace_resolver: &'a WorkspaceResolver, - sloppy_imports_resolver: &'a CliSloppyImportsResolver, - captures: RefCell>, + captures: RefCell< + HashMap, + >, } impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> { @@ -194,45 +192,37 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> { &referrer_range.specifier, match resolution_kind { ResolutionKind::Execution => { - deno_config::workspace::ResolutionKind::Execution + deno_resolver::workspace::ResolutionKind::Execution } ResolutionKind::Types => { - deno_config::workspace::ResolutionKind::Types + deno_resolver::workspace::ResolutionKind::Types } }, ) .map_err(|err| ResolveError::Other(JsErrorBox::from_err(err)))?; match resolution { - deno_config::workspace::MappedResolution::Normal { - specifier, .. - } - | deno_config::workspace::MappedResolution::ImportMap { - specifier, .. 
- } => match self.sloppy_imports_resolver.resolve( - &specifier, - match resolution_kind { - ResolutionKind::Execution => SloppyImportsResolutionKind::Execution, - ResolutionKind::Types => SloppyImportsResolutionKind::Types, - }, - ) { - Some(res) => { + deno_resolver::workspace::MappedResolution::Normal { + specifier, + sloppy_reason, + .. + } => { + if let Some(sloppy_reason) = sloppy_reason { self .captures .borrow_mut() .entry(referrer_range.clone()) - .or_insert_with(|| res.clone()); - Ok(res.into_specifier()) + .or_insert_with(|| (specifier.clone(), sloppy_reason)); } - None => Ok(specifier), - }, - deno_config::workspace::MappedResolution::WorkspaceJsrPackage { + Ok(specifier) + } + deno_resolver::workspace::MappedResolution::WorkspaceJsrPackage { .. } - | deno_config::workspace::MappedResolution::WorkspaceNpmPackage { + | deno_resolver::workspace::MappedResolution::WorkspaceNpmPackage { .. } - | deno_config::workspace::MappedResolution::PackageJson { .. } => { + | deno_resolver::workspace::MappedResolution::PackageJson { .. } => { // this error is ignored Err(ResolveError::Other(JsErrorBox::generic(""))) } diff --git a/cli/tools/registry/mod.rs b/cli/tools/registry/mod.rs index c2ed94e8473aee..1834774d0c4dad 100644 --- a/cli/tools/registry/mod.rs +++ b/cli/tools/registry/mod.rs @@ -120,7 +120,6 @@ pub async fn publish( } let specifier_unfurler = Arc::new(SpecifierUnfurler::new( - cli_factory.sloppy_imports_resolver()?.cloned(), cli_factory.workspace_resolver().await?.clone(), cli_options.unstable_bare_node_builtins(), )); diff --git a/cli/tools/registry/unfurl.rs b/cli/tools/registry/unfurl.rs index 469a19fdf4124f..526b4cbdcb71de 100644 --- a/cli/tools/registry/unfurl.rs +++ b/cli/tools/registry/unfurl.rs @@ -15,9 +15,6 @@ use deno_ast::ParsedSource; use deno_ast::SourceRange; use deno_ast::SourceTextInfo; use deno_ast::SourceTextProvider; -use deno_config::workspace::MappedResolution; -use deno_config::workspace::PackageJsonDepResolution; -use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow; use deno_core::ModuleSpecifier; use deno_graph::DependencyDescriptor; @@ -27,12 +24,13 @@ use deno_graph::StaticDependencyKind; use deno_graph::TypeScriptReference; use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepWorkspaceReq; -use deno_resolver::sloppy_imports::SloppyImportsResolutionKind; +use deno_resolver::workspace::MappedResolution; +use deno_resolver::workspace::PackageJsonDepResolution; +use deno_resolver::workspace::WorkspaceResolver; use deno_runtime::deno_node::is_builtin_node_module; use deno_semver::Version; use deno_semver::VersionReq; -use crate::resolver::CliSloppyImportsResolver; use crate::sys::CliSys; #[derive(Debug, Clone)] @@ -190,14 +188,12 @@ enum UnfurlSpecifierError { } pub struct SpecifierUnfurler { - sloppy_imports_resolver: Option>, workspace_resolver: Arc>, bare_node_builtins: bool, } impl SpecifierUnfurler { pub fn new( - sloppy_imports_resolver: Option>, workspace_resolver: Arc>, bare_node_builtins: bool, ) -> Self { @@ -206,7 +202,6 @@ impl SpecifierUnfurler { PackageJsonDepResolution::Enabled ); Self { - sloppy_imports_resolver, workspace_resolver, bare_node_builtins, } @@ -216,7 +211,7 @@ impl SpecifierUnfurler { &self, referrer: &ModuleSpecifier, specifier: &str, - resolution_kind: SloppyImportsResolutionKind, + resolution_kind: deno_resolver::workspace::ResolutionKind, text_info: &SourceTextInfo, range: &deno_graph::PositionRange, diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic), @@ -251,16 
+246,15 @@ impl SpecifierUnfurler { &self, referrer: &ModuleSpecifier, specifier: &str, - resolution_kind: SloppyImportsResolutionKind, + resolution_kind: deno_resolver::workspace::ResolutionKind, ) -> Result, UnfurlSpecifierError> { - let resolved = if let Ok(resolved) = self.workspace_resolver.resolve( - specifier, - referrer, - resolution_kind.into(), - ) { + let resolved = if let Ok(resolved) = + self + .workspace_resolver + .resolve(specifier, referrer, resolution_kind) + { match resolved { - MappedResolution::Normal { specifier, .. } - | MappedResolution::ImportMap { specifier, .. } => Some(specifier), + MappedResolution::Normal { specifier, .. } => Some(specifier), MappedResolution::WorkspaceJsrPackage { pkg_req_ref, .. } => { Some(ModuleSpecifier::parse(&pkg_req_ref.to_string()).unwrap()) } @@ -398,15 +392,6 @@ impl SpecifierUnfurler { // } else { // resolved // }; - let resolved = - if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver { - sloppy_imports_resolver - .resolve(&resolved, resolution_kind) - .map(|res| res.into_specifier()) - .unwrap_or(resolved) - } else { - resolved - }; let relative_resolved = relative_url(&resolved, referrer); if relative_resolved == specifier { Ok(None) // nothing to unfurl @@ -464,7 +449,7 @@ impl SpecifierUnfurler { let maybe_unfurled = self.unfurl_specifier_reporting_diagnostic( module_url, specifier, - SloppyImportsResolutionKind::Execution, // dynamic imports are always execution + deno_resolver::workspace::ResolutionKind::Execution, // dynamic imports are always execution text_info, &dep.argument_range, diagnostic_reporter, @@ -492,7 +477,7 @@ impl SpecifierUnfurler { let unfurled = self.unfurl_specifier_reporting_diagnostic( module_url, specifier, - SloppyImportsResolutionKind::Execution, // dynamic imports are always execution + deno_resolver::workspace::ResolutionKind::Execution, // dynamic imports are always execution text_info, &dep.argument_range, diagnostic_reporter, @@ -538,7 +523,7 @@ impl SpecifierUnfurler { let analyze_specifier = |specifier: &str, range: &deno_graph::PositionRange, - resolution_kind: SloppyImportsResolutionKind, + resolution_kind: deno_resolver::workspace::ResolutionKind, text_changes: &mut Vec, diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic)| { if let Some(unfurled) = self.unfurl_specifier_reporting_diagnostic( @@ -559,18 +544,18 @@ impl SpecifierUnfurler { match dep { DependencyDescriptor::Static(dep) => { let resolution_kind = if parsed_source.media_type().is_declaration() { - SloppyImportsResolutionKind::Types + deno_resolver::workspace::ResolutionKind::Types } else { match dep.kind { StaticDependencyKind::Export | StaticDependencyKind::Import | StaticDependencyKind::ExportEquals | StaticDependencyKind::ImportEquals => { - SloppyImportsResolutionKind::Execution + deno_resolver::workspace::ResolutionKind::Execution } StaticDependencyKind::ExportType | StaticDependencyKind::ImportType => { - SloppyImportsResolutionKind::Types + deno_resolver::workspace::ResolutionKind::Types } } }; @@ -616,7 +601,7 @@ impl SpecifierUnfurler { analyze_specifier( &specifier_with_range.text, &specifier_with_range.range, - SloppyImportsResolutionKind::Types, + deno_resolver::workspace::ResolutionKind::Types, &mut text_changes, diagnostic_reporter, ); @@ -625,7 +610,7 @@ impl SpecifierUnfurler { analyze_specifier( &jsdoc.specifier.text, &jsdoc.specifier.range, - SloppyImportsResolutionKind::Types, + deno_resolver::workspace::ResolutionKind::Types, &mut text_changes, diagnostic_reporter, ); @@ -634,7 
+619,7 @@ impl SpecifierUnfurler { analyze_specifier( &specifier_with_range.text, &specifier_with_range.range, - SloppyImportsResolutionKind::Execution, + deno_resolver::workspace::ResolutionKind::Execution, &mut text_changes, diagnostic_reporter, ); @@ -643,7 +628,7 @@ impl SpecifierUnfurler { analyze_specifier( &specifier_with_range.text, &specifier_with_range.range, - SloppyImportsResolutionKind::Types, + deno_resolver::workspace::ResolutionKind::Types, &mut text_changes, diagnostic_reporter, ); @@ -700,7 +685,7 @@ mod tests { use deno_config::workspace::ResolverWorkspaceJsrPackage; use deno_core::serde_json::json; use deno_core::url::Url; - use deno_resolver::sloppy_imports::SloppyImportsCachedFs; + use deno_resolver::workspace::SloppyImportsOptions; use deno_runtime::deno_node::PackageJson; use deno_semver::Version; use import_map::ImportMapWithDiagnostics; @@ -760,18 +745,14 @@ mod tests { exports: IndexMap::from([(".".to_string(), "mod.ts".to_string())]), }], vec![Arc::new(package_json)], - deno_config::workspace::PackageJsonDepResolution::Enabled, + deno_resolver::workspace::PackageJsonDepResolution::Enabled, + SloppyImportsOptions::Enabled, + Default::default(), Default::default(), Default::default(), CliSys::default(), ); - let unfurler = SpecifierUnfurler::new( - Some(Arc::new(CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new(CliSys::default()), - ))), - Arc::new(workspace_resolver), - true, - ); + let unfurler = SpecifierUnfurler::new(Arc::new(workspace_resolver), true); // Unfurling TS file should apply changes. { @@ -926,18 +907,14 @@ export type * from "./c.d.ts"; Arc::new(pkg_json_subtract), Arc::new(pkg_json_publishing), ], - deno_config::workspace::PackageJsonDepResolution::Enabled, + deno_resolver::workspace::PackageJsonDepResolution::Enabled, + Default::default(), + Default::default(), Default::default(), Default::default(), sys.clone(), ); - let unfurler = SpecifierUnfurler::new( - Some(Arc::new(CliSloppyImportsResolver::new( - SloppyImportsCachedFs::new(sys), - ))), - Arc::new(workspace_resolver), - true, - ); + let unfurler = SpecifierUnfurler::new(Arc::new(workspace_resolver), true); { let source_code = r#"import add from "add"; diff --git a/resolvers/deno/Cargo.toml b/resolvers/deno/Cargo.toml index b333ba19c53970..92f509a4cff6a7 100644 --- a/resolvers/deno/Cargo.toml +++ b/resolvers/deno/Cargo.toml @@ -33,13 +33,18 @@ deno_path_util.workspace = true deno_semver.workspace = true deno_terminal.workspace = true futures.workspace = true +import_map.workspace = true +indexmap.workspace = true log.workspace = true node_resolver.workspace = true once_cell.workspace = true parking_lot.workspace = true +serde.workspace = true +serde_json.workspace = true sys_traits.workspace = true thiserror.workspace = true url.workspace = true [dev-dependencies] +sys_traits = { workspace = true, features = ["memory", "real", "serde_json"] } test_util.workspace = true diff --git a/resolvers/deno/factory.rs b/resolvers/deno/factory.rs index 57f78a546c021a..4e1a59788c6129 100644 --- a/resolvers/deno/factory.rs +++ b/resolvers/deno/factory.rs @@ -12,7 +12,6 @@ use deno_cache_dir::HttpCacheRc; use deno_cache_dir::LocalHttpCache; use deno_config::deno_json::NodeModulesDirMode; use deno_config::workspace::FolderConfigs; -use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::VendorEnablement; use deno_config::workspace::WorkspaceDirectory; use deno_config::workspace::WorkspaceDirectoryEmptyOptions; @@ -61,12 +60,13 @@ use 
crate::npm::NpmResolverCreateOptions; use crate::npmrc::discover_npmrc_from_workspace; use crate::npmrc::NpmRcDiscoverError; use crate::npmrc::ResolvedNpmRcRc; -use crate::sloppy_imports::SloppyImportsCachedFs; -use crate::sloppy_imports::SloppyImportsResolver; -use crate::sloppy_imports::SloppyImportsResolverRc; use crate::sync::new_rc; use crate::sync::MaybeSend; use crate::sync::MaybeSync; +use crate::workspace::FsCacheOptions; +use crate::workspace::PackageJsonDepResolution; +use crate::workspace::SloppyImportsOptions; +use crate::workspace::WorkspaceResolver; use crate::DefaultDenoResolverRc; use crate::DenoResolver; use crate::DenoResolverOptions; @@ -133,7 +133,7 @@ pub trait SpecifiedImportMapProvider: { async fn get( &self, - ) -> Result, anyhow::Error>; + ) -> Result, anyhow::Error>; } #[derive(Debug, Clone)] @@ -560,7 +560,6 @@ impl WorkspaceFactory { #[derive(Debug, Default)] pub struct ResolverFactoryOptions { pub conditions_from_resolution_mode: ConditionsFromResolutionMode, - pub no_sloppy_imports_cache: bool, pub npm_system_info: NpmSystemInfo, pub node_resolution_cache: Option, pub package_json_cache: Option, @@ -593,8 +592,6 @@ pub struct ResolverFactory { npm_resolver: Deferred>, npm_resolution: NpmResolutionCellRc, pkg_json_resolver: Deferred>, - sloppy_imports_resolver: - Deferred>>>, workspace_factory: WorkspaceFactoryRc, workspace_resolver: async_once_cell::OnceCell>, } @@ -616,7 +613,6 @@ impl ResolverFactory { npm_resolution: Default::default(), npm_resolver: Default::default(), pkg_json_resolver: Default::default(), - sloppy_imports_resolver: Default::default(), workspace_factory, workspace_resolver: Default::default(), options, @@ -646,7 +642,6 @@ impl ResolverFactory { .workspace_directory()? .workspace .vendor_dir_path(), - sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(), workspace_resolver: self.workspace_resolver().await?.clone(), }))) } @@ -770,38 +765,6 @@ impl ResolverFactory { }) } - pub fn sloppy_imports_resolver( - &self, - ) -> Result< - Option<&SloppyImportsResolverRc>>, - anyhow::Error, - > { - self - .sloppy_imports_resolver - .get_or_try_init(|| { - let enabled = self.options.unstable_sloppy_imports - || self - .workspace_factory - .workspace_directory()? - .workspace - .has_unstable("sloppy-imports"); - if enabled { - Ok(Some(new_rc(SloppyImportsResolver::new( - if self.options.no_sloppy_imports_cache { - SloppyImportsCachedFs::new_without_stat_cache( - self.workspace_factory.sys.clone(), - ) - } else { - SloppyImportsCachedFs::new(self.workspace_factory.sys.clone()) - }, - )))) - } else { - Ok(None) - } - }) - .map(|v| v.as_ref()) - } - pub async fn workspace_resolver( &self, ) -> Result<&WorkspaceResolverRc, anyhow::Error> { @@ -815,7 +778,7 @@ impl ResolverFactory { Some(import_map) => import_map.get().await?, None => None, }; - let options = deno_config::workspace::CreateResolverOptions { + let options = crate::workspace::CreateResolverOptions { pkg_json_dep_resolution: match self .options .package_json_dep_resolution @@ -834,9 +797,24 @@ impl ResolverFactory { } }, specified_import_map, + sloppy_imports_options: if self.options.unstable_sloppy_imports + || self + .workspace_factory + .workspace_directory()? 
+ .workspace + .has_unstable("sloppy-imports") + { + SloppyImportsOptions::Enabled + } else { + SloppyImportsOptions::Disabled + }, + fs_cache_options: FsCacheOptions::Enabled, }; - let resolver = workspace - .create_resolver(self.workspace_factory.sys.clone(), options)?; + let resolver = WorkspaceResolver::from_workspace( + workspace, + self.workspace_factory.sys.clone(), + options, + )?; if !resolver.diagnostics().is_empty() { // todo(dsherret): do not log this in this crate... that should be // a CLI responsibility diff --git a/resolvers/deno/lib.rs b/resolvers/deno/lib.rs index 26272af3bcb1ca..4ecc222ba6f8d7 100644 --- a/resolvers/deno/lib.rs +++ b/resolvers/deno/lib.rs @@ -7,11 +7,6 @@ use std::path::PathBuf; use boxed_error::Boxed; use deno_cache_dir::npm::NpmCacheDir; -use deno_config::workspace::MappedResolution; -use deno_config::workspace::MappedResolutionDiagnostic; -use deno_config::workspace::MappedResolutionError; -use deno_config::workspace::WorkspaceResolvePkgJsonFolderError; -use deno_config::workspace::WorkspaceResolver; use deno_error::JsError; use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValueParseError; @@ -31,9 +26,6 @@ use npm::NpmReqResolverRc; use npm::ResolveIfForNpmPackageErrorKind; use npm::ResolvePkgFolderFromDenoReqError; use npm::ResolveReqWithSubPathErrorKind; -use sloppy_imports::SloppyImportResolverFs; -use sloppy_imports::SloppyImportsResolutionKind; -use sloppy_imports::SloppyImportsResolverRc; use sys_traits::FsCanonicalize; use sys_traits::FsMetadata; use sys_traits::FsRead; @@ -41,12 +33,18 @@ use sys_traits::FsReadDir; use thiserror::Error; use url::Url; +use crate::workspace::MappedResolution; +use crate::workspace::MappedResolutionDiagnostic; +use crate::workspace::MappedResolutionError; +use crate::workspace::WorkspaceResolvePkgJsonFolderError; +use crate::workspace::WorkspaceResolver; + pub mod cjs; pub mod factory; pub mod npm; pub mod npmrc; -pub mod sloppy_imports; mod sync; +pub mod workspace; #[allow(clippy::disallowed_types)] pub type WorkspaceResolverRc = @@ -128,7 +126,6 @@ pub struct DenoResolverOptions< TInNpmPackageChecker: InNpmPackageChecker, TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, TNpmPackageFolderResolver: NpmPackageFolderResolver, - TSloppyImportResolverFs: SloppyImportResolverFs, TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, > { pub in_npm_pkg_checker: TInNpmPackageChecker, @@ -140,8 +137,6 @@ pub struct DenoResolverOptions< TSys, >, >, - pub sloppy_imports_resolver: - Option>, pub workspace_resolver: WorkspaceResolverRc, /// Whether "bring your own node_modules" is enabled where Deno does not /// setup the node_modules directories automatically, but instead uses @@ -155,14 +150,12 @@ pub type DenoResolverRc< TInNpmPackageChecker, TIsBuiltInNodeModuleChecker, TNpmPackageFolderResolver, - TSloppyImportResolverFs, TSys, > = crate::sync::MaybeArc< DenoResolver< TInNpmPackageChecker, TIsBuiltInNodeModuleChecker, TNpmPackageFolderResolver, - TSloppyImportResolverFs, TSys, >, >; @@ -173,7 +166,6 @@ pub type DefaultDenoResolverRc = DenoResolverRc< npm::DenoInNpmPackageChecker, node_resolver::DenoIsBuiltInNodeModuleChecker, npm::NpmResolver, - sloppy_imports::SloppyImportsCachedFs, TSys, >; @@ -184,7 +176,6 @@ pub struct DenoResolver< TInNpmPackageChecker: InNpmPackageChecker, TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, TNpmPackageFolderResolver: NpmPackageFolderResolver, - TSloppyImportResolverFs: SloppyImportResolverFs, TSys: FsCanonicalize + FsMetadata + 
FsRead + FsReadDir, > { in_npm_pkg_checker: TInNpmPackageChecker, @@ -196,8 +187,6 @@ pub struct DenoResolver< TSys, >, >, - sloppy_imports_resolver: - Option>, workspace_resolver: WorkspaceResolverRc, is_byonm: bool, maybe_vendor_specifier: Option, @@ -207,14 +196,12 @@ impl< TInNpmPackageChecker: InNpmPackageChecker, TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, TNpmPackageFolderResolver: NpmPackageFolderResolver, - TSloppyImportResolverFs: SloppyImportResolverFs, TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir, > DenoResolver< TInNpmPackageChecker, TIsBuiltInNodeModuleChecker, TNpmPackageFolderResolver, - TSloppyImportResolverFs, TSys, > { @@ -223,14 +210,12 @@ impl< TInNpmPackageChecker, TIsBuiltInNodeModuleChecker, TNpmPackageFolderResolver, - TSloppyImportResolverFs, TSys, >, ) -> Self { Self { in_npm_pkg_checker: options.in_npm_pkg_checker, node_and_npm_resolver: options.node_and_req_resolver, - sloppy_imports_resolver: options.sloppy_imports_resolver, workspace_resolver: options.workspace_resolver, is_byonm: options.is_byonm, maybe_vendor_specifier: options @@ -277,33 +262,10 @@ impl< MappedResolution::Normal { specifier, maybe_diagnostic: current_diagnostic, - } - | MappedResolution::ImportMap { - specifier, - maybe_diagnostic: current_diagnostic, + .. } => { maybe_diagnostic = current_diagnostic; - // do sloppy imports resolution if enabled - if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver { - Ok( - sloppy_imports_resolver - .resolve( - &specifier, - match resolution_kind { - NodeResolutionKind::Execution => { - SloppyImportsResolutionKind::Execution - } - NodeResolutionKind::Types => { - SloppyImportsResolutionKind::Types - } - }, - ) - .map(|s| s.into_specifier()) - .unwrap_or(specifier), - ) - } else { - Ok(specifier) - } + Ok(specifier) } MappedResolution::WorkspaceJsrPackage { specifier, .. } => { Ok(specifier) diff --git a/resolvers/deno/sloppy_imports.rs b/resolvers/deno/sloppy_imports.rs deleted file mode 100644 index 8a43be16aa3917..00000000000000 --- a/resolvers/deno/sloppy_imports.rs +++ /dev/null @@ -1,582 +0,0 @@ -// Copyright 2018-2025 the Deno authors. MIT license. - -use std::borrow::Cow; -use std::path::Path; -use std::path::PathBuf; - -use deno_media_type::MediaType; -use deno_path_util::url_from_file_path; -use deno_path_util::url_to_file_path; -use sys_traits::FsMetadata; -use sys_traits::FsMetadataValue; -use url::Url; - -use crate::sync::MaybeDashMap; - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum SloppyImportsFsEntry { - File, - Dir, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum SloppyImportsResolution { - /// Ex. `./file.js` to `./file.ts` - JsToTs(Url), - /// Ex. `./file` to `./file.ts` - NoExtension(Url), - /// Ex. 
`./dir` to `./dir/index.ts` - Directory(Url), -} - -impl SloppyImportsResolution { - pub fn as_specifier(&self) -> &Url { - match self { - Self::JsToTs(specifier) => specifier, - Self::NoExtension(specifier) => specifier, - Self::Directory(specifier) => specifier, - } - } - - pub fn into_specifier(self) -> Url { - match self { - Self::JsToTs(specifier) => specifier, - Self::NoExtension(specifier) => specifier, - Self::Directory(specifier) => specifier, - } - } - - pub fn as_suggestion_message(&self) -> String { - format!("Maybe {}", self.as_base_message()) - } - - pub fn as_quick_fix_message(&self) -> String { - let message = self.as_base_message(); - let mut chars = message.chars(); - format!( - "{}{}.", - chars.next().unwrap().to_uppercase(), - chars.as_str() - ) - } - - fn as_base_message(&self) -> String { - match self { - SloppyImportsResolution::JsToTs(specifier) => { - let media_type = MediaType::from_specifier(specifier); - format!("change the extension to '{}'", media_type.as_ts_extension()) - } - SloppyImportsResolution::NoExtension(specifier) => { - let media_type = MediaType::from_specifier(specifier); - format!("add a '{}' extension", media_type.as_ts_extension()) - } - SloppyImportsResolution::Directory(specifier) => { - let file_name = specifier - .path() - .rsplit_once('/') - .map(|(_, file_name)| file_name) - .unwrap_or(specifier.path()); - format!("specify path to '{}' file in directory instead", file_name) - } - } - } -} - -/// The kind of resolution currently being done. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum SloppyImportsResolutionKind { - /// Resolving for code that will be executed. - Execution, - /// Resolving for code that will be used for type information. - Types, -} - -impl SloppyImportsResolutionKind { - pub fn is_types(&self) -> bool { - *self == SloppyImportsResolutionKind::Types - } -} - -impl From - for deno_config::workspace::ResolutionKind -{ - fn from(value: SloppyImportsResolutionKind) -> Self { - match value { - SloppyImportsResolutionKind::Execution => Self::Execution, - SloppyImportsResolutionKind::Types => Self::Types, - } - } -} - -pub trait SloppyImportResolverFs { - fn stat_sync(&self, path: &Path) -> Option; - - fn is_file(&self, path: &Path) -> bool { - self.stat_sync(path) == Some(SloppyImportsFsEntry::File) - } -} - -#[allow(clippy::disallowed_types)] -pub type SloppyImportsResolverRc = - crate::sync::MaybeArc>; - -#[derive(Debug)] -pub struct SloppyImportsResolver { - fs: Fs, -} - -impl SloppyImportsResolver { - pub fn new(fs: Fs) -> Self { - Self { fs } - } - - pub fn resolve( - &self, - specifier: &Url, - resolution_kind: SloppyImportsResolutionKind, - ) -> Option { - fn path_without_ext( - path: &Path, - media_type: MediaType, - ) -> Option> { - let old_path_str = path.to_string_lossy(); - match media_type { - MediaType::Unknown => Some(old_path_str), - _ => old_path_str - .strip_suffix(media_type.as_ts_extension()) - .map(|s| Cow::Owned(s.to_string())), - } - } - - fn media_types_to_paths( - path_no_ext: &str, - original_media_type: MediaType, - probe_media_type_types: Vec, - reason: SloppyImportsResolutionReason, - ) -> Vec<(PathBuf, SloppyImportsResolutionReason)> { - probe_media_type_types - .into_iter() - .filter(|media_type| *media_type != original_media_type) - .map(|media_type| { - ( - PathBuf::from(format!( - "{}{}", - path_no_ext, - media_type.as_ts_extension() - )), - reason, - ) - }) - .collect::>() - } - - if specifier.scheme() != "file" { - return None; - } - - let path = url_to_file_path(specifier).ok()?; - 
- #[derive(Clone, Copy)] - enum SloppyImportsResolutionReason { - JsToTs, - NoExtension, - Directory, - } - - let probe_paths: Vec<(PathBuf, SloppyImportsResolutionReason)> = - match self.fs.stat_sync(&path) { - Some(SloppyImportsFsEntry::File) => { - if resolution_kind.is_types() { - let media_type = MediaType::from_specifier(specifier); - // attempt to resolve the .d.ts file before the .js file - let probe_media_type_types = match media_type { - MediaType::JavaScript => { - vec![(MediaType::Dts), MediaType::JavaScript] - } - MediaType::Mjs => { - vec![MediaType::Dmts, MediaType::Dts, MediaType::Mjs] - } - MediaType::Cjs => { - vec![MediaType::Dcts, MediaType::Dts, MediaType::Cjs] - } - _ => return None, - }; - let path_no_ext = path_without_ext(&path, media_type)?; - media_types_to_paths( - &path_no_ext, - media_type, - probe_media_type_types, - SloppyImportsResolutionReason::JsToTs, - ) - } else { - return None; - } - } - entry @ None | entry @ Some(SloppyImportsFsEntry::Dir) => { - let media_type = MediaType::from_specifier(specifier); - let probe_media_type_types = match media_type { - MediaType::JavaScript => ( - if resolution_kind.is_types() { - vec![MediaType::TypeScript, MediaType::Tsx, MediaType::Dts] - } else { - vec![MediaType::TypeScript, MediaType::Tsx] - }, - SloppyImportsResolutionReason::JsToTs, - ), - MediaType::Jsx => { - (vec![MediaType::Tsx], SloppyImportsResolutionReason::JsToTs) - } - MediaType::Mjs => ( - if resolution_kind.is_types() { - vec![MediaType::Mts, MediaType::Dmts, MediaType::Dts] - } else { - vec![MediaType::Mts] - }, - SloppyImportsResolutionReason::JsToTs, - ), - MediaType::Cjs => ( - if resolution_kind.is_types() { - vec![MediaType::Cts, MediaType::Dcts, MediaType::Dts] - } else { - vec![MediaType::Cts] - }, - SloppyImportsResolutionReason::JsToTs, - ), - MediaType::TypeScript - | MediaType::Mts - | MediaType::Cts - | MediaType::Dts - | MediaType::Dmts - | MediaType::Dcts - | MediaType::Tsx - | MediaType::Json - | MediaType::Wasm - | MediaType::Css - | MediaType::SourceMap => { - return None; - } - MediaType::Unknown => ( - if resolution_kind.is_types() { - vec![ - MediaType::TypeScript, - MediaType::Tsx, - MediaType::Mts, - MediaType::Dts, - MediaType::Dmts, - MediaType::Dcts, - MediaType::JavaScript, - MediaType::Jsx, - MediaType::Mjs, - ] - } else { - vec![ - MediaType::TypeScript, - MediaType::JavaScript, - MediaType::Tsx, - MediaType::Jsx, - MediaType::Mts, - MediaType::Mjs, - ] - }, - SloppyImportsResolutionReason::NoExtension, - ), - }; - let mut probe_paths = match path_without_ext(&path, media_type) { - Some(path_no_ext) => media_types_to_paths( - &path_no_ext, - media_type, - probe_media_type_types.0, - probe_media_type_types.1, - ), - None => vec![], - }; - - if matches!(entry, Some(SloppyImportsFsEntry::Dir)) { - // try to resolve at the index file - if resolution_kind.is_types() { - probe_paths.push(( - path.join("index.ts"), - SloppyImportsResolutionReason::Directory, - )); - - probe_paths.push(( - path.join("index.mts"), - SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.d.ts"), - SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.d.mts"), - SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.js"), - SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.mjs"), - SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.tsx"), - 
SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.jsx"), - SloppyImportsResolutionReason::Directory, - )); - } else { - probe_paths.push(( - path.join("index.ts"), - SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.mts"), - SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.tsx"), - SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.js"), - SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.mjs"), - SloppyImportsResolutionReason::Directory, - )); - probe_paths.push(( - path.join("index.jsx"), - SloppyImportsResolutionReason::Directory, - )); - } - } - if probe_paths.is_empty() { - return None; - } - probe_paths - } - }; - - for (probe_path, reason) in probe_paths { - if self.fs.is_file(&probe_path) { - if let Ok(specifier) = url_from_file_path(&probe_path) { - match reason { - SloppyImportsResolutionReason::JsToTs => { - return Some(SloppyImportsResolution::JsToTs(specifier)); - } - SloppyImportsResolutionReason::NoExtension => { - return Some(SloppyImportsResolution::NoExtension(specifier)); - } - SloppyImportsResolutionReason::Directory => { - return Some(SloppyImportsResolution::Directory(specifier)); - } - } - } - } - } - - None - } -} - -#[derive(Debug)] -pub struct SloppyImportsCachedFs { - sys: TSys, - cache: Option>>, -} - -impl SloppyImportsCachedFs { - pub fn new(sys: TSys) -> Self { - Self { - sys, - cache: Some(Default::default()), - } - } - - pub fn new_without_stat_cache(sys: TSys) -> Self { - Self { sys, cache: None } - } -} - -impl SloppyImportResolverFs for SloppyImportsCachedFs { - fn stat_sync(&self, path: &Path) -> Option { - if let Some(cache) = &self.cache { - if let Some(entry) = cache.get(path) { - return *entry; - } - } - - let entry = self.sys.fs_metadata(path).ok().and_then(|stat| { - if stat.file_type().is_file() { - Some(SloppyImportsFsEntry::File) - } else if stat.file_type().is_dir() { - Some(SloppyImportsFsEntry::Dir) - } else { - None - } - }); - - if let Some(cache) = &self.cache { - cache.insert(path.to_owned(), entry); - } - entry - } -} - -#[cfg(test)] -mod test { - use test_util::TestContext; - - use super::*; - - #[test] - fn test_unstable_sloppy_imports() { - fn resolve(specifier: &Url) -> Option { - resolve_with_resolution_kind( - specifier, - SloppyImportsResolutionKind::Execution, - ) - } - - fn resolve_types(specifier: &Url) -> Option { - resolve_with_resolution_kind( - specifier, - SloppyImportsResolutionKind::Types, - ) - } - - fn resolve_with_resolution_kind( - specifier: &Url, - resolution_kind: SloppyImportsResolutionKind, - ) -> Option { - struct RealSloppyImportsResolverFs; - impl SloppyImportResolverFs for RealSloppyImportsResolverFs { - fn stat_sync(&self, path: &Path) -> Option { - #[allow(clippy::disallowed_methods)] - let stat = std::fs::metadata(path).ok()?; - if stat.is_dir() { - Some(SloppyImportsFsEntry::Dir) - } else if stat.is_file() { - Some(SloppyImportsFsEntry::File) - } else { - None - } - } - } - - SloppyImportsResolver::new(RealSloppyImportsResolverFs) - .resolve(specifier, resolution_kind) - } - - let context = TestContext::default(); - let temp_dir = context.temp_dir().path(); - - // scenarios like resolving ./example.js to ./example.ts - for (ext_from, ext_to) in [("js", "ts"), ("js", "tsx"), ("mjs", "mts")] { - let ts_file = temp_dir.join(format!("file.{}", ext_to)); - ts_file.write(""); - assert_eq!(resolve(&ts_file.url_file()), 
None); - assert_eq!( - resolve( - &temp_dir - .url_dir() - .join(&format!("file.{}", ext_from)) - .unwrap() - ), - Some(SloppyImportsResolution::JsToTs(ts_file.url_file())), - ); - ts_file.remove_file(); - } - - // no extension scenarios - for ext in ["js", "ts", "js", "tsx", "jsx", "mjs", "mts"] { - let file = temp_dir.join(format!("file.{}", ext)); - file.write(""); - assert_eq!( - resolve( - &temp_dir - .url_dir() - .join("file") // no ext - .unwrap() - ), - Some(SloppyImportsResolution::NoExtension(file.url_file())) - ); - file.remove_file(); - } - - // .ts and .js exists, .js specified (goes to specified) - { - let ts_file = temp_dir.join("file.ts"); - ts_file.write(""); - let js_file = temp_dir.join("file.js"); - js_file.write(""); - assert_eq!(resolve(&js_file.url_file()), None); - } - - // only js exists, .js specified - { - let js_only_file = temp_dir.join("js_only.js"); - js_only_file.write(""); - assert_eq!(resolve(&js_only_file.url_file()), None); - assert_eq!(resolve_types(&js_only_file.url_file()), None); - } - - // resolving a directory to an index file - { - let routes_dir = temp_dir.join("routes"); - routes_dir.create_dir_all(); - let index_file = routes_dir.join("index.ts"); - index_file.write(""); - assert_eq!( - resolve(&routes_dir.url_file()), - Some(SloppyImportsResolution::Directory(index_file.url_file())), - ); - } - - // both a directory and a file with specifier is present - { - let api_dir = temp_dir.join("api"); - api_dir.create_dir_all(); - let bar_file = api_dir.join("bar.ts"); - bar_file.write(""); - let api_file = temp_dir.join("api.ts"); - api_file.write(""); - assert_eq!( - resolve(&api_dir.url_file()), - Some(SloppyImportsResolution::NoExtension(api_file.url_file())), - ); - } - } - - #[test] - fn test_sloppy_import_resolution_suggestion_message() { - // directory - assert_eq!( - SloppyImportsResolution::Directory( - Url::parse("file:///dir/index.js").unwrap() - ) - .as_suggestion_message(), - "Maybe specify path to 'index.js' file in directory instead" - ); - // no ext - assert_eq!( - SloppyImportsResolution::NoExtension( - Url::parse("file:///dir/index.mjs").unwrap() - ) - .as_suggestion_message(), - "Maybe add a '.mjs' extension" - ); - // js to ts - assert_eq!( - SloppyImportsResolution::JsToTs( - Url::parse("file:///dir/index.mts").unwrap() - ) - .as_suggestion_message(), - "Maybe change the extension to '.mts'" - ); - } -} diff --git a/resolvers/deno/workspace.rs b/resolvers/deno/workspace.rs new file mode 100644 index 00000000000000..b7169497222435 --- /dev/null +++ b/resolvers/deno/workspace.rs @@ -0,0 +1,2812 @@ +// Copyright 2018-2025 the Deno authors. MIT license. 
+ +// use super::UrlRc; + +use std::borrow::Cow; +use std::collections::BTreeMap; +use std::fmt; +use std::path::Path; +use std::path::PathBuf; + +use deno_config::deno_json::ConfigFile; +use deno_config::deno_json::ConfigFileError; +use deno_config::workspace::ResolverWorkspaceJsrPackage; +use deno_config::workspace::Workspace; +use deno_error::JsError; +use deno_media_type::MediaType; +use deno_package_json::PackageJsonDepValue; +use deno_package_json::PackageJsonDepValueParseError; +use deno_package_json::PackageJsonDepWorkspaceReq; +use deno_package_json::PackageJsonDepsRc; +use deno_package_json::PackageJsonRc; +use deno_path_util::url_from_directory_path; +use deno_path_util::url_from_file_path; +use deno_path_util::url_to_file_path; +use deno_semver::jsr::JsrPackageReqReference; +use deno_semver::package::PackageReq; +use deno_semver::RangeSetOrTag; +use deno_semver::Version; +use deno_semver::VersionReq; +use import_map::specifier::SpecifierError; +use import_map::ImportMap; +use import_map::ImportMapDiagnostic; +use import_map::ImportMapError; +use import_map::ImportMapErrorKind; +use import_map::ImportMapWithDiagnostics; +use indexmap::IndexMap; +use node_resolver::NodeResolutionKind; +use serde::Deserialize; +use serde::Serialize; +use sys_traits::FsMetadata; +use sys_traits::FsMetadataValue; +use sys_traits::FsRead; +use thiserror::Error; +use url::Url; + +use crate::sync::new_rc; +use crate::sync::MaybeDashMap; + +#[allow(clippy::disallowed_types)] +type UrlRc = crate::sync::MaybeArc; + +#[derive(Debug)] +struct PkgJsonResolverFolderConfig { + deps: PackageJsonDepsRc, + pkg_json: PackageJsonRc, +} + +#[derive(Debug, Error, JsError)] +pub enum WorkspaceResolverCreateError { + #[class(inherit)] + #[error("Failed loading import map specified in '{referrer}'")] + ImportMapFetch { + referrer: Url, + #[source] + #[inherit] + source: Box, + }, + #[class(inherit)] + #[error(transparent)] + ImportMap( + #[from] + #[inherit] + ImportMapError, + ), +} + +/// Whether to resolve dependencies by reading the dependencies list +/// from a package.json +#[derive( + Debug, Default, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, +)] +pub enum PackageJsonDepResolution { + /// Resolves based on the dep entries in the package.json. + #[default] + Enabled, + /// Doesn't use the package.json to resolve dependencies. Let's the caller + /// resolve based on the file system. + Disabled, +} + +#[derive( + Debug, Default, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, +)] +pub enum SloppyImportsOptions { + Enabled, + #[default] + Disabled, +} + +/// Toggle FS metadata caching when probing files for sloppy imports and +/// `compilerOptions.rootDirs` resolution. +#[derive( + Debug, Default, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, +)] +pub enum FsCacheOptions { + #[default] + Enabled, + Disabled, +} + +#[derive(Debug, Default, Clone)] +pub struct CreateResolverOptions { + pub pkg_json_dep_resolution: PackageJsonDepResolution, + pub specified_import_map: Option, + pub sloppy_imports_options: SloppyImportsOptions, + pub fs_cache_options: FsCacheOptions, +} + +#[derive(Debug, Clone)] +pub struct SpecifiedImportMap { + pub base_url: Url, + pub value: serde_json::Value, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum MappedResolutionDiagnostic { + ConstraintNotMatchedLocalVersion { + /// If it was for a patch (true) or workspace (false) member. 
+ is_patch: bool, + reference: JsrPackageReqReference, + local_version: Version, + }, +} + +impl std::fmt::Display for MappedResolutionDiagnostic { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::ConstraintNotMatchedLocalVersion { + is_patch, + reference, + local_version, + } => { + write!( + f, + "{0} '{1}@{2}' was not used because it did not match '{1}@{3}'", + if *is_patch { + "Patch" + } else { + "Workspace member" + }, + reference.req().name, + local_version, + reference.req().version_req + ) + } + } + } +} + +#[derive(Debug, Clone)] +pub enum MappedResolution<'a> { + Normal { + specifier: Url, + used_import_map: bool, + sloppy_reason: Option, + used_compiler_options_root_dirs: bool, + maybe_diagnostic: Option>, + }, + WorkspaceJsrPackage { + specifier: Url, + pkg_req_ref: JsrPackageReqReference, + }, + /// Resolved a bare specifier to a package.json that was a workspace member. + WorkspaceNpmPackage { + target_pkg_json: &'a PackageJsonRc, + pkg_name: &'a str, + sub_path: Option, + }, + PackageJson { + pkg_json: &'a PackageJsonRc, + alias: &'a str, + sub_path: Option, + dep_result: &'a Result, + }, +} + +#[derive(Debug, Clone, Error, JsError)] +#[class(type)] +pub enum WorkspaceResolveError { + #[error("Failed joining '{}' to '{}'. {:#}", .sub_path, .base, .error)] + InvalidExportPath { + base: Url, + sub_path: String, + error: url::ParseError, + }, + #[error("Unknown export '{}' for '{}'.\n Package exports:\n{}", export_name, package_name, .exports.iter().map(|e| format!(" * {}", e)).collect::>().join("\n"))] + UnknownExport { + package_name: String, + export_name: String, + exports: Vec, + }, +} + +#[derive(Debug, Error, JsError)] +pub enum MappedResolutionError { + #[class(inherit)] + #[error(transparent)] + Specifier(#[from] SpecifierError), + #[class(inherit)] + #[error(transparent)] + ImportMap(#[from] ImportMapError), + #[class(inherit)] + #[error(transparent)] + Workspace(#[from] WorkspaceResolveError), +} + +impl MappedResolutionError { + pub fn is_unmapped_bare_specifier(&self) -> bool { + match self { + MappedResolutionError::Specifier(err) => match err { + SpecifierError::InvalidUrl(_) => false, + SpecifierError::ImportPrefixMissing { .. 
} => true, + }, + MappedResolutionError::ImportMap(err) => { + matches!(**err, ImportMapErrorKind::UnmappedBareSpecifier(_, _)) + } + MappedResolutionError::Workspace(_) => false, + } + } +} + +#[derive(Error, Debug, JsError)] +#[class(inherit)] +#[error(transparent)] +pub struct WorkspaceResolvePkgJsonFolderError( + Box, +); + +impl WorkspaceResolvePkgJsonFolderError { + pub fn as_kind(&self) -> &WorkspaceResolvePkgJsonFolderErrorKind { + &self.0 + } + + pub fn into_kind(self) -> WorkspaceResolvePkgJsonFolderErrorKind { + *self.0 + } +} + +impl From for WorkspaceResolvePkgJsonFolderError +where + WorkspaceResolvePkgJsonFolderErrorKind: From, +{ + fn from(err: E) -> Self { + WorkspaceResolvePkgJsonFolderError(Box::new( + WorkspaceResolvePkgJsonFolderErrorKind::from(err), + )) + } +} + +#[derive(Debug, Error, JsError, Clone, PartialEq, Eq)] +#[class(type)] +pub enum WorkspaceResolvePkgJsonFolderErrorKind { + #[error("Could not find package.json with name '{0}' in workspace.")] + NotFound(String), + #[error("Found package.json in workspace, but version '{1}' didn't satisy constraint '{0}'.")] + VersionNotSatisfied(VersionReq, Version), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum CachedMetadataFsEntry { + File, + Dir, +} + +#[derive(Debug)] +struct CachedMetadataFs { + sys: TSys, + cache: Option>>, +} + +impl CachedMetadataFs { + fn new(sys: TSys, options: FsCacheOptions) -> Self { + Self { + sys, + cache: match options { + FsCacheOptions::Enabled => Some(Default::default()), + FsCacheOptions::Disabled => None, + }, + } + } + + fn stat_sync(&self, path: &Path) -> Option { + if let Some(cache) = &self.cache { + if let Some(entry) = cache.get(path) { + return *entry; + } + } + let entry = self.sys.fs_metadata(path).ok().and_then(|stat| { + if stat.file_type().is_file() { + Some(CachedMetadataFsEntry::File) + } else if stat.file_type().is_dir() { + Some(CachedMetadataFsEntry::Dir) + } else { + None + } + }); + if let Some(cache) = &self.cache { + cache.insert(path.to_owned(), entry); + } + entry + } + + fn is_file(&self, path: &Path) -> bool { + self.stat_sync(path) == Some(CachedMetadataFsEntry::File) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum SloppyImportsResolutionReason { + /// Ex. `./file.js` to `./file.ts` + JsToTs, + /// Ex. `./file` to `./file.ts` + NoExtension, + /// Ex. 
`./dir` to `./dir/index.ts` + Directory, +} + +impl SloppyImportsResolutionReason { + pub fn suggestion_message_for_specifier(&self, specifier: &Url) -> String { + format!("Maybe {}", self.base_message_for_specifier(specifier)) + } + + pub fn quick_fix_message_for_specifier(&self, specifier: &Url) -> String { + let message = self.base_message_for_specifier(specifier); + let mut chars = message.chars(); + format!( + "{}{}.", + chars.next().unwrap().to_uppercase(), + chars.as_str() + ) + } + + fn base_message_for_specifier(&self, specifier: &Url) -> String { + match self { + Self::JsToTs => { + let media_type = MediaType::from_specifier(specifier); + format!("change the extension to '{}'", media_type.as_ts_extension()) + } + Self::NoExtension => { + let media_type = MediaType::from_specifier(specifier); + format!("add a '{}' extension", media_type.as_ts_extension()) + } + Self::Directory => { + let file_name = specifier + .path() + .rsplit_once('/') + .map(|(_, file_name)| file_name) + .unwrap_or(specifier.path()); + format!("specify path to '{}' file in directory instead", file_name) + } + } + } +} + +#[derive(Debug)] +struct SloppyImportsResolver { + fs: CachedMetadataFs, + enabled: bool, +} + +impl SloppyImportsResolver { + fn new(fs: CachedMetadataFs, options: SloppyImportsOptions) -> Self { + Self { + fs, + enabled: match options { + SloppyImportsOptions::Enabled => true, + SloppyImportsOptions::Disabled => false, + }, + } + } + + fn resolve( + &self, + specifier: &Url, + resolution_kind: ResolutionKind, + ) -> Option<(Url, SloppyImportsResolutionReason)> { + if !self.enabled { + return None; + } + + fn path_without_ext( + path: &Path, + media_type: MediaType, + ) -> Option> { + let old_path_str = path.to_string_lossy(); + match media_type { + MediaType::Unknown => Some(old_path_str), + _ => old_path_str + .strip_suffix(media_type.as_ts_extension()) + .map(|s| Cow::Owned(s.to_string())), + } + } + + fn media_types_to_paths( + path_no_ext: &str, + original_media_type: MediaType, + probe_media_type_types: Vec, + reason: SloppyImportsResolutionReason, + ) -> Vec<(PathBuf, SloppyImportsResolutionReason)> { + probe_media_type_types + .into_iter() + .filter(|media_type| *media_type != original_media_type) + .map(|media_type| { + ( + PathBuf::from(format!( + "{}{}", + path_no_ext, + media_type.as_ts_extension() + )), + reason, + ) + }) + .collect::>() + } + + if specifier.scheme() != "file" { + return None; + } + + let path = url_to_file_path(specifier).ok()?; + + let probe_paths: Vec<(PathBuf, SloppyImportsResolutionReason)> = + match self.fs.stat_sync(&path) { + Some(CachedMetadataFsEntry::File) => { + if resolution_kind.is_types() { + let media_type = MediaType::from_specifier(specifier); + // attempt to resolve the .d.ts file before the .js file + let probe_media_type_types = match media_type { + MediaType::JavaScript => { + vec![(MediaType::Dts), MediaType::JavaScript] + } + MediaType::Mjs => { + vec![MediaType::Dmts, MediaType::Dts, MediaType::Mjs] + } + MediaType::Cjs => { + vec![MediaType::Dcts, MediaType::Dts, MediaType::Cjs] + } + _ => return None, + }; + let path_no_ext = path_without_ext(&path, media_type)?; + media_types_to_paths( + &path_no_ext, + media_type, + probe_media_type_types, + SloppyImportsResolutionReason::JsToTs, + ) + } else { + return None; + } + } + entry @ None | entry @ Some(CachedMetadataFsEntry::Dir) => { + let media_type = MediaType::from_specifier(specifier); + let probe_media_type_types = match media_type { + MediaType::JavaScript => ( + if 
resolution_kind.is_types() { + vec![MediaType::TypeScript, MediaType::Tsx, MediaType::Dts] + } else { + vec![MediaType::TypeScript, MediaType::Tsx] + }, + SloppyImportsResolutionReason::JsToTs, + ), + MediaType::Jsx => { + (vec![MediaType::Tsx], SloppyImportsResolutionReason::JsToTs) + } + MediaType::Mjs => ( + if resolution_kind.is_types() { + vec![MediaType::Mts, MediaType::Dmts, MediaType::Dts] + } else { + vec![MediaType::Mts] + }, + SloppyImportsResolutionReason::JsToTs, + ), + MediaType::Cjs => ( + if resolution_kind.is_types() { + vec![MediaType::Cts, MediaType::Dcts, MediaType::Dts] + } else { + vec![MediaType::Cts] + }, + SloppyImportsResolutionReason::JsToTs, + ), + MediaType::TypeScript + | MediaType::Mts + | MediaType::Cts + | MediaType::Dts + | MediaType::Dmts + | MediaType::Dcts + | MediaType::Tsx + | MediaType::Json + | MediaType::Wasm + | MediaType::Css + | MediaType::SourceMap => { + return None; + } + MediaType::Unknown => ( + if resolution_kind.is_types() { + vec![ + MediaType::TypeScript, + MediaType::Tsx, + MediaType::Mts, + MediaType::Dts, + MediaType::Dmts, + MediaType::Dcts, + MediaType::JavaScript, + MediaType::Jsx, + MediaType::Mjs, + ] + } else { + vec![ + MediaType::TypeScript, + MediaType::JavaScript, + MediaType::Tsx, + MediaType::Jsx, + MediaType::Mts, + MediaType::Mjs, + ] + }, + SloppyImportsResolutionReason::NoExtension, + ), + }; + let mut probe_paths = match path_without_ext(&path, media_type) { + Some(path_no_ext) => media_types_to_paths( + &path_no_ext, + media_type, + probe_media_type_types.0, + probe_media_type_types.1, + ), + None => vec![], + }; + + if matches!(entry, Some(CachedMetadataFsEntry::Dir)) { + // try to resolve at the index file + if resolution_kind.is_types() { + probe_paths.push(( + path.join("index.ts"), + SloppyImportsResolutionReason::Directory, + )); + + probe_paths.push(( + path.join("index.mts"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.d.ts"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.d.mts"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.js"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.mjs"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.tsx"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.jsx"), + SloppyImportsResolutionReason::Directory, + )); + } else { + probe_paths.push(( + path.join("index.ts"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.mts"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.tsx"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.js"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.mjs"), + SloppyImportsResolutionReason::Directory, + )); + probe_paths.push(( + path.join("index.jsx"), + SloppyImportsResolutionReason::Directory, + )); + } + } + if probe_paths.is_empty() { + return None; + } + probe_paths + } + }; + + for (probe_path, reason) in probe_paths { + if self.fs.is_file(&probe_path) { + if let Ok(specifier) = url_from_file_path(&probe_path) { + return Some((specifier, reason)); + } + } + } + + None + } +} + +pub fn sloppy_imports_resolve( + specifier: &Url, + resolution_kind: ResolutionKind, + sys: TSys, +) -> Option<(Url, SloppyImportsResolutionReason)> { 
+ SloppyImportsResolver::new( + CachedMetadataFs::new(sys, FsCacheOptions::Enabled), + SloppyImportsOptions::Enabled, + ) + .resolve(specifier, resolution_kind) +} + +#[allow(clippy::disallowed_types)] +type SloppyImportsResolverRc = + crate::sync::MaybeArc>; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum CompilerOptionsRootDirsDiagnostic { + InvalidType(Url), + InvalidEntryType(Url, usize), + UnexpectedError(Url, String), + UnexpectedEntryError(Url, usize, String), +} + +impl fmt::Display for CompilerOptionsRootDirsDiagnostic { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::InvalidType(s) => write!(f, "Invalid value for \"compilerOptions.rootDirs\" (\"{s}\"). Expected a string."), + Self::InvalidEntryType(s, i) => write!(f, "Invalid value for \"compilerOptions.rootDirs[{i}]\" (\"{s}\"). Expected a string."), + Self::UnexpectedError(s, message) => write!(f, "Unexpected error while parsing \"compilerOptions.rootDirs\" (\"{s}\"): {message}"), + Self::UnexpectedEntryError(s, i, message) => write!(f, "Unexpected error while parsing \"compilerOptions.rootDirs[{i}]\" (\"{s}\"): {message}"), + } + } +} + +#[derive(Debug)] +struct CompilerOptionsRootDirsResolver { + root_dirs_from_root: Vec, + root_dirs_by_member: BTreeMap>>, + diagnostics: Vec, + sloppy_imports_resolver: SloppyImportsResolverRc, +} + +impl CompilerOptionsRootDirsResolver { + fn from_workspace( + workspace: &Workspace, + sloppy_imports_resolver: SloppyImportsResolverRc, + ) -> Self { + let mut diagnostics: Vec = Vec::new(); + fn get_root_dirs( + config_file: &ConfigFile, + dir_url: &Url, + diagnostics: &mut Vec, + ) -> Option> { + let dir_path = url_to_file_path(dir_url) + .inspect_err(|err| { + diagnostics.push(CompilerOptionsRootDirsDiagnostic::UnexpectedError( + config_file.specifier.clone(), + err.to_string(), + )); + }) + .ok()?; + let root_dirs = config_file + .json + .compiler_options + .as_ref()? + .as_object()? + .get("rootDirs")? + .as_array(); + if root_dirs.is_none() { + diagnostics.push(CompilerOptionsRootDirsDiagnostic::InvalidType( + config_file.specifier.clone(), + )); + } + let root_dirs = root_dirs? 
+ .iter() + .enumerate() + .filter_map(|(i, s)| { + let s = s.as_str(); + if s.is_none() { + diagnostics.push( + CompilerOptionsRootDirsDiagnostic::InvalidEntryType( + config_file.specifier.clone(), + i, + ), + ); + } + url_from_directory_path(&dir_path.join(s?)) + .inspect_err(|err| { + diagnostics.push( + CompilerOptionsRootDirsDiagnostic::UnexpectedEntryError( + config_file.specifier.clone(), + i, + err.to_string(), + ), + ); + }) + .ok() + }) + .collect(); + Some(root_dirs) + } + let root_deno_json = workspace.root_deno_json(); + let root_dirs_from_root = root_deno_json + .and_then(|c| { + let root_dir_url = c + .specifier + .join(".") + .inspect_err(|err| { + diagnostics.push( + CompilerOptionsRootDirsDiagnostic::UnexpectedError( + c.specifier.clone(), + err.to_string(), + ), + ); + }) + .ok()?; + get_root_dirs(c, &root_dir_url, &mut diagnostics) + }) + .unwrap_or_default(); + let root_dirs_by_member = workspace + .resolver_deno_jsons() + .filter_map(|c| { + if let Some(root_deno_json) = root_deno_json { + if c.specifier == root_deno_json.specifier { + return None; + } + } + let dir_url = c + .specifier + .join(".") + .inspect_err(|err| { + diagnostics.push( + CompilerOptionsRootDirsDiagnostic::UnexpectedError( + c.specifier.clone(), + err.to_string(), + ), + ); + }) + .ok()?; + let root_dirs = get_root_dirs(c, &dir_url, &mut diagnostics); + Some((dir_url, root_dirs)) + }) + .collect(); + Self { + root_dirs_from_root, + root_dirs_by_member, + diagnostics, + sloppy_imports_resolver, + } + } + + fn new_raw( + root_dirs_from_root: Vec, + root_dirs_by_member: BTreeMap>>, + sloppy_imports_resolver: SloppyImportsResolverRc, + ) -> Self { + Self { + root_dirs_from_root, + root_dirs_by_member, + diagnostics: Default::default(), + sloppy_imports_resolver, + } + } + + fn resolve_types( + &self, + specifier: &Url, + referrer: &Url, + ) -> Option<(Url, Option)> { + if specifier.scheme() != "file" || referrer.scheme() != "file" { + return None; + } + let root_dirs = self + .root_dirs_by_member + .iter() + .rfind(|(s, _)| referrer.as_str().starts_with(s.as_str())) + .and_then(|(_, r)| r.as_ref()) + .unwrap_or(&self.root_dirs_from_root); + let (matched_root_dir, suffix) = root_dirs + .iter() + .filter_map(|r| { + let suffix = specifier.as_str().strip_prefix(r.as_str())?; + Some((r, suffix)) + }) + .max_by_key(|(r, _)| r.as_str().len())?; + for root_dir in root_dirs { + if root_dir == matched_root_dir { + continue; + } + let Ok(candidate_specifier) = root_dir.join(suffix) else { + continue; + }; + let Ok(candidate_path) = url_to_file_path(&candidate_specifier) else { + continue; + }; + if self.sloppy_imports_resolver.fs.is_file(&candidate_path) { + return Some((candidate_specifier, None)); + } else if let Some((candidate_specifier, sloppy_reason)) = self + .sloppy_imports_resolver + .resolve(&candidate_specifier, ResolutionKind::Types) + { + return Some((candidate_specifier, Some(sloppy_reason))); + } + } + None + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ResolutionKind { + /// Resolving for code that will be executed. + Execution, + /// Resolving for code that will be used for type information. 
+ Types, +} + +impl ResolutionKind { + pub fn is_types(&self) -> bool { + *self == ResolutionKind::Types + } +} + +impl From for ResolutionKind { + fn from(value: NodeResolutionKind) -> Self { + match value { + NodeResolutionKind::Execution => Self::Execution, + NodeResolutionKind::Types => Self::Types, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum WorkspaceResolverDiagnostic<'a> { + ImportMap(&'a ImportMapDiagnostic), + CompilerOptionsRootDirs(&'a CompilerOptionsRootDirsDiagnostic), +} + +impl fmt::Display for WorkspaceResolverDiagnostic<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::ImportMap(d) => write!(f, "Import map: {d}"), + Self::CompilerOptionsRootDirs(d) => d.fmt(f), + } + } +} + +#[derive(Debug)] +pub struct WorkspaceResolver { + workspace_root: UrlRc, + jsr_pkgs: Vec, + maybe_import_map: Option, + pkg_jsons: BTreeMap, + pkg_json_dep_resolution: PackageJsonDepResolution, + sloppy_imports_options: SloppyImportsOptions, + fs_cache_options: FsCacheOptions, + sloppy_imports_resolver: SloppyImportsResolverRc, + compiler_options_root_dirs_resolver: CompilerOptionsRootDirsResolver, +} + +impl WorkspaceResolver { + pub fn from_workspace( + workspace: &Workspace, + sys: TSys, + options: CreateResolverOptions, + ) -> Result { + fn resolve_import_map( + sys: &impl FsRead, + workspace: &Workspace, + specified_import_map: Option, + ) -> Result, WorkspaceResolverCreateError> + { + let root_deno_json = workspace.root_deno_json(); + let deno_jsons = workspace.resolver_deno_jsons().collect::>(); + + let (import_map_url, import_map) = match specified_import_map { + Some(SpecifiedImportMap { + base_url, + value: import_map, + }) => (base_url, import_map), + None => { + if !deno_jsons.iter().any(|p| p.is_package()) + && !deno_jsons.iter().any(|c| { + c.json.import_map.is_some() + || c.json.scopes.is_some() + || c.json.imports.is_some() + || c + .json + .compiler_options + .as_ref() + .and_then(|v| v.as_object()?.get("rootDirs")?.as_array()) + .is_some_and(|a| a.len() > 1) + }) + { + // no configs have an import map and none are a package, so exit + return Ok(None); + } + + let config_specified_import_map = match root_deno_json.as_ref() { + Some(deno_json) => deno_json + .to_import_map_value(sys) + .map_err(|source| WorkspaceResolverCreateError::ImportMapFetch { + referrer: deno_json.specifier.clone(), + source: Box::new(source), + })? 
+ .unwrap_or_else(|| { + ( + Cow::Borrowed(&deno_json.specifier), + serde_json::Value::Object(Default::default()), + ) + }), + None => ( + Cow::Owned(workspace.root_dir().join("deno.json").unwrap()), + serde_json::Value::Object(Default::default()), + ), + }; + let base_import_map_config = import_map::ext::ImportMapConfig { + base_url: config_specified_import_map.0.into_owned(), + import_map_value: config_specified_import_map.1, + }; + let child_import_map_configs = deno_jsons + .iter() + .filter(|f| { + Some(&f.specifier) + != root_deno_json.as_ref().map(|c| &c.specifier) + }) + .map(|config| import_map::ext::ImportMapConfig { + base_url: config.specifier.clone(), + import_map_value: { + // don't include scopes here + let mut value = serde_json::Map::with_capacity(1); + if let Some(imports) = &config.json.imports { + value.insert("imports".to_string(), imports.clone()); + } + value.into() + }, + }) + .collect::>(); + let (import_map_url, import_map) = + ::import_map::ext::create_synthetic_import_map( + base_import_map_config, + child_import_map_configs, + ); + let import_map = import_map::ext::expand_import_map_value(import_map); + log::debug!( + "Workspace config generated this import map {}", + serde_json::to_string_pretty(&import_map).unwrap() + ); + (import_map_url, import_map) + } + }; + Ok(Some(import_map::parse_from_value( + import_map_url, + import_map, + )?)) + } + + let maybe_import_map = + resolve_import_map(&sys, workspace, options.specified_import_map)?; + let jsr_pkgs = workspace.resolver_jsr_pkgs().collect::>(); + let pkg_jsons = workspace + .resolver_pkg_jsons() + .map(|(dir_url, pkg_json)| { + let deps = pkg_json.resolve_local_package_json_deps(); + ( + dir_url.clone(), + PkgJsonResolverFolderConfig { + deps: deps.clone(), + pkg_json: pkg_json.clone(), + }, + ) + }) + .collect::>(); + + let fs = CachedMetadataFs::new(sys, options.fs_cache_options); + let sloppy_imports_resolver = new_rc(SloppyImportsResolver::new( + fs, + options.sloppy_imports_options, + )); + let compiler_options_root_dirs_resolver = + CompilerOptionsRootDirsResolver::from_workspace( + workspace, + sloppy_imports_resolver.clone(), + ); + + Ok(Self { + workspace_root: workspace.root_dir().clone(), + pkg_json_dep_resolution: options.pkg_json_dep_resolution, + jsr_pkgs, + maybe_import_map, + pkg_jsons, + sloppy_imports_options: options.sloppy_imports_options, + fs_cache_options: options.fs_cache_options, + sloppy_imports_resolver, + compiler_options_root_dirs_resolver, + }) + } + + /// Creates a new WorkspaceResolver from the specified import map and package.jsons. + /// + /// Generally, create this from a Workspace instead. 
+ #[allow(clippy::too_many_arguments)] + pub fn new_raw( + workspace_root: UrlRc, + maybe_import_map: Option, + jsr_pkgs: Vec, + pkg_jsons: Vec, + pkg_json_dep_resolution: PackageJsonDepResolution, + sloppy_imports_options: SloppyImportsOptions, + fs_cache_options: FsCacheOptions, + root_dirs_from_root: Vec, + root_dirs_by_member: BTreeMap>>, + sys: TSys, + ) -> Self { + let maybe_import_map = + maybe_import_map.map(|import_map| ImportMapWithDiagnostics { + import_map, + diagnostics: Default::default(), + }); + let pkg_jsons = pkg_jsons + .into_iter() + .map(|pkg_json| { + let deps = pkg_json.resolve_local_package_json_deps(); + ( + new_rc( + url_from_directory_path(pkg_json.path.parent().unwrap()).unwrap(), + ), + PkgJsonResolverFolderConfig { + deps: deps.clone(), + pkg_json, + }, + ) + }) + .collect::>(); + let fs = CachedMetadataFs::new(sys, fs_cache_options); + let sloppy_imports_resolver = + new_rc(SloppyImportsResolver::new(fs, sloppy_imports_options)); + let compiler_options_root_dirs_resolver = + CompilerOptionsRootDirsResolver::new_raw( + root_dirs_from_root, + root_dirs_by_member, + sloppy_imports_resolver.clone(), + ); + Self { + workspace_root, + jsr_pkgs, + maybe_import_map, + pkg_jsons, + pkg_json_dep_resolution, + sloppy_imports_options, + fs_cache_options, + sloppy_imports_resolver, + compiler_options_root_dirs_resolver, + } + } + + /// Prepare the workspace resolver for serialization + /// + /// The most significant preparation involves converting + /// absolute paths into relative (based on `root_dir_url`). + /// It also takes care of pre-serializing non-serde internal data. + pub fn to_serializable( + &self, + root_dir_url: &Url, + ) -> SerializableWorkspaceResolver { + let root_dir_url = BaseUrl(root_dir_url); + SerializableWorkspaceResolver { + import_map: self.maybe_import_map().map(|i| { + SerializedWorkspaceResolverImportMap { + specifier: root_dir_url.make_relative_if_descendant(i.base_url()), + json: Cow::Owned(i.to_json()), + } + }), + jsr_pkgs: self + .jsr_packages() + .map(|pkg| SerializedResolverWorkspaceJsrPackage { + relative_base: root_dir_url.make_relative_if_descendant(&pkg.base), + name: Cow::Borrowed(&pkg.name), + version: Cow::Borrowed(&pkg.version), + exports: Cow::Borrowed(&pkg.exports), + }) + .collect(), + package_jsons: self + .package_jsons() + .map(|pkg_json| { + ( + root_dir_url + .make_relative_if_descendant(&pkg_json.specifier()) + .into_owned(), + serde_json::to_value(pkg_json).unwrap(), + ) + }) + .collect(), + pkg_json_resolution: self.pkg_json_dep_resolution(), + sloppy_imports_options: self.sloppy_imports_options, + fs_cache_options: self.fs_cache_options, + root_dirs_from_root: self + .compiler_options_root_dirs_resolver + .root_dirs_from_root + .iter() + .map(|s| root_dir_url.make_relative_if_descendant(s)) + .collect(), + root_dirs_by_member: self + .compiler_options_root_dirs_resolver + .root_dirs_by_member + .iter() + .map(|(s, r)| { + ( + root_dir_url.make_relative_if_descendant(s), + r.as_ref().map(|r| { + r.iter() + .map(|s| root_dir_url.make_relative_if_descendant(s)) + .collect() + }), + ) + }) + .collect(), + } + } + + /// Deserialize a `WorkspaceResolver` + /// + /// Deserialization of `WorkspaceResolver`s is made in two steps. First + /// the serialized data must be deserialized in to `SerializableWorkspaceResolver` + /// (usually with serde), and then this method converts it into a `WorkspaceResolver`. 
+ /// + /// This second step involves mainly converting the relative paths within + /// `SerializableWorkspaceResolver` into absolute paths using `root_dir_url`. + pub fn try_from_serializable( + root_dir_url: Url, + serializable_workspace_resolver: SerializableWorkspaceResolver, + sys: TSys, + ) -> Result { + let import_map = match serializable_workspace_resolver.import_map { + Some(import_map) => Some( + import_map::parse_from_json_with_options( + root_dir_url.join(&import_map.specifier).unwrap(), + &import_map.json, + import_map::ImportMapOptions { + address_hook: None, + expand_imports: true, + }, + )? + .import_map, + ), + None => None, + }; + let pkg_jsons = serializable_workspace_resolver + .package_jsons + .into_iter() + .map(|(relative_path, json)| { + let path = + url_to_file_path(&root_dir_url.join(&relative_path).unwrap()) + .unwrap(); + let pkg_json = + deno_package_json::PackageJson::load_from_value(path, json); + PackageJsonRc::new(pkg_json) + }) + .collect(); + let jsr_packages = serializable_workspace_resolver + .jsr_pkgs + .into_iter() + .map(|pkg| ResolverWorkspaceJsrPackage { + is_patch: false, // only used for enhancing the diagnostics, which are discarded when serializing + base: root_dir_url.join(&pkg.relative_base).unwrap(), + name: pkg.name.into_owned(), + version: pkg.version.into_owned(), + exports: pkg.exports.into_owned(), + }) + .collect(); + let root_dirs_from_root = serializable_workspace_resolver + .root_dirs_from_root + .iter() + .map(|s| root_dir_url.join(s).unwrap()) + .collect(); + let root_dirs_by_member = serializable_workspace_resolver + .root_dirs_by_member + .iter() + .map(|(s, r)| { + ( + root_dir_url.join(s).unwrap(), + r.as_ref() + .map(|r| r.iter().map(|s| root_dir_url.join(s).unwrap()).collect()), + ) + }) + .collect(); + Ok(Self::new_raw( + UrlRc::new(root_dir_url), + import_map, + jsr_packages, + pkg_jsons, + serializable_workspace_resolver.pkg_json_resolution, + serializable_workspace_resolver.sloppy_imports_options, + serializable_workspace_resolver.fs_cache_options, + root_dirs_from_root, + root_dirs_by_member, + sys, + )) + } + + pub fn maybe_import_map(&self) -> Option<&ImportMap> { + self.maybe_import_map.as_ref().map(|c| &c.import_map) + } + + pub fn package_jsons(&self) -> impl Iterator { + self.pkg_jsons.values().map(|c| &c.pkg_json) + } + + pub fn jsr_packages( + &self, + ) -> impl Iterator { + self.jsr_pkgs.iter() + } + + pub fn diagnostics(&self) -> Vec> { + self + .compiler_options_root_dirs_resolver + .diagnostics + .iter() + .map(WorkspaceResolverDiagnostic::CompilerOptionsRootDirs) + .chain( + self + .maybe_import_map + .as_ref() + .iter() + .flat_map(|c| &c.diagnostics) + .map(WorkspaceResolverDiagnostic::ImportMap), + ) + .collect() + } + + pub fn resolve<'a>( + &'a self, + specifier: &str, + referrer: &Url, + resolution_kind: ResolutionKind, + ) -> Result, MappedResolutionError> { + // 1. 
Attempt to resolve with the import map and normally first + let mut used_import_map = false; + let resolve_result = if let Some(import_map) = &self.maybe_import_map { + used_import_map = true; + import_map + .import_map + .resolve(specifier, referrer) + .map_err(MappedResolutionError::ImportMap) + } else { + import_map::specifier::resolve_import(specifier, referrer) + .map_err(MappedResolutionError::Specifier) + }; + let resolve_error = match resolve_result { + Ok(mut specifier) => { + let mut used_compiler_options_root_dirs = false; + let mut sloppy_reason = None; + if let Some((probed_specifier, probed_sloppy_reason)) = self + .sloppy_imports_resolver + .resolve(&specifier, resolution_kind) + { + specifier = probed_specifier; + sloppy_reason = Some(probed_sloppy_reason); + } else if resolution_kind.is_types() { + if let Some((probed_specifier, probed_sloppy_reason)) = self + .compiler_options_root_dirs_resolver + .resolve_types(&specifier, referrer) + { + used_compiler_options_root_dirs = true; + specifier = probed_specifier; + sloppy_reason = probed_sloppy_reason; + } + } + return self.maybe_resolve_specifier_to_workspace_jsr_pkg( + MappedResolution::Normal { + specifier, + used_import_map, + used_compiler_options_root_dirs, + sloppy_reason, + maybe_diagnostic: None, + }, + ); + } + Err(err) => err, + }; + + // 2. Try to resolve the bare specifier to a workspace member + if resolve_error.is_unmapped_bare_specifier() { + for member in &self.jsr_pkgs { + if let Some(path) = specifier.strip_prefix(&member.name) { + if path.is_empty() || path.starts_with('/') { + let path = path.strip_prefix('/').unwrap_or(path); + let pkg_req_ref = match JsrPackageReqReference::from_str(&format!( + "jsr:{}{}/{}", + member.name, + member + .version + .as_ref() + .map(|v| format!("@^{}", v)) + .unwrap_or_else(String::new), + path + )) { + Ok(pkg_req_ref) => pkg_req_ref, + Err(_) => { + // Ignore the error as it will be surfaced as a diagnostic + // in workspace.diagnostics() routine. + continue; + } + }; + return self.resolve_workspace_jsr_pkg(member, pkg_req_ref); + } + } + } + } + + if self.pkg_json_dep_resolution == PackageJsonDepResolution::Enabled { + // 2. Attempt to resolve from the package.json dependencies. + let mut previously_found_dir = false; + for (dir_url, pkg_json_folder) in self.pkg_jsons.iter().rev() { + if !referrer.as_str().starts_with(dir_url.as_str()) { + if previously_found_dir { + break; + } else { + continue; + } + } + previously_found_dir = true; + + for (bare_specifier, dep_result) in pkg_json_folder + .deps + .dependencies + .iter() + .chain(pkg_json_folder.deps.dev_dependencies.iter()) + { + if let Some(path) = specifier.strip_prefix(bare_specifier.as_str()) { + if path.is_empty() || path.starts_with('/') { + let sub_path = path.strip_prefix('/').unwrap_or(path); + return Ok(MappedResolution::PackageJson { + pkg_json: &pkg_json_folder.pkg_json, + alias: bare_specifier, + sub_path: if sub_path.is_empty() { + None + } else { + Some(sub_path.to_string()) + }, + dep_result, + }); + } + } + } + } + + // 3. Finally try to resolve to a workspace npm package if inside the workspace. 
+ if referrer.as_str().starts_with(self.workspace_root.as_str()) { + for pkg_json_folder in self.pkg_jsons.values() { + let Some(name) = &pkg_json_folder.pkg_json.name else { + continue; + }; + let Some(path) = specifier.strip_prefix(name) else { + continue; + }; + if path.is_empty() || path.starts_with('/') { + let sub_path = path.strip_prefix('/').unwrap_or(path); + return Ok(MappedResolution::WorkspaceNpmPackage { + target_pkg_json: &pkg_json_folder.pkg_json, + pkg_name: name, + sub_path: if sub_path.is_empty() { + None + } else { + Some(sub_path.to_string()) + }, + }); + } + } + } + } + + // wasn't found, so surface the initial resolve error + Err(resolve_error) + } + + fn maybe_resolve_specifier_to_workspace_jsr_pkg<'a>( + &'a self, + resolution: MappedResolution<'a>, + ) -> Result, MappedResolutionError> { + let specifier = match resolution { + MappedResolution::Normal { ref specifier, .. } => specifier, + _ => return Ok(resolution), + }; + if specifier.scheme() != "jsr" { + return Ok(resolution); + } + let mut maybe_diagnostic = None; + if let Ok(package_req_ref) = + JsrPackageReqReference::from_specifier(specifier) + { + for pkg in &self.jsr_pkgs { + if pkg.name == package_req_ref.req().name { + if let Some(version) = &pkg.version { + if package_req_ref.req().version_req.matches(version) { + return self.resolve_workspace_jsr_pkg(pkg, package_req_ref); + } else { + maybe_diagnostic = Some(Box::new( + MappedResolutionDiagnostic::ConstraintNotMatchedLocalVersion { + is_patch: pkg.is_patch, + reference: package_req_ref.clone(), + local_version: version.clone(), + }, + )); + } + } else { + // always resolve to workspace packages with no version + return self.resolve_workspace_jsr_pkg(pkg, package_req_ref); + } + } + } + } + Ok(match resolution { + MappedResolution::Normal { + specifier, + used_import_map, + used_compiler_options_root_dirs, + sloppy_reason, + .. 
+ } => MappedResolution::Normal { + specifier, + used_import_map, + used_compiler_options_root_dirs, + sloppy_reason, + maybe_diagnostic, + }, + _ => return Ok(resolution), + }) + } + + fn resolve_workspace_jsr_pkg<'a>( + &'a self, + pkg: &'a ResolverWorkspaceJsrPackage, + pkg_req_ref: JsrPackageReqReference, + ) -> Result, MappedResolutionError> { + let export_name = pkg_req_ref.export_name(); + match pkg.exports.get(export_name.as_ref()) { + Some(sub_path) => match pkg.base.join(sub_path) { + Ok(specifier) => Ok(MappedResolution::WorkspaceJsrPackage { + specifier, + pkg_req_ref, + }), + Err(err) => Err( + WorkspaceResolveError::InvalidExportPath { + base: pkg.base.clone(), + sub_path: sub_path.to_string(), + error: err, + } + .into(), + ), + }, + None => Err( + WorkspaceResolveError::UnknownExport { + package_name: pkg.name.clone(), + export_name: export_name.to_string(), + exports: pkg.exports.keys().cloned().collect(), + } + .into(), + ), + } + } + + pub fn resolve_workspace_pkg_json_folder_for_npm_specifier( + &self, + pkg_req: &PackageReq, + ) -> Option<&Path> { + if pkg_req.version_req.tag().is_some() { + return None; + } + + self + .resolve_workspace_pkg_json_folder_for_pkg_json_dep( + &pkg_req.name, + &PackageJsonDepWorkspaceReq::VersionReq(pkg_req.version_req.clone()), + ) + .ok() + } + + pub fn resolve_workspace_pkg_json_folder_for_pkg_json_dep( + &self, + name: &str, + workspace_version_req: &PackageJsonDepWorkspaceReq, + ) -> Result<&Path, WorkspaceResolvePkgJsonFolderError> { + // this is not conditional on pkg_json_dep_resolution because we want + // to be able to do this resolution to figure out mapping an npm specifier + // to a workspace folder when using BYONM + let pkg_json = self + .package_jsons() + .find(|p| p.name.as_deref() == Some(name)); + let Some(pkg_json) = pkg_json else { + return Err( + WorkspaceResolvePkgJsonFolderErrorKind::NotFound(name.to_string()) + .into(), + ); + }; + match workspace_version_req { + PackageJsonDepWorkspaceReq::VersionReq(version_req) => { + match version_req.inner() { + RangeSetOrTag::RangeSet(set) => { + if let Some(version) = pkg_json + .version + .as_ref() + .and_then(|v| Version::parse_from_npm(v).ok()) + { + if set.satisfies(&version) { + Ok(pkg_json.dir_path()) + } else { + Err( + WorkspaceResolvePkgJsonFolderErrorKind::VersionNotSatisfied( + version_req.clone(), + version, + ) + .into(), + ) + } + } else { + // just match it + Ok(pkg_json.dir_path()) + } + } + RangeSetOrTag::Tag(_) => { + // always match tags + Ok(pkg_json.dir_path()) + } + } + } + PackageJsonDepWorkspaceReq::Tilde | PackageJsonDepWorkspaceReq::Caret => { + // always match tilde and caret requirements + Ok(pkg_json.dir_path()) + } + } + } + + pub fn pkg_json_dep_resolution(&self) -> PackageJsonDepResolution { + self.pkg_json_dep_resolution + } + + pub fn sloppy_imports_enabled(&self) -> bool { + match self.sloppy_imports_options { + SloppyImportsOptions::Enabled => true, + SloppyImportsOptions::Disabled => false, + } + } + + pub fn has_compiler_options_root_dirs(&self) -> bool { + !self + .compiler_options_root_dirs_resolver + .root_dirs_from_root + .is_empty() + || self + .compiler_options_root_dirs_resolver + .root_dirs_by_member + .values() + .flatten() + .any(|r| !r.is_empty()) + } +} + +#[derive(Deserialize, Serialize)] +pub struct SerializedWorkspaceResolverImportMap<'a> { + #[serde(borrow)] + pub specifier: Cow<'a, str>, + #[serde(borrow)] + pub json: Cow<'a, str>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct 
SerializedResolverWorkspaceJsrPackage<'a> { + #[serde(borrow)] + pub relative_base: Cow<'a, str>, + #[serde(borrow)] + pub name: Cow<'a, str>, + pub version: Cow<'a, Option>, + pub exports: Cow<'a, IndexMap>, +} + +#[derive(Deserialize, Serialize)] +pub struct SerializableWorkspaceResolver<'a> { + #[serde(borrow)] + pub import_map: Option>, + #[serde(borrow)] + pub jsr_pkgs: Vec>, + pub package_jsons: Vec<(String, serde_json::Value)>, + pub pkg_json_resolution: PackageJsonDepResolution, + pub sloppy_imports_options: SloppyImportsOptions, + pub fs_cache_options: FsCacheOptions, + pub root_dirs_from_root: Vec>, + pub root_dirs_by_member: BTreeMap, Option>>>, +} + +#[derive(Debug, Clone, Copy)] +struct BaseUrl<'a>(&'a Url); + +impl BaseUrl<'_> { + fn make_relative_if_descendant<'a>(&self, target: &'a Url) -> Cow<'a, str> { + if target.scheme() != "file" { + return Cow::Borrowed(target.as_str()); + } + + match self.0.make_relative(target) { + Some(relative) => { + if relative.starts_with("../") { + Cow::Borrowed(target.as_str()) + } else { + Cow::Owned(relative) + } + } + None => Cow::Borrowed(target.as_str()), + } + } +} + +#[cfg(test)] +mod test { + use std::path::Path; + use std::path::PathBuf; + + use deno_config::workspace::WorkspaceDirectory; + use deno_config::workspace::WorkspaceDiscoverOptions; + use deno_config::workspace::WorkspaceDiscoverStart; + use deno_path_util::url_from_directory_path; + use deno_path_util::url_from_file_path; + use deno_semver::VersionReq; + use serde_json::json; + use sys_traits::impls::InMemorySys; + use sys_traits::FsCanonicalize; + use url::Url; + + use super::*; + + pub struct UnreachableSys; + + impl sys_traits::BaseFsMetadata for UnreachableSys { + type Metadata = sys_traits::impls::RealFsMetadata; + + #[doc(hidden)] + fn base_fs_metadata( + &self, + _path: &Path, + ) -> std::io::Result { + unreachable!() + } + + #[doc(hidden)] + fn base_fs_symlink_metadata( + &self, + _path: &Path, + ) -> std::io::Result { + unreachable!() + } + } + + impl sys_traits::BaseFsRead for UnreachableSys { + fn base_fs_read( + &self, + _path: &Path, + ) -> std::io::Result> { + unreachable!() + } + } + + fn root_dir() -> PathBuf { + if cfg!(windows) { + PathBuf::from("C:\\Users\\user") + } else { + PathBuf::from("/home/user") + } + } + + #[test] + fn pkg_json_resolution() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": [ + "a", + "b", + "c", + ] + }), + ); + sys.fs_insert_json( + root_dir().join("a/deno.json"), + json!({ + "imports": { + "b": "./index.js", + }, + }), + ); + sys.fs_insert_json( + root_dir().join("b/package.json"), + json!({ + "dependencies": { + "pkg": "npm:pkg@^1.0.0", + }, + }), + ); + sys.fs_insert_json( + root_dir().join("c/package.json"), + json!({ + "name": "pkg", + "version": "0.5.0" + }), + ); + let workspace = workspace_at_start_dir(&sys, &root_dir()); + let resolver = create_resolver(&workspace); + assert_eq!(resolver.diagnostics(), Vec::new()); + let resolve = |name: &str, referrer: &str| { + resolver.resolve( + name, + &url_from_file_path(&deno_path_util::normalize_path( + root_dir().join(referrer), + )) + .unwrap(), + ResolutionKind::Execution, + ) + }; + match resolve("pkg", "b/index.js").unwrap() { + MappedResolution::PackageJson { + alias, + sub_path, + dep_result, + .. 
+ } => { + assert_eq!(alias, "pkg"); + assert_eq!(sub_path, None); + dep_result.as_ref().unwrap(); + } + value => unreachable!("{:?}", value), + } + match resolve("pkg/sub-path", "b/index.js").unwrap() { + MappedResolution::PackageJson { + alias, + sub_path, + dep_result, + .. + } => { + assert_eq!(alias, "pkg"); + assert_eq!(sub_path.unwrap(), "sub-path"); + dep_result.as_ref().unwrap(); + } + value => unreachable!("{:?}", value), + } + + // pkg is not a dependency in this folder, so it should resolve + // to the workspace member + match resolve("pkg", "index.js").unwrap() { + MappedResolution::WorkspaceNpmPackage { + pkg_name, + sub_path, + target_pkg_json, + } => { + assert_eq!(pkg_name, "pkg"); + assert_eq!(sub_path, None); + assert_eq!(target_pkg_json.dir_path(), root_dir().join("c")); + } + _ => unreachable!(), + } + match resolve("pkg/sub-path", "index.js").unwrap() { + MappedResolution::WorkspaceNpmPackage { + pkg_name, + sub_path, + target_pkg_json, + } => { + assert_eq!(pkg_name, "pkg"); + assert_eq!(sub_path.unwrap(), "sub-path"); + assert_eq!(target_pkg_json.dir_path(), root_dir().join("c")); + } + _ => unreachable!(), + } + + // won't resolve the package outside the workspace + assert!(resolve("pkg", "../outside-workspace.js").is_err()); + } + + #[test] + fn single_pkg_no_import_map() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "name": "@scope/pkg", + "version": "1.0.0", + "exports": "./mod.ts" + }), + ); + let workspace = workspace_at_start_dir(&sys, &root_dir()); + let resolver = create_resolver(&workspace); + assert_eq!(resolver.diagnostics(), Vec::new()); + let result = resolver + .resolve( + "@scope/pkg", + &url_from_file_path(&root_dir().join("file.ts")).unwrap(), + ResolutionKind::Execution, + ) + .unwrap(); + match result { + MappedResolution::WorkspaceJsrPackage { specifier, .. 
} => { + assert_eq!( + specifier, + url_from_file_path(&root_dir().join("mod.ts")).unwrap() + ); + } + _ => unreachable!(), + } + } + + #[test] + fn resolve_workspace_pkg_json_folder() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": [ + "a", + "b", + "no-version" + ] + }), + ); + sys.fs_insert_json( + root_dir().join("a/package.json"), + json!({ + "name": "@scope/a", + "version": "1.0.0", + }), + ); + sys.fs_insert_json( + root_dir().join("b/package.json"), + json!({ + "name": "@scope/b", + "version": "2.0.0", + }), + ); + sys.fs_insert_json( + root_dir().join("no-version/package.json"), + json!({ + "name": "@scope/no-version", + }), + ); + let workspace = workspace_at_start_dir(&sys, &root_dir()); + let resolver = create_resolver(&workspace); + // resolve for pkg json dep + { + let resolve = |name: &str, req: &str| { + resolver.resolve_workspace_pkg_json_folder_for_pkg_json_dep( + name, + &PackageJsonDepWorkspaceReq::VersionReq( + VersionReq::parse_from_npm(req).unwrap(), + ), + ) + }; + assert_eq!( + resolve("non-existent", "*").map_err(|e| e.into_kind()), + Err(WorkspaceResolvePkgJsonFolderErrorKind::NotFound( + "non-existent".to_string() + )) + ); + assert_eq!( + resolve("@scope/a", "6").map_err(|e| e.into_kind()), + Err(WorkspaceResolvePkgJsonFolderErrorKind::VersionNotSatisfied( + VersionReq::parse_from_npm("6").unwrap(), + Version::parse_from_npm("1.0.0").unwrap(), + )) + ); + assert_eq!(resolve("@scope/a", "1").unwrap(), root_dir().join("a")); + assert_eq!(resolve("@scope/a", "*").unwrap(), root_dir().join("a")); + assert_eq!( + resolve("@scope/a", "workspace").unwrap(), + root_dir().join("a") + ); + assert_eq!(resolve("@scope/b", "2").unwrap(), root_dir().join("b")); + // just match any tags with the workspace + assert_eq!(resolve("@scope/a", "latest").unwrap(), root_dir().join("a")); + + // match any version for a pkg with no version + assert_eq!( + resolve("@scope/no-version", "1").unwrap(), + root_dir().join("no-version") + ); + assert_eq!( + resolve("@scope/no-version", "20").unwrap(), + root_dir().join("no-version") + ); + } + // resolve for specifier + { + let resolve = |pkg_req: &str| { + resolver.resolve_workspace_pkg_json_folder_for_npm_specifier( + &PackageReq::from_str(pkg_req).unwrap(), + ) + }; + assert_eq!(resolve("non-existent@*"), None); + assert_eq!( + resolve("@scope/no-version@1").unwrap(), + root_dir().join("no-version") + ); + + // won't match for tags + assert_eq!(resolve("@scope/a@workspace"), None); + assert_eq!(resolve("@scope/a@latest"), None); + } + } + + #[test] + fn resolve_workspace_pkg_json_workspace_deno_json_import_map() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["*"] + }), + ); + sys.fs_insert_json( + root_dir().join("a/package.json"), + json!({ + "name": "@scope/a", + "version": "1.0.0", + }), + ); + sys.fs_insert_json( + root_dir().join("a/deno.json"), + json!({ + "name": "@scope/jsr-pkg", + "version": "1.0.0", + "exports": "./mod.ts" + }), + ); + + let workspace = workspace_at_start_dir(&sys, &root_dir()); + let resolver = create_resolver(&workspace); + { + let resolution = resolver + .resolve( + "@scope/jsr-pkg", + &url_from_file_path(&root_dir().join("b.ts")).unwrap(), + ResolutionKind::Execution, + ) + .unwrap(); + match resolution { + MappedResolution::WorkspaceJsrPackage { specifier, .. 
} => { + assert_eq!( + specifier, + url_from_file_path(&root_dir().join("a/mod.ts")).unwrap() + ); + } + _ => unreachable!(), + } + } + { + let resolution_err = resolver + .resolve( + "@scope/jsr-pkg/not-found-export", + &url_from_file_path(&root_dir().join("b.ts")).unwrap(), + ResolutionKind::Execution, + ) + .unwrap_err(); + match resolution_err { + MappedResolutionError::Workspace( + WorkspaceResolveError::UnknownExport { + package_name, + export_name, + exports, + }, + ) => { + assert_eq!(package_name, "@scope/jsr-pkg"); + assert_eq!(export_name, "./not-found-export"); + assert_eq!(exports, vec!["."]); + } + _ => unreachable!(), + } + } + } + + #[test] + fn root_member_imports_and_scopes() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["member"], + "imports": { + "@scope/pkg": "jsr:@scope/pkg@1", + }, + "scopes": { + "https://deno.land/x/": { + "@scope/pkg": "jsr:@scope/pkg@2", + }, + }, + }), + ); + // Overrides `rootDirs` from workspace root. + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "imports": { + "@scope/pkg": "jsr:@scope/pkg@3", + }, + // will ignore this scopes because it's not in the root + "scopes": { + "https://deno.land/x/other": { + "@scope/pkg": "jsr:@scope/pkg@4", + }, + }, + }), + ); + + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let resolver = WorkspaceResolver::from_workspace( + &workspace_dir.workspace, + sys.clone(), + super::CreateResolverOptions { + pkg_json_dep_resolution: PackageJsonDepResolution::Enabled, + specified_import_map: None, + sloppy_imports_options: SloppyImportsOptions::Disabled, + fs_cache_options: FsCacheOptions::Enabled, + }, + ) + .unwrap(); + assert_eq!( + serde_json::from_str::( + &resolver.maybe_import_map().unwrap().to_json() + ) + .unwrap(), + json!({ + "imports": { + "@scope/pkg": "jsr:@scope/pkg@1", + "@scope/pkg/": "jsr:/@scope/pkg@1/", + }, + "scopes": { + "https://deno.land/x/": { + "@scope/pkg": "jsr:@scope/pkg@2", + "@scope/pkg/": "jsr:/@scope/pkg@2/", + }, + "./member/": { + "@scope/pkg": "jsr:@scope/pkg@3", + "@scope/pkg/": "jsr:/@scope/pkg@3/", + }, + }, + }), + ); + } + + #[test] + fn resolve_sloppy_imports() { + let sys = InMemorySys::default(); + let root_url = url_from_file_path( + &sys_traits::impls::RealSys.fs_canonicalize("/").unwrap(), + ) + .unwrap(); + let fs = CachedMetadataFs::new(sys.clone(), FsCacheOptions::Enabled); + let sloppy_imports_resolver = + SloppyImportsResolver::new(fs, SloppyImportsOptions::Enabled); + + // scenarios like resolving ./example.js to ./example.ts + for (file_from, file_to) in [ + ("file1.js", "file1.ts"), + ("file2.js", "file2.tsx"), + ("file3.mjs", "file3.mts"), + ] { + let specifier = root_url.join(file_to).unwrap(); + sys.fs_insert(url_to_file_path(&specifier).unwrap(), ""); + let sloppy_specifier = root_url.join(file_from).unwrap(); + assert_eq!( + sloppy_imports_resolver.resolve(&specifier, ResolutionKind::Execution), + None, + ); + assert_eq!( + sloppy_imports_resolver + .resolve(&sloppy_specifier, ResolutionKind::Execution), + Some((specifier, SloppyImportsResolutionReason::JsToTs)), + ); + } + + // no extension scenarios + for file in [ + "file10.js", + "file11.ts", + "file12.js", + "file13.tsx", + "file14.jsx", + "file15.mjs", + "file16.mts", + ] { + let specifier = root_url.join(file).unwrap(); + sys.fs_insert(url_to_file_path(&specifier).unwrap(), ""); + let sloppy_specifier = + root_url.join(file.split_once('.').unwrap().0).unwrap(); + assert_eq!( + 
sloppy_imports_resolver.resolve(&specifier, ResolutionKind::Execution), + None, + ); + assert_eq!( + sloppy_imports_resolver + .resolve(&sloppy_specifier, ResolutionKind::Execution), + Some((specifier, SloppyImportsResolutionReason::NoExtension)), + ); + } + + // .ts and .js exists, .js specified (goes to specified) + { + let ts_specifier = root_url.join("ts_and_js.ts").unwrap(); + sys.fs_insert(url_to_file_path(&ts_specifier).unwrap(), ""); + let js_specifier = root_url.join("ts_and_js.js").unwrap(); + sys.fs_insert(url_to_file_path(&js_specifier).unwrap(), ""); + assert_eq!( + sloppy_imports_resolver + .resolve(&js_specifier, ResolutionKind::Execution), + None, + ); + } + + // only js exists, .js specified + { + let specifier = root_url.join("js_only.js").unwrap(); + sys.fs_insert(url_to_file_path(&specifier).unwrap(), ""); + assert_eq!( + sloppy_imports_resolver.resolve(&specifier, ResolutionKind::Execution), + None, + ); + assert_eq!( + sloppy_imports_resolver.resolve(&specifier, ResolutionKind::Types), + None, + ); + } + + // resolving a directory to an index file + { + let specifier = root_url.join("routes/index.ts").unwrap(); + sys.fs_insert(url_to_file_path(&specifier).unwrap(), ""); + let sloppy_specifier = root_url.join("routes").unwrap(); + assert_eq!( + sloppy_imports_resolver + .resolve(&sloppy_specifier, ResolutionKind::Execution), + Some((specifier, SloppyImportsResolutionReason::Directory)), + ); + } + + // both a directory and a file with specifier is present + { + let specifier = root_url.join("api.ts").unwrap(); + sys.fs_insert(url_to_file_path(&specifier).unwrap(), ""); + let bar_specifier = root_url.join("api/bar.ts").unwrap(); + sys.fs_insert(url_to_file_path(&bar_specifier).unwrap(), ""); + let sloppy_specifier = root_url.join("api").unwrap(); + assert_eq!( + sloppy_imports_resolver + .resolve(&sloppy_specifier, ResolutionKind::Execution), + Some((specifier, SloppyImportsResolutionReason::NoExtension)), + ); + } + } + + #[test] + fn test_sloppy_import_resolution_suggestion_message() { + // directory + assert_eq!( + SloppyImportsResolutionReason::Directory + .suggestion_message_for_specifier( + &Url::parse("file:///dir/index.js").unwrap() + ) + .as_str(), + "Maybe specify path to 'index.js' file in directory instead" + ); + // no ext + assert_eq!( + SloppyImportsResolutionReason::NoExtension + .suggestion_message_for_specifier( + &Url::parse("file:///dir/index.mjs").unwrap() + ) + .as_str(), + "Maybe add a '.mjs' extension" + ); + // js to ts + assert_eq!( + SloppyImportsResolutionReason::JsToTs + .suggestion_message_for_specifier( + &Url::parse("file:///dir/index.mts").unwrap() + ) + .as_str(), + "Maybe change the extension to '.mts'" + ); + } + + #[test] + fn resolve_compiler_options_root_dirs() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["member", "member2"], + "compilerOptions": { + "rootDirs": ["member", "member2", "member2_types"], + }, + }), + ); + // Overrides `rootDirs` from workspace root. + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "compilerOptions": { + "rootDirs": ["foo", "foo_types"], + }, + }), + ); + // Use `rootDirs` from workspace root. + sys.fs_insert_json(root_dir().join("member2/deno.json"), json!({})); + sys.fs_insert(root_dir().join("member/foo_types/import.ts"), ""); + sys.fs_insert(root_dir().join("member2_types/import.ts"), ""); + // This file should be ignored. It would be used if `member/deno.json` had + // no `rootDirs`. 
+ sys.fs_insert(root_dir().join("member2_types/foo/import.ts"), ""); + + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let resolver = WorkspaceResolver::from_workspace( + &workspace_dir.workspace, + sys.clone(), + super::CreateResolverOptions { + pkg_json_dep_resolution: PackageJsonDepResolution::Enabled, + specified_import_map: None, + sloppy_imports_options: SloppyImportsOptions::Disabled, + fs_cache_options: FsCacheOptions::Enabled, + }, + ) + .unwrap(); + let root_dir_url = workspace_dir.workspace.root_dir(); + + let referrer = root_dir_url.join("member/foo/mod.ts").unwrap(); + let resolution = resolver + .resolve("./import.ts", &referrer, ResolutionKind::Types) + .unwrap(); + let MappedResolution::Normal { + specifier, + sloppy_reason, + used_compiler_options_root_dirs, + .. + } = &resolution + else { + unreachable!("{:#?}", &resolution); + }; + assert_eq!( + specifier.as_str(), + root_dir_url + .join("member/foo_types/import.ts") + .unwrap() + .as_str() + ); + assert_eq!(sloppy_reason, &None); + assert!(used_compiler_options_root_dirs); + + let referrer = root_dir_url.join("member2/mod.ts").unwrap(); + let resolution = resolver + .resolve("./import.ts", &referrer, ResolutionKind::Types) + .unwrap(); + let MappedResolution::Normal { + specifier, + sloppy_reason, + used_compiler_options_root_dirs, + .. + } = &resolution + else { + unreachable!("{:#?}", &resolution); + }; + assert_eq!( + specifier.as_str(), + root_dir_url + .join("member2_types/import.ts") + .unwrap() + .as_str() + ); + assert_eq!(sloppy_reason, &None); + assert!(used_compiler_options_root_dirs); + + // Ignore rootDirs for `ResolutionKind::Execution`. + let referrer = root_dir_url.join("member/foo/mod.ts").unwrap(); + let resolution = resolver + .resolve("./import.ts", &referrer, ResolutionKind::Execution) + .unwrap(); + let MappedResolution::Normal { + specifier, + sloppy_reason, + used_compiler_options_root_dirs, + .. + } = &resolution + else { + unreachable!("{:#?}", &resolution); + }; + assert_eq!( + specifier.as_str(), + root_dir_url.join("member/foo/import.ts").unwrap().as_str() + ); + assert_eq!(sloppy_reason, &None); + assert!(!used_compiler_options_root_dirs); + + // Ignore rootDirs for `ResolutionKind::Execution`. + let referrer = root_dir_url.join("member2/mod.ts").unwrap(); + let resolution = resolver + .resolve("./import.ts", &referrer, ResolutionKind::Execution) + .unwrap(); + let MappedResolution::Normal { + specifier, + sloppy_reason, + used_compiler_options_root_dirs, + .. 
+ } = &resolution + else { + unreachable!("{:#?}", &resolution); + }; + assert_eq!( + specifier.as_str(), + root_dir_url.join("member2/import.ts").unwrap().as_str() + ); + assert_eq!(sloppy_reason, &None); + assert!(!used_compiler_options_root_dirs); + } + + #[test] + fn resolve_compiler_options_root_dirs_and_sloppy_imports() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "compilerOptions": { + "rootDirs": ["subdir", "subdir_types"], + }, + }), + ); + sys.fs_insert(root_dir().join("subdir_types/import.ts"), ""); + + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let resolver = WorkspaceResolver::from_workspace( + &workspace_dir.workspace, + sys.clone(), + super::CreateResolverOptions { + pkg_json_dep_resolution: PackageJsonDepResolution::Enabled, + specified_import_map: None, + sloppy_imports_options: SloppyImportsOptions::Enabled, + fs_cache_options: FsCacheOptions::Enabled, + }, + ) + .unwrap(); + let root_dir_url = workspace_dir.workspace.root_dir(); + + let referrer = root_dir_url.join("subdir/mod.ts").unwrap(); + let resolution = resolver + .resolve("./import", &referrer, ResolutionKind::Types) + .unwrap(); + let MappedResolution::Normal { + specifier, + sloppy_reason, + used_compiler_options_root_dirs, + .. + } = &resolution + else { + unreachable!("{:#?}", &resolution); + }; + assert_eq!( + specifier.as_str(), + root_dir_url + .join("subdir_types/import.ts") + .unwrap() + .as_str() + ); + assert_eq!( + sloppy_reason, + &Some(SloppyImportsResolutionReason::NoExtension) + ); + assert!(used_compiler_options_root_dirs); + } + + #[test] + fn specified_import_map() { + let sys = InMemorySys::default(); + sys.fs_insert_json(root_dir().join("deno.json"), json!({})); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let resolver = WorkspaceResolver::from_workspace( + &workspace_dir.workspace, + sys, + super::CreateResolverOptions { + pkg_json_dep_resolution: PackageJsonDepResolution::Enabled, + specified_import_map: Some(SpecifiedImportMap { + base_url: url_from_directory_path(&root_dir()).unwrap(), + value: json!({ + "imports": { + "b": "./b/mod.ts", + }, + }), + }), + sloppy_imports_options: SloppyImportsOptions::Disabled, + fs_cache_options: FsCacheOptions::Enabled, + }, + ) + .unwrap(); + let root = url_from_directory_path(&root_dir()).unwrap(); + match resolver + .resolve( + "b", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::Normal { specifier, .. 
} => { + assert_eq!(specifier, root.join("b/mod.ts").unwrap()); + } + _ => unreachable!(), + } + } + + #[test] + fn workspace_specified_import_map() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./a"] + }), + ); + sys.fs_insert_json(root_dir().join("a").join("deno.json"), json!({})); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + WorkspaceResolver::from_workspace( + &workspace_dir.workspace, + UnreachableSys, + super::CreateResolverOptions { + pkg_json_dep_resolution: PackageJsonDepResolution::Enabled, + specified_import_map: Some(SpecifiedImportMap { + base_url: url_from_directory_path(&root_dir()).unwrap(), + value: json!({ + "imports": { + "b": "./b/mod.ts", + }, + }), + }), + sloppy_imports_options: SloppyImportsOptions::Disabled, + fs_cache_options: FsCacheOptions::Enabled, + }, + ) + .unwrap(); + } + + #[test] + fn resolves_patch_member_with_version() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "patch": ["../patch"] + }), + ); + sys.fs_insert_json( + root_dir().join("../patch/deno.json"), + json!({ + "name": "@scope/patch", + "version": "1.0.0", + "exports": "./mod.ts" + }), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let resolver = create_resolver(&workspace_dir); + let root = url_from_directory_path(&root_dir()).unwrap(); + match resolver + .resolve( + "@scope/patch", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::WorkspaceJsrPackage { specifier, .. } => { + assert_eq!(specifier, root.join("../patch/mod.ts").unwrap()); + } + _ => unreachable!(), + } + // matching version + match resolver + .resolve( + "jsr:@scope/patch@1", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::WorkspaceJsrPackage { specifier, .. } => { + assert_eq!(specifier, root.join("../patch/mod.ts").unwrap()); + } + _ => unreachable!(), + } + // not matching version + match resolver + .resolve( + "jsr:@scope/patch@2", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::Normal { + specifier, + maybe_diagnostic, + .. + } => { + assert_eq!(specifier, Url::parse("jsr:@scope/patch@2").unwrap()); + assert_eq!( + maybe_diagnostic, + Some(Box::new( + MappedResolutionDiagnostic::ConstraintNotMatchedLocalVersion { + is_patch: true, + reference: JsrPackageReqReference::from_str("jsr:@scope/patch@2") + .unwrap(), + local_version: Version::parse_from_npm("1.0.0").unwrap(), + } + )) + ); + } + _ => unreachable!(), + } + } + + #[test] + fn resolves_patch_member_no_version() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "patch": ["../patch"] + }), + ); + sys.fs_insert_json( + root_dir().join("../patch/deno.json"), + json!({ + "name": "@scope/patch", + "exports": "./mod.ts" + }), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let resolver = create_resolver(&workspace_dir); + let root = url_from_directory_path(&root_dir()).unwrap(); + match resolver + .resolve( + "@scope/patch", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::WorkspaceJsrPackage { specifier, .. 
} => { + assert_eq!(specifier, root.join("../patch/mod.ts").unwrap()); + } + _ => unreachable!(), + } + // always resolves, no matter what version + match resolver + .resolve( + "jsr:@scope/patch@12", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::WorkspaceJsrPackage { specifier, .. } => { + assert_eq!(specifier, root.join("../patch/mod.ts").unwrap()); + } + _ => unreachable!(), + } + } + + #[test] + fn resolves_workspace_member() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member"] + }), + ); + sys.fs_insert_json( + root_dir().join("./member/deno.json"), + json!({ + "name": "@scope/member", + "version": "1.0.0", + "exports": "./mod.ts" + }), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let resolver = create_resolver(&workspace_dir); + let root = url_from_directory_path(&root_dir()).unwrap(); + match resolver + .resolve( + "@scope/member", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::WorkspaceJsrPackage { specifier, .. } => { + assert_eq!(specifier, root.join("./member/mod.ts").unwrap()); + } + _ => unreachable!(), + } + // matching version + match resolver + .resolve( + "jsr:@scope/member@1", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::WorkspaceJsrPackage { specifier, .. } => { + assert_eq!(specifier, root.join("./member/mod.ts").unwrap()); + } + _ => unreachable!(), + } + // not matching version + match resolver + .resolve( + "jsr:@scope/member@2", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::Normal { + specifier, + maybe_diagnostic, + .. + } => { + assert_eq!(specifier, Url::parse("jsr:@scope/member@2").unwrap()); + assert_eq!( + maybe_diagnostic, + Some(Box::new( + MappedResolutionDiagnostic::ConstraintNotMatchedLocalVersion { + is_patch: false, + reference: JsrPackageReqReference::from_str( + "jsr:@scope/member@2" + ) + .unwrap(), + local_version: Version::parse_from_npm("1.0.0").unwrap(), + } + )) + ); + } + _ => unreachable!(), + } + } + + #[test] + fn resolves_patch_workspace() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "imports": { + "@std/fs": "jsr:@std/fs@0.200.0" + }, + "patch": ["../patch"] + }), + ); + sys.fs_insert_json( + root_dir().join("../patch/deno.json"), + json!({ + "workspace": ["./member"] + }), + ); + sys.fs_insert_json( + root_dir().join("../patch/member/deno.json"), + json!({ + "name": "@scope/patch", + "version": "1.0.0", + "exports": "./mod.ts", + "imports": { + "@std/fs": "jsr:@std/fs@1" + } + }), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let resolver = create_resolver(&workspace_dir); + let root = url_from_directory_path(&root_dir()).unwrap(); + match resolver + .resolve( + "jsr:@scope/patch@1", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::WorkspaceJsrPackage { specifier, .. } => { + assert_eq!(specifier, root.join("../patch/member/mod.ts").unwrap()); + } + _ => unreachable!(), + } + // resolving @std/fs from root + match resolver + .resolve( + "@std/fs", + &root.join("main.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::Normal { specifier, .. 
} => { + assert_eq!(specifier, Url::parse("jsr:@std/fs@0.200.0").unwrap()); + } + _ => unreachable!(), + } + // resolving @std/fs in patched package + match resolver + .resolve( + "@std/fs", + &root.join("../patch/member/mod.ts").unwrap(), + ResolutionKind::Execution, + ) + .unwrap() + { + MappedResolution::Normal { specifier, .. } => { + assert_eq!(specifier, Url::parse("jsr:@std/fs@1").unwrap()); + } + _ => unreachable!(), + } + } + + #[test] + fn invalid_package_name_with_slashes() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./libs/math"] + }), + ); + sys.fs_insert_json( + root_dir().join("libs/math/deno.json"), + json!({ + "name": "@deno-test/libs/math", // Invalid package name containing slashes + "version": "1.0.0", + "exports": "./mod.ts" + }), + ); + let workspace = workspace_at_start_dir(&sys, &root_dir()); + let resolver = create_resolver(&workspace); + let result = resolver.resolve( + "@deno-test/libs/math", + &url_from_file_path(&root_dir().join("main.ts")).unwrap(), + ResolutionKind::Execution, + ); + // Resolve shouldn't panic and tt should result in unmapped + // bare specifier error as the package name is invalid. + assert!(result.err().unwrap().is_unmapped_bare_specifier()); + + let diagnostics = workspace.workspace.diagnostics(); + assert_eq!(diagnostics.len(), 1); + assert!(diagnostics + .first() + .unwrap() + .to_string() + .starts_with(r#"Invalid workspace member name "@deno-test/libs/math"."#)); + } + + fn create_resolver( + workspace_dir: &WorkspaceDirectory, + ) -> WorkspaceResolver { + WorkspaceResolver::from_workspace( + &workspace_dir.workspace, + UnreachableSys, + super::CreateResolverOptions { + pkg_json_dep_resolution: PackageJsonDepResolution::Enabled, + specified_import_map: None, + sloppy_imports_options: SloppyImportsOptions::Disabled, + fs_cache_options: FsCacheOptions::Enabled, + }, + ) + .unwrap() + } + + fn workspace_at_start_dir( + sys: &InMemorySys, + start_dir: &Path, + ) -> WorkspaceDirectory { + WorkspaceDirectory::discover( + sys, + WorkspaceDiscoverStart::Paths(&[start_dir.to_path_buf()]), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + ..Default::default() + }, + ) + .unwrap() + } +} diff --git a/resolvers/node/resolution.rs b/resolvers/node/resolution.rs index 495c71bc8383ce..4ed7f9229bd8a3 100644 --- a/resolvers/node/resolution.rs +++ b/resolvers/node/resolution.rs @@ -110,15 +110,6 @@ pub enum NodeResolutionKind { Types, } -impl From for deno_config::workspace::ResolutionKind { - fn from(value: NodeResolutionKind) -> Self { - match value { - NodeResolutionKind::Execution => Self::Execution, - NodeResolutionKind::Types => Self::Types, - } - } -} - impl NodeResolutionKind { pub fn is_types(&self) -> bool { matches!(self, NodeResolutionKind::Types) diff --git a/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/__test__.jsonc b/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/__test__.jsonc new file mode 100644 index 00000000000000..bf7e7e1c94c814 --- /dev/null +++ b/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "check --quiet subdir/mod.ts", + "output": "" +} diff --git a/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/deno.json b/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/deno.json new file mode 100644 index 00000000000000..d2148a5bb5be73 --- /dev/null +++ b/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/deno.json @@ 
-0,0 +1,6 @@ +{ + "compilerOptions": { + "rootDirs": ["subdir", "subdir_types"] + }, + "unstable": ["sloppy-imports"] +} diff --git a/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/subdir/mod.ts b/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/subdir/mod.ts new file mode 100644 index 00000000000000..1de59ebda0eca9 --- /dev/null +++ b/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/subdir/mod.ts @@ -0,0 +1,3 @@ +import type { someType } from "./import"; +const foo: someType = ""; +console.log(foo); diff --git a/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/subdir_types/import.ts b/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/subdir_types/import.ts new file mode 100644 index 00000000000000..423236068f24f4 --- /dev/null +++ b/tests/specs/check/compiler_options_root_dirs_and_sloppy_imports/subdir_types/import.ts @@ -0,0 +1 @@ +export type someType = string; From ece384c094eae0a22c8f862fe1deeab45f937813 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Thu, 6 Feb 2025 08:52:39 +0530 Subject: [PATCH 09/17] fix(ext/node): implement `DatabaseSync#applyChangeset()` (#27967) https://nodejs.org/api/sqlite.html#databaseapplychangesetchangeset-options ```js const sourceDb = new DatabaseSync(':memory:'); const targetDb = new DatabaseSync(':memory:'); sourceDb.exec('CREATE TABLE data(key INTEGER PRIMARY KEY, value TEXT)'); targetDb.exec('CREATE TABLE data(key INTEGER PRIMARY KEY, value TEXT)'); const session = sourceDb.createSession(); const insert = sourceDb.prepare('INSERT INTO data (key, value) VALUES (?, ?)'); insert.run(1, 'hello'); insert.run(2, 'world'); const changeset = session.changeset(); targetDb.applyChangeset(changeset); // Now that the changeset has been applied, targetDb contains the same data as sourceDb. ``` --- ext/node/ops/sqlite/database.rs | 125 ++++++++++++++++++++++++++++++++ ext/node/ops/sqlite/mod.rs | 6 ++ tests/unit_node/sqlite_test.ts | 29 ++++++++ 3 files changed, 160 insertions(+) diff --git a/ext/node/ops/sqlite/database.rs b/ext/node/ops/sqlite/database.rs index 73063b6276ba90..e935a634cef6df 100644 --- a/ext/node/ops/sqlite/database.rs +++ b/ext/node/ops/sqlite/database.rs @@ -2,11 +2,16 @@ use std::cell::Cell; use std::cell::RefCell; +use std::ffi::c_char; +use std::ffi::c_void; +use std::ffi::CStr; use std::ffi::CString; use std::ptr::null; use std::rc::Rc; use deno_core::op2; +use deno_core::serde_v8; +use deno_core::v8; use deno_core::GarbageCollected; use deno_core::OpState; use deno_permissions::PermissionsContainer; @@ -41,6 +46,13 @@ impl Default for DatabaseSyncOptions { } } +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct ApplyChangesetOptions<'a> { + filter: Option>, + on_conflict: Option>, +} + pub struct DatabaseSync { conn: Rc>>, options: DatabaseSyncOptions, @@ -197,6 +209,119 @@ impl DatabaseSync { }) } + // Applies a changeset to the database. + // + // This method is a wrapper around `sqlite3changeset_apply()`. + #[reentrant] + fn apply_changeset<'a>( + &self, + scope: &mut v8::HandleScope<'a>, + #[buffer] changeset: &[u8], + #[serde] options: Option>, + ) -> Result { + struct HandlerCtx<'a, 'b> { + scope: &'a mut v8::HandleScope<'b>, + confict: Option>, + filter: Option>, + } + + // Conflict handler callback for `sqlite3changeset_apply()`. 
+ unsafe extern "C" fn conflict_handler( + p_ctx: *mut c_void, + e_conflict: i32, + _: *mut libsqlite3_sys::sqlite3_changeset_iter, + ) -> i32 { + let ctx = &mut *(p_ctx as *mut HandlerCtx); + + if let Some(conflict) = &mut ctx.confict { + let recv = v8::undefined(ctx.scope).into(); + let args = [v8::Integer::new(ctx.scope, e_conflict).into()]; + + let ret = conflict.call(ctx.scope, recv, &args).unwrap(); + return ret + .int32_value(ctx.scope) + .unwrap_or(libsqlite3_sys::SQLITE_CHANGESET_ABORT); + } + + libsqlite3_sys::SQLITE_CHANGESET_ABORT + } + + // Filter handler callback for `sqlite3changeset_apply()`. + unsafe extern "C" fn filter_handler( + p_ctx: *mut c_void, + z_tab: *const c_char, + ) -> i32 { + let ctx = &mut *(p_ctx as *mut HandlerCtx); + + if let Some(filter) = &mut ctx.filter { + let tab = CStr::from_ptr(z_tab).to_str().unwrap(); + + let recv = v8::undefined(ctx.scope).into(); + let args = [v8::String::new(ctx.scope, tab).unwrap().into()]; + + let ret = filter.call(ctx.scope, recv, &args).unwrap(); + return ret.boolean_value(ctx.scope) as i32; + } + + 1 + } + + let db = self.conn.borrow(); + let db = db.as_ref().ok_or(SqliteError::AlreadyClosed)?; + + // It is safe to use scope in the handlers because they are never + // called after the call to `sqlite3changeset_apply()`. + let mut ctx = HandlerCtx { + scope, + confict: None, + filter: None, + }; + + if let Some(options) = options { + if let Some(filter) = options.filter { + let filter_cb: v8::Local = filter + .v8_value + .try_into() + .map_err(|_| SqliteError::InvalidCallback("filter"))?; + ctx.filter = Some(filter_cb); + } + + if let Some(on_conflict) = options.on_conflict { + let on_conflict_cb: v8::Local = on_conflict + .v8_value + .try_into() + .map_err(|_| SqliteError::InvalidCallback("onConflict"))?; + ctx.confict = Some(on_conflict_cb); + } + } + + // SAFETY: lifetime of the connection is guaranteed by reference + // counting. + let raw_handle = unsafe { db.handle() }; + + // SAFETY: `changeset` points to a valid memory location and its + // length is correct. `ctx` is stack allocated and its lifetime is + // longer than the call to `sqlite3changeset_apply()`. + unsafe { + let r = libsqlite3_sys::sqlite3changeset_apply( + raw_handle, + changeset.len() as i32, + changeset.as_ptr() as *mut _, + Some(filter_handler), + Some(conflict_handler), + &mut ctx as *mut _ as *mut c_void, + ); + + if r == libsqlite3_sys::SQLITE_OK { + return Ok(true); + } else if r == libsqlite3_sys::SQLITE_ABORT { + return Ok(false); + } + + Err(SqliteError::ChangesetApplyFailed) + } + } + // Creates and attaches a session to the database. 
// // This method is a wrapper around `sqlite3session_create()` and diff --git a/ext/node/ops/sqlite/mod.rs b/ext/node/ops/sqlite/mod.rs index d3c273a66347d9..05e2e256b8af16 100644 --- a/ext/node/ops/sqlite/mod.rs +++ b/ext/node/ops/sqlite/mod.rs @@ -58,4 +58,10 @@ pub enum SqliteError { #[class(range)] #[error("The value of column {0} is too large to be represented as a JavaScript number: {1}")] NumberTooLarge(i32, i64), + #[class(generic)] + #[error("Failed to apply changeset")] + ChangesetApplyFailed, + #[class(type)] + #[error("Invalid callback: {0}")] + InvalidCallback(&'static str), } diff --git a/tests/unit_node/sqlite_test.ts b/tests/unit_node/sqlite_test.ts index ec54780ae92980..5cfec0d5e43fef 100644 --- a/tests/unit_node/sqlite_test.ts +++ b/tests/unit_node/sqlite_test.ts @@ -152,3 +152,32 @@ Deno.test({ } }, }); + +Deno.test("[node/sqlite] applyChangeset across databases", () => { + const sourceDb = new DatabaseSync(":memory:"); + const targetDb = new DatabaseSync(":memory:"); + + sourceDb.exec("CREATE TABLE data(key INTEGER PRIMARY KEY, value TEXT)"); + targetDb.exec("CREATE TABLE data(key INTEGER PRIMARY KEY, value TEXT)"); + + const session = sourceDb.createSession(); + + const insert = sourceDb.prepare( + "INSERT INTO data (key, value) VALUES (?, ?)", + ); + insert.run(1, "hello"); + insert.run(2, "world"); + + const changeset = session.changeset(); + targetDb.applyChangeset(changeset, { + filter(e) { + return e === "data"; + }, + }); + + const stmt = targetDb.prepare("SELECT * FROM data"); + assertEquals(stmt.all(), [ + { key: 1, value: "hello", __proto__: null }, + { key: 2, value: "world", __proto__: null }, + ]); +}); From 28faaee772f4f6f8283a948e4b0c006bb05768c7 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Thu, 6 Feb 2025 08:52:49 +0530 Subject: [PATCH 10/17] fix(ext/node): throw Session methods when database is closed (#27968) --- ext/node/ops/sqlite/database.rs | 2 +- ext/node/ops/sqlite/session.rs | 12 +++++++++++- tests/unit_node/sqlite_test.ts | 3 +++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/ext/node/ops/sqlite/database.rs b/ext/node/ops/sqlite/database.rs index e935a634cef6df..b27d08caf82278 100644 --- a/ext/node/ops/sqlite/database.rs +++ b/ext/node/ops/sqlite/database.rs @@ -376,7 +376,7 @@ impl DatabaseSync { Ok(Session { inner: raw_session, freed: Cell::new(false), - _db: self.conn.clone(), + db: self.conn.clone(), }) } } diff --git a/ext/node/ops/sqlite/session.rs b/ext/node/ops/sqlite/session.rs index 520904d536ffac..4bc616a9c0d4c6 100644 --- a/ext/node/ops/sqlite/session.rs +++ b/ext/node/ops/sqlite/session.rs @@ -24,7 +24,7 @@ pub struct Session { pub(crate) freed: Cell, // Hold a strong reference to the database. - pub(crate) _db: Rc>>, + pub(crate) db: Rc>>, } impl GarbageCollected for Session {} @@ -57,6 +57,10 @@ impl Session { // Closes the session. #[fast] fn close(&self) -> Result<(), SqliteError> { + if self.db.borrow().is_none() { + return Err(SqliteError::AlreadyClosed); + } + self.delete() } @@ -66,6 +70,9 @@ impl Session { // This method is a wrapper around `sqlite3session_changeset()`. #[buffer] fn changeset(&self) -> Result, SqliteError> { + if self.db.borrow().is_none() { + return Err(SqliteError::AlreadyClosed); + } if self.freed.get() { return Err(SqliteError::SessionClosed); } @@ -78,6 +85,9 @@ impl Session { // This method is a wrapper around `sqlite3session_patchset()`. 
#[buffer] fn patchset(&self) -> Result, SqliteError> { + if self.db.borrow().is_none() { + return Err(SqliteError::AlreadyClosed); + } if self.freed.get() { return Err(SqliteError::SessionClosed); } diff --git a/tests/unit_node/sqlite_test.ts b/tests/unit_node/sqlite_test.ts index 5cfec0d5e43fef..fcf8292e029487 100644 --- a/tests/unit_node/sqlite_test.ts +++ b/tests/unit_node/sqlite_test.ts @@ -96,6 +96,9 @@ Deno.test("[node/sqlite] createSession and changesets", () => { assertThrows(() => session.changeset(), Error, "Session is already closed"); // Close after close should throw. assertThrows(() => session.close(), Error, "Session is already closed"); + + db.close(); + assertThrows(() => session.close(), Error, "Database is already closed"); }); Deno.test("[node/sqlite] StatementSync integer too large", () => { From a401a79c751dbe8f1bed1d4c542f1aa97ac70199 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Thu, 6 Feb 2025 12:11:42 +0530 Subject: [PATCH 11/17] fix(ext/node): fix missing privateKey.x in curve25519 JWK (#27990) Fixes https://github.com/denoland/deno/issues/27972 --- ext/node/ops/crypto/keys.rs | 13 +++++++++++-- tests/unit_node/crypto/crypto_key_test.ts | 13 +++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/ext/node/ops/crypto/keys.rs b/ext/node/ops/crypto/keys.rs index 79b09faa267a7d..db16d9e4ddb06e 100644 --- a/ext/node/ops/crypto/keys.rs +++ b/ext/node/ops/crypto/keys.rs @@ -1463,19 +1463,28 @@ impl AsymmetricPrivateKey { AsymmetricPrivateKey::X25519(static_secret) => { let bytes = static_secret.to_bytes(); + let AsymmetricPublicKey::X25519(x) = self.to_public_key() else { + unreachable!(); + }; + Ok(deno_core::serde_json::json!({ - "kty": "OKP", "crv": "X25519", + "x": bytes_to_b64(x.as_bytes()), "d": bytes_to_b64(&bytes), + "kty": "OKP", })) } AsymmetricPrivateKey::Ed25519(key) => { let bytes = key.to_bytes(); + let AsymmetricPublicKey::Ed25519(x) = self.to_public_key() else { + unreachable!(); + }; Ok(deno_core::serde_json::json!({ - "kty": "OKP", "crv": "Ed25519", + "x": bytes_to_b64(x.as_bytes()), "d": bytes_to_b64(&bytes), + "kty": "OKP", })) } _ => Err(AsymmetricPrivateKeyJwkError::JwkExportNotImplementedForKeyType), diff --git a/tests/unit_node/crypto/crypto_key_test.ts b/tests/unit_node/crypto/crypto_key_test.ts index c8535fb7681a5f..d0685257953050 100644 --- a/tests/unit_node/crypto/crypto_key_test.ts +++ b/tests/unit_node/crypto/crypto_key_test.ts @@ -755,3 +755,16 @@ Deno.test("X509Certificate checkHost", function () { assertEquals(cert.checkHost("www.google.com"), undefined); assertEquals(cert.checkHost("agent1"), "agent1"); }); + +// https://github.com/denoland/deno/issues/27972 +Deno.test("curve25519 generate valid private jwk", function () { + const { publicKey, privateKey } = generateKeyPairSync("ed25519", { + publicKeyEncoding: { format: "jwk" }, + privateKeyEncoding: { format: "jwk" }, + }); + + // @ts-ignore @types/node broken + assert(!publicKey.d); + // @ts-ignore @types/node broken + assert(privateKey.d); +}); From 78fceb4a3304fcecc6f8225e5daa218bf49c6095 Mon Sep 17 00:00:00 2001 From: Yoshiya Hinosawa Date: Thu, 6 Feb 2025 16:35:55 +0900 Subject: [PATCH 12/17] fix(ext/node): fix twitter-api-v2 compatibility (#27971) --- ext/node/polyfills/net.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/ext/node/polyfills/net.ts b/ext/node/polyfills/net.ts index d7a3183e41c673..a9a3da12efe271 100644 --- a/ext/node/polyfills/net.ts +++ b/ext/node/polyfills/net.ts @@ -1168,6 +1168,7 @@ const pkgsNeedsSockInitWorkaround = [ 
"@npmcli/agent", "npm-check-updates", "playwright-core", + "twitter-api-v2", ]; /** From 4a2b8fc22dcf0d7d620a888535089e9d41082d75 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Thu, 6 Feb 2025 13:08:40 +0530 Subject: [PATCH 13/17] fix(ext/node): expose sqlite changeset constants (#27992) https://nodejs.org/api/sqlite.html#sqliteconstants --- ext/node/polyfills/sqlite.ts | 13 +++++++++++++ tests/unit_node/sqlite_test.ts | 8 ++++---- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/ext/node/polyfills/sqlite.ts b/ext/node/polyfills/sqlite.ts index 7a4e92f8bb4a31..b3ea2b157f6d82 100644 --- a/ext/node/polyfills/sqlite.ts +++ b/ext/node/polyfills/sqlite.ts @@ -2,8 +2,21 @@ import { DatabaseSync } from "ext:core/ops"; +export const constants = { + SQLITE_CHANGESET_OMIT: 0, + SQLITE_CHANGESET_REPLACE: 1, + SQLITE_CHANGESET_ABORT: 2, + + SQLITE_CHANGESET_DATA: 1, + SQLITE_CHANGESET_NOTFOUND: 2, + SQLITE_CHANGESET_CONFLICT: 3, + SQLITE_CHANGESET_CONSTRAINT: 4, + SQLITE_CHANGESET_FOREIGN_KEY: 5, +}; + export { DatabaseSync }; export default { + constants, DatabaseSync, }; diff --git a/tests/unit_node/sqlite_test.ts b/tests/unit_node/sqlite_test.ts index fcf8292e029487..6b7f1aef54d1a7 100644 --- a/tests/unit_node/sqlite_test.ts +++ b/tests/unit_node/sqlite_test.ts @@ -1,5 +1,5 @@ // Copyright 2018-2025 the Deno authors. MIT license. -import { DatabaseSync } from "node:sqlite"; +import sqlite, { DatabaseSync } from "node:sqlite"; import { assert, assertEquals, assertThrows } from "@std/assert"; const tempDir = Deno.makeTempDirSync(); @@ -173,9 +173,9 @@ Deno.test("[node/sqlite] applyChangeset across databases", () => { const changeset = session.changeset(); targetDb.applyChangeset(changeset, { - filter(e) { - return e === "data"; - }, + filter: (e) => e === "data", + // @ts-ignore: types are not up to date + onConflict: () => sqlite.constants.SQLITE_CHANGESET_ABORT, }); const stmt = targetDb.prepare("SELECT * FROM data"); From df02af27fda104bd877a10eb8f1e58ee5ed0ca2b Mon Sep 17 00:00:00 2001 From: Marvin Hagemeister Date: Thu, 6 Feb 2025 09:00:23 +0100 Subject: [PATCH 14/17] feat(unstable): add `lint.plugins` to config schema (#27982) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Not sure what our handling of unstable properties in `deno.json` is. This PR adds it to the config schema. --------- Signed-off-by: Marvin Hagemeister Co-authored-by: Bartek Iwańczuk --- cli/schemas/config-file.v1.json | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/cli/schemas/config-file.v1.json b/cli/schemas/config-file.v1.json index ce1bad07c6e81c..cf2ad3cb3ddb84 100644 --- a/cli/schemas/config-file.v1.json +++ b/cli/schemas/config-file.v1.json @@ -292,6 +292,13 @@ "type": "string" } }, + "plugins": { + "type": "array", + "description": "UNSTABLE: List of plugins to load. These can be paths, npm or jsr specifiers", + "items": { + "type": "string" + } + }, "rules": { "type": "object", "properties": { From 9213215d6df8944509737d074df82ecb072306da Mon Sep 17 00:00:00 2001 From: Marvin Hagemeister Date: Thu, 6 Feb 2025 14:04:04 +0100 Subject: [PATCH 15/17] feat(unstable): add test for lint plugin destroy hook (#27981) Noticed that we didn't test the `destroy()` hook of lint plugins. This PR adds a test for that. 
--- .../lint/lint_plugin_lifecycle/__test__.jsonc | 8 +++++++ tests/specs/lint/lint_plugin_lifecycle/a.ts | 1 + .../lint/lint_plugin_lifecycle/deno.json | 5 ++++ .../specs/lint/lint_plugin_lifecycle/lint.out | 5 ++++ .../lint/lint_plugin_lifecycle/plugin.ts | 23 +++++++++++++++++++ 5 files changed, 42 insertions(+) create mode 100644 tests/specs/lint/lint_plugin_lifecycle/__test__.jsonc create mode 100644 tests/specs/lint/lint_plugin_lifecycle/a.ts create mode 100644 tests/specs/lint/lint_plugin_lifecycle/deno.json create mode 100644 tests/specs/lint/lint_plugin_lifecycle/lint.out create mode 100644 tests/specs/lint/lint_plugin_lifecycle/plugin.ts diff --git a/tests/specs/lint/lint_plugin_lifecycle/__test__.jsonc b/tests/specs/lint/lint_plugin_lifecycle/__test__.jsonc new file mode 100644 index 00000000000000..6e5dd5713bfd35 --- /dev/null +++ b/tests/specs/lint/lint_plugin_lifecycle/__test__.jsonc @@ -0,0 +1,8 @@ +{ + "steps": [ + { + "args": "lint a.ts", + "output": "lint.out" + } + ] +} diff --git a/tests/specs/lint/lint_plugin_lifecycle/a.ts b/tests/specs/lint/lint_plugin_lifecycle/a.ts new file mode 100644 index 00000000000000..0366a968a76b74 --- /dev/null +++ b/tests/specs/lint/lint_plugin_lifecycle/a.ts @@ -0,0 +1 @@ +const _a = "foo"; diff --git a/tests/specs/lint/lint_plugin_lifecycle/deno.json b/tests/specs/lint/lint_plugin_lifecycle/deno.json new file mode 100644 index 00000000000000..57b9dcb3647975 --- /dev/null +++ b/tests/specs/lint/lint_plugin_lifecycle/deno.json @@ -0,0 +1,5 @@ +{ + "lint": { + "plugins": ["./plugin.ts"] + } +} diff --git a/tests/specs/lint/lint_plugin_lifecycle/lint.out b/tests/specs/lint/lint_plugin_lifecycle/lint.out new file mode 100644 index 00000000000000..f3d079a57f0823 --- /dev/null +++ b/tests/specs/lint/lint_plugin_lifecycle/lint.out @@ -0,0 +1,5 @@ +create: test-plugin/my-rule +create: test-plugin/my-rule-2 +destroy: test-plugin/my-rule +destroy: test-plugin/my-rule-2 +Checked 1 file diff --git a/tests/specs/lint/lint_plugin_lifecycle/plugin.ts b/tests/specs/lint/lint_plugin_lifecycle/plugin.ts new file mode 100644 index 00000000000000..e76cdd00b127f5 --- /dev/null +++ b/tests/specs/lint/lint_plugin_lifecycle/plugin.ts @@ -0,0 +1,23 @@ +export default { + name: "test-plugin", + rules: { + "my-rule": { + create(ctx) { + console.log(`create: ${ctx.id}`); + return {}; + }, + destroy(ctx) { + console.log(`destroy: ${ctx.id}`); + }, + }, + "my-rule-2": { + create(ctx) { + console.log(`create: ${ctx.id}`); + return {}; + }, + destroy(ctx) { + console.log(`destroy: ${ctx.id}`); + }, + }, + }, +}; From 6619210509081c19f213437b1a5800d6ebfd1bee Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Thu, 6 Feb 2025 14:53:43 +0100 Subject: [PATCH 16/17] fix(otel): custom span start + end times are fractional ms (#27995) Previously they were treated as fractional seconds. 
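
To illustrate the new semantics, a numeric custom start or end time is now read as epoch milliseconds rather than fractional seconds (a usage sketch; the `npm:@opentelemetry/api` import and span name are illustrative):

```js
import { trace } from "npm:@opentelemetry/api@1";

const tracer = trace.getTracer("example");
// A numeric TimeInput is epoch milliseconds (e.g. Date.now()); previously it
// was interpreted as fractional seconds, yielding wildly wrong span times.
const span = tracer.startSpan("work", { startTime: Date.now() - 25 });
span.end(Date.now());
```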
From 6619210509081c19f213437b1a5800d6ebfd1bee Mon Sep 17 00:00:00 2001
From: Luca Casonato
Date: Thu, 6 Feb 2025 14:53:43 +0100
Subject: [PATCH 16/17] fix(otel): custom span start + end times are fractional ms (#27995)

Previously they were treated as fractional seconds.

---
 ext/telemetry/lib.rs       | 6 +++---
 ext/telemetry/telemetry.ts | 8 ++++----
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/ext/telemetry/lib.rs b/ext/telemetry/lib.rs
index 5d19f2bdcf8452..754b0bfa3ed8fe 100644
--- a/ext/telemetry/lib.rs
+++ b/ext/telemetry/lib.rs
@@ -1184,7 +1184,7 @@ impl OtelTracer {
     let start_time = start_time
       .map(|start_time| {
         SystemTime::UNIX_EPOCH
-          .checked_add(std::time::Duration::from_secs_f64(start_time))
+          .checked_add(std::time::Duration::from_secs_f64(start_time / 1000.0))
           .ok_or_else(|| JsErrorBox::generic("invalid start time"))
       })
       .unwrap_or_else(|| Ok(SystemTime::now()))?;
@@ -1251,7 +1251,7 @@ impl OtelTracer {
     let start_time = start_time
       .map(|start_time| {
         SystemTime::UNIX_EPOCH
-          .checked_add(std::time::Duration::from_secs_f64(start_time))
+          .checked_add(std::time::Duration::from_secs_f64(start_time / 1000.0))
          .ok_or_else(|| JsErrorBox::generic("invalid start time"))
       })
       .unwrap_or_else(|| Ok(SystemTime::now()))?;
@@ -1366,7 +1366,7 @@ impl OtelSpan {
       SystemTime::now()
     } else {
       SystemTime::UNIX_EPOCH
-        .checked_add(Duration::from_secs_f64(end_time))
+        .checked_add(Duration::from_secs_f64(end_time / 1000.0))
        .unwrap()
     };
 
diff --git a/ext/telemetry/telemetry.ts b/ext/telemetry/telemetry.ts
index 7ce44f74ed122c..139affbf47e36e 100644
--- a/ext/telemetry/telemetry.ts
+++ b/ext/telemetry/telemetry.ts
@@ -139,8 +139,8 @@ interface IKeyValue {
   value: IAnyValue;
 }
 
-function hrToSecs(hr: [number, number]): number {
-  return (hr[0] * 1e3 + hr[1] / 1e6) / 1000;
+function hrToMs(hr: [number, number]): number {
+  return (hr[0] * 1e3 + hr[1] / 1e6);
 }
 
 export function enterSpan(span: Span): Context | undefined {
@@ -276,7 +276,7 @@ class Tracer {
 
     let startTime = options?.startTime;
     if (startTime && ArrayIsArray(startTime)) {
-      startTime = hrToSecs(startTime);
+      startTime = hrToMs(startTime);
     } else if (startTime && isDate(startTime)) {
       startTime = DatePrototypeGetTime(startTime);
    }
@@ -383,7 +383,7 @@ class Span {
 
   end(endTime?: TimeInput): void {
     if (endTime && ArrayIsArray(endTime)) {
-      endTime = hrToSecs(endTime);
+      endTime = hrToMs(endTime);
     } else if (endTime && isDate(endTime)) {
       endTime = DatePrototypeGetTime(endTime);
     }
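
With this change the JS side normalizes every user-supplied timestamp to milliseconds (hrtime tuples via `hrToMs`, `Date` values via `getTime`), and the Rust side divides by 1000 because `Duration::from_secs_f64` expects seconds. A standalone sketch of the arithmetic, not taken from the patch:

// An hrtime tuple is [seconds, nanoseconds].
function hrToMs(hr: [number, number]): number {
  return hr[0] * 1e3 + hr[1] / 1e6;
}

const hr: [number, number] = [1, 500_000_000]; // 1.5 seconds
const ms = hrToMs(hr);  // 1500: milliseconds cross the op boundary
const secs = ms / 1000; // 1.5: what Duration::from_secs_f64 receives on the Rust side
console.log(ms, secs);
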
From 15cfa05fa6d1b0e4c7437bbadb2f5b1c05e519d7 Mon Sep 17 00:00:00 2001
From: Bartek Iwańczuk
Date: Thu, 6 Feb 2025 17:13:07 +0100
Subject: [PATCH 17/17] test: remove one of 'node_unit_tests::tls_test' tests (#27985)

It will be moved to the `npm_smoke_tests` repo instead

---
 tests/unit_node/tls_test.ts | 13 -------------
 1 file changed, 13 deletions(-)

diff --git a/tests/unit_node/tls_test.ts b/tests/unit_node/tls_test.ts
index 97d753e4f89597..f34d9efb5b7857 100644
--- a/tests/unit_node/tls_test.ts
+++ b/tests/unit_node/tls_test.ts
@@ -12,7 +12,6 @@ import { fromFileUrl, join } from "@std/path";
 import * as tls from "node:tls";
 import * as net from "node:net";
 import * as stream from "node:stream";
-import { text } from "node:stream/consumers";
 import { execCode } from "../unit/test_util.ts";
 
 const tlsTestdataDir = fromFileUrl(
@@ -98,18 +97,6 @@ Connection: close
   assertEquals(bodyText, "hello");
 });
 
-// Regression at https://github.com/denoland/deno/issues/27652
-Deno.test("tls.connect makes tls connection to example.com", async () => {
-  const socket = tls.connect(443, "example.com");
-  await new Promise((resolve) => {
-    socket.on("secureConnect", resolve);
-  });
-  socket.write(
-    "GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n",
-  );
-  assertStringIncludes(await text(socket), "Example Domain");
-});
-
 // https://github.com/denoland/deno/pull/20120
 Deno.test("tls.connect mid-read tcp->tls upgrade", async () => {
   const { promise, resolve } = Promise.withResolvers();