diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 59609e4f..1a0839ce 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -12,6 +12,8 @@ on: - "Cargo.lock" - "rust-toolchain.toml" - "rustfmt.toml" + # or in js packages + - "packages/**" concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }} @@ -40,10 +42,15 @@ jobs: cache-base: main env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Setup Biome + uses: biomejs/setup-biome@v2 + with: + version: latest - name: Run format run: | cargo fmt --all --check taplo format --check + biome format actionlint: name: Lint GitHub Actions @@ -84,36 +91,15 @@ jobs: cache-base: main env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Setup Biome + uses: biomejs/setup-biome@v2 + with: + version: latest - name: Run Lints run: | cargo clippy cargo run -p rules_check - - # check-dependencies: - # name: Check Dependencies - # runs-on: ubuntu-latest - # services: - # postgres: - # image: postgres:latest - # env: - # POSTGRES_USER: postgres - # POSTGRES_PASSWORD: postgres - # POSTGRES_DB: postgres - # ports: - # - 5432:5432 - # steps: - # - name: Checkout PR Branch - # uses: actions/checkout@v4 - # with: - # submodules: true - # - name: Free Disk Space - # uses: ./.github/actions/free-disk-space - # - name: Install toolchain - # run: rustup toolchain install nightly - # - name: Install udeps - # run: cargo install cargo-udeps --locked - # - name: Detect unused dependencies using udeps - # run: cargo +nightly udeps --all-targets + biome lint test: name: Test @@ -145,6 +131,40 @@ jobs: - name: Run tests run: cargo test --workspace + test-js-bindings: + name: Test JS Bindings + runs-on: ubuntu-latest + services: + postgres: + image: postgres:latest + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + ports: + - 5432:5432 + steps: + - name: Checkout PR branch + uses: actions/checkout@v4 + 
with: + submodules: true + - name: Free Disk Space + uses: ./.github/actions/free-disk-space + - name: Install toolchain + uses: moonrepo/setup-rust@v1 + - name: Build main binary + run: cargo build -p pglt_cli --release + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + - name: Install JS dependencies + run: bun install + - name: Build TypeScript code + working-directory: packages/@pglt/backend-jsonrpc + run: bun run build + - name: Run JS tests + working-directory: packages/@pglt/backend-jsonrpc + run: bun run test + codegen: name: Check Codegen runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index 0ff7be51..b16dfab6 100644 --- a/.gitignore +++ b/.gitignore @@ -18,4 +18,8 @@ target/ .DS_Store desktop.ini -*.log \ No newline at end of file +*.log + +node_modules/ + +**/dist/ diff --git a/Cargo.lock b/Cargo.lock index 8b01e85f..c40f1910 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -130,8 +130,8 @@ checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" dependencies = [ "async-task", "concurrent-queue", - "fastrand 2.3.0", - "futures-lite 2.5.0", + "fastrand", + "futures-lite", "slab", ] @@ -143,61 +143,32 @@ checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" dependencies = [ "async-channel 2.3.1", "async-executor", - "async-io 2.4.0", - "async-lock 3.4.0", + "async-io", + "async-lock", "blocking", - "futures-lite 2.5.0", + "futures-lite", "once_cell", ] -[[package]] -name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-lite 1.13.0", - "log", - "parking", - "polling 2.8.0", - "rustix 0.37.27", - "slab", - "socket2 0.4.10", - "waker-fn", -] - [[package]] name = "async-io" version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" dependencies = [ - "async-lock 3.4.0", + "async-lock", "cfg-if", "concurrent-queue", "futures-io", - "futures-lite 2.5.0", + "futures-lite", "parking", - "polling 3.7.4", - "rustix 0.38.42", + "polling", + "rustix", "slab", "tracing", "windows-sys 0.59.0", ] -[[package]] -name = "async-lock" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" -dependencies = [ - "event-listener 2.5.3", -] - [[package]] name = "async-lock" version = "3.4.0" @@ -217,13 +188,13 @@ checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" dependencies = [ "async-channel 1.9.0", "async-global-executor", - "async-io 2.4.0", - "async-lock 3.4.0", + "async-io", + "async-lock", "crossbeam-utils", "futures-channel", "futures-core", "futures-io", - "futures-lite 2.5.0", + "futures-lite", "gloo-timers", "kv-log-macro", "log", @@ -349,6 +320,25 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "biome_deserialize" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b4443260d505148169f5fb35634c2a60d8489882f8c9c3f1db8b7cf0cb57632" +dependencies = [ + "biome_console", + "biome_deserialize_macros 0.5.7", + "biome_diagnostics", + "biome_json_parser", + "biome_json_syntax", + "biome_rowan", + "bitflags 2.6.0", + "indexmap 1.9.3", + "serde", + "serde_json", + "tracing", +] + [[package]] name = "biome_deserialize" version = "0.6.0" @@ -356,7 +346,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6f619dc8ca0595ed8850d729ebc71722d4233aba68c5aec7d9993a53e59f3fe" dependencies = [ "biome_console", - "biome_deserialize_macros", + "biome_deserialize_macros 0.6.0", "biome_diagnostics", "biome_json_parser", "biome_json_syntax", @@ -367,6 +357,19 @@ dependencies = [ "serde", ] +[[package]] +name = "biome_deserialize_macros" 
+version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fc1244cc5f0cc267bd26b601e9ccd6851c6a4d395bba07e27c2de641dc84479" +dependencies = [ + "convert_case", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "biome_deserialize_macros" version = "0.6.0" @@ -423,6 +426,71 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "biome_formatter" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d351a9dc49ae024220a83c44329ab14a9e66887a7ca51fc7ae875e9e56f626c" +dependencies = [ + "biome_console", + "biome_deserialize 0.5.7", + "biome_deserialize_macros 0.5.7", + "biome_diagnostics", + "biome_rowan", + "cfg-if", + "countme", + "drop_bomb", + "indexmap 1.9.3", + "rustc-hash 1.1.0", + "tracing", + "unicode-width", +] + +[[package]] +name = "biome_js_factory" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c9847f4dfd16ee242d12b90f96f6b2eb33238dfc4eac7b5c045e14eebe717b7" +dependencies = [ + "biome_js_syntax", + "biome_rowan", +] + +[[package]] +name = "biome_js_formatter" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bc1f8b67a8fa45555a7a9ea1004eca73c159b7f1941050311d35e312cff3bb8" +dependencies = [ + "biome_console", + "biome_deserialize 0.5.7", + "biome_deserialize_macros 0.5.7", + "biome_diagnostics_categories", + "biome_formatter", + "biome_js_factory", + "biome_js_syntax", + "biome_json_syntax", + "biome_rowan", + "biome_text_size", + "biome_unicode_table", + "cfg-if", + "smallvec", + "tracing", + "unicode-width", +] + +[[package]] +name = "biome_js_syntax" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38a524bd8b1f5f7b3355dfe2744196227ee15e9aa3446d562deb9ed511cf2015" +dependencies = [ + "biome_console", + "biome_diagnostics", + "biome_rowan", + "serde", +] + [[package]] name = 
"biome_json_factory" version = "0.5.7" @@ -564,7 +632,7 @@ dependencies = [ "async-channel 2.3.1", "async-task", "futures-io", - "futures-lite 2.5.0", + "futures-lite", "piper", ] @@ -754,6 +822,22 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + [[package]] name = "countme" version = "3.0.1" @@ -1021,18 +1105,18 @@ checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" [[package]] name = "enumflags2" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d232db7f5956f3f14313dc2f87985c58bd2c695ce124c8cdd984e08e15ac133d" +checksum = "ba2f4b465f5318854c6f8dd686ede6c0a9dc67d4b1ac241cf0eb51521a309147" dependencies = [ "enumflags2_derive", ] [[package]] name = "enumflags2_derive" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de0d48a183585823424a4ce1aa132d174a6a81bd540895822eb4c8373a8e49e8" +checksum = "fc4caf64a58d7a6d65ab00639b046ff54399a39f5f2554728895ace4b297cd79" dependencies = [ "proc-macro2", "quote", @@ -1093,15 +1177,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "fastrand" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] - [[package]] name = "fastrand" version = "2.3.0" @@ -1135,6 +1210,21 @@ dependencies = [ "spin", ] +[[package]] +name = "foreign-types" +version = "0.3.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -1209,28 +1299,13 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" -[[package]] -name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand 1.9.0", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] - [[package]] name = "futures-lite" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cef40d21ae2c515b51041df9ed313ed21e572df340ea58a922a0aefe7e8891a1" dependencies = [ - "fastrand 2.3.0", + "fastrand", "futures-core", "futures-io", "parking", @@ -1409,12 +1484,6 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" -[[package]] -name = "hermit-abi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - [[package]] name = "hermit-abi" version = "0.4.0" @@ -1650,26 +1719,6 @@ dependencies = [ "similar", ] -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", -] - -[[package]] -name = 
"io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.9", - "libc", - "windows-sys 0.48.0", -] - [[package]] name = "is_ci" version = "1.2.0" @@ -1799,12 +1848,6 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -1924,6 +1967,23 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + [[package]] name = "newtype-uuid" version = "1.1.3" @@ -2063,6 +2123,50 @@ version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +[[package]] +name = "openssl" +version = "0.10.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e14130c6a98cd258fdcb0fb6d744152343ff729cbfcb28c656a9d12b999fbcd" +dependencies = [ + "bitflags 2.6.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-sys" +version = "0.9.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bb61ea9811cc39e3c2069f40b8b8e2e70d8569b361f879786cc7ed48b777cdd" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -2207,8 +2311,8 @@ dependencies = [ name = "pglt_analyse" version = "0.0.0" dependencies = [ - "biome_deserialize", - "biome_deserialize_macros", + "biome_deserialize 0.6.0", + "biome_deserialize_macros 0.6.0", "enumflags2", "pglt_console", "pglt_diagnostics", @@ -2239,8 +2343,8 @@ name = "pglt_cli" version = "0.0.0" dependencies = [ "anyhow", - "biome_deserialize", - "biome_deserialize_macros", + "biome_deserialize 0.6.0", + "biome_deserialize_macros 0.6.0", "bpaf", "crossbeam", "dashmap 5.5.3", @@ -2289,6 +2393,7 @@ dependencies = [ "pglt_test_utils", "pglt_text_size", "pglt_treesitter_queries", + "schemars", "serde", "serde_json", "sqlx", @@ -2301,8 +2406,8 @@ dependencies = [ name = "pglt_configuration" version = "0.0.0" dependencies = [ - "biome_deserialize", - "biome_deserialize_macros", + "biome_deserialize 0.6.0", + "biome_deserialize_macros 0.6.0", "bpaf", "indexmap 2.7.0", "pglt_analyse", @@ -2418,7 +2523,7 @@ name = "pglt_lsp" version = "0.0.0" dependencies = [ "anyhow", - "biome_deserialize", + "biome_deserialize 0.6.0", "futures", "pglt_analyse", "pglt_completions", @@ -2600,7 +2705,10 @@ dependencies = [ name = "pglt_workspace" version = "0.0.0" dependencies = [ - "biome_deserialize", + "biome_deserialize 0.6.0", + "biome_js_factory", + "biome_js_syntax", + "biome_rowan", "dashmap 5.5.3", 
"futures", "ignore", @@ -2667,7 +2775,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" dependencies = [ "atomic-waker", - "fastrand 2.3.0", + "fastrand", "futures-io", ] @@ -2698,22 +2806,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" -[[package]] -name = "polling" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if", - "concurrent-queue", - "libc", - "log", - "pin-project-lite", - "windows-sys 0.48.0", -] - [[package]] name = "polling" version = "3.7.4" @@ -2722,9 +2814,9 @@ checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" dependencies = [ "cfg-if", "concurrent-queue", - "hermit-abi 0.4.0", + "hermit-abi", "pin-project-lite", - "rustix 0.38.42", + "rustix", "tracing", "windows-sys 0.59.0", ] @@ -3053,21 +3145,6 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" -[[package]] -name = "ring" -version = "0.17.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" -dependencies = [ - "cc", - "cfg-if", - "getrandom", - "libc", - "spin", - "untrusted", - "windows-sys 0.52.0", -] - [[package]] name = "rsa" version = "0.9.7" @@ -3121,20 +3198,6 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" -[[package]] -name = "rustix" -version = "0.37.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - [[package]] name = "rustix" version = "0.38.42" @@ -3144,50 +3207,10 @@ dependencies = [ "bitflags 2.6.0", "errno", "libc", - "linux-raw-sys 0.4.14", + "linux-raw-sys", "windows-sys 0.59.0", ] -[[package]] -name = "rustls" -version = "0.23.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1" -dependencies = [ - "once_cell", - "ring", - "rustls-pki-types", - "rustls-webpki", - "subtle", - "zeroize", -] - -[[package]] -name = "rustls-pemfile" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" -dependencies = [ - "rustls-pki-types", -] - -[[package]] -name = "rustls-pki-types" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" - -[[package]] -name = "rustls-webpki" -version = "0.102.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" -dependencies = [ - "ring", - "rustls-pki-types", - "untrusted", -] - [[package]] name = "ryu" version = "1.0.18" @@ -3203,6 +3226,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "schemars" version = "0.8.22" @@ -3236,6 +3268,29 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.6.0", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "serde" version = "1.0.215" @@ -3411,16 +3466,6 @@ dependencies = [ "serde", ] -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "socket2" version = "0.5.8" @@ -3479,8 +3524,6 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4d8060b456358185f7d50c55d9b5066ad956956fddec42ee2e8567134a8936e" dependencies = [ - "async-io 1.13.0", - "async-std", "atoi", "byteorder", "bytes", @@ -3499,20 +3542,20 @@ dependencies = [ "indexmap 2.7.0", "log", "memchr", + "native-tls", "once_cell", "paste", "percent-encoding", - "rustls", - "rustls-pemfile", "serde", "serde_json", "sha2", "smallvec", "sqlformat", "thiserror 1.0.69", + "tokio", + "tokio-stream", "tracing", "url", - "webpki-roots", ] [[package]] @@ -3534,7 +3577,6 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1804e8a7c7865599c9c79be146dc8a9fd8cc86935fa641d3ea58e5f0688abaa5" dependencies = [ - "async-std", "dotenvy", "either", "heck 0.5.0", @@ -3551,6 +3593,7 @@ dependencies = [ "sqlx-sqlite", "syn 2.0.90", "tempfile", + "tokio", "url", ] @@ 
-3756,10 +3799,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" dependencies = [ "cfg-if", - "fastrand 2.3.0", + "fastrand", "getrandom", "once_cell", - "rustix 0.38.42", + "rustix", "windows-sys 0.59.0", ] @@ -3911,7 +3954,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.8", + "socket2", "tokio-macros", "windows-sys 0.52.0", ] @@ -3927,6 +3970,17 @@ dependencies = [ "syn 2.0.90", ] +[[package]] +name = "tokio-stream" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.13" @@ -4231,12 +4285,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - [[package]] name = "url" version = "2.5.4" @@ -4320,12 +4368,6 @@ dependencies = [ "quote", ] -[[package]] -name = "waker-fn" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7" - [[package]] name = "walkdir" version = "2.5.0" @@ -4425,15 +4467,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "webpki-roots" -version = "0.26.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" -dependencies = [ - "rustls-pki-types", -] - [[package]] name = "which" version = "4.4.2" @@ -4443,7 +4476,7 @@ dependencies = [ "either", "home", "once_cell", 
- "rustix 0.38.42", + "rustix", ] [[package]] @@ -4710,10 +4743,15 @@ name = "xtask_codegen" version = "0.0.0" dependencies = [ "anyhow", + "biome_js_factory", + "biome_js_formatter", + "biome_js_syntax", + "biome_rowan", "biome_string_case", "bpaf", "pglt_analyse", "pglt_analyser", + "pglt_workspace", "proc-macro2", "pulldown-cmark", "quote", diff --git a/Cargo.toml b/Cargo.toml index face77a8..3f7fbffa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,10 +17,14 @@ rust-version = "1.85.0" anyhow = "1.0.92" biome_deserialize = "0.6.0" biome_deserialize_macros = "0.6.0" +biome_js_factory = "0.5.7" +biome_js_formatter = "0.5.7" +biome_js_syntax = "0.5.7" +biome_rowan = "0.5.7" biome_string_case = "0.5.8" bpaf = { version = "0.9.15", features = ["derive"] } crossbeam = "0.8.4" -enumflags2 = "0.7.10" +enumflags2 = "0.7.11" ignore = "0.4.23" indexmap = { version = "2.6.0", features = ["serde"] } insta = "1.31.0" @@ -35,11 +39,10 @@ serde = "1.0.195" serde_json = "1.0.114" similar = "2.6.0" smallvec = { version = "1.13.2", features = ["union", "const_new", "serde"] } -sqlx = { version = "0.8.2", features = ["runtime-async-std", "tls-rustls", "postgres", "json"] } +sqlx = { version = "0.8.2", features = ["runtime-tokio", "tls-native-tls", "postgres", "json"] } syn = "1.0.109" termcolor = "1.4.1" tokio = { version = "1.40.0", features = ["full"] } -toml = "0.8.19" tower-lsp = "0.20.0" tracing = { version = "0.1.40", default-features = false, features = ["std"] } tracing-subscriber = "0.3.18" diff --git a/biome.jsonc b/biome.jsonc new file mode 100644 index 00000000..479e0e9f --- /dev/null +++ b/biome.jsonc @@ -0,0 +1,31 @@ +{ + "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json", + "vcs": { + "enabled": false, + "clientKind": "git", + "useIgnoreFile": false + }, + "files": { + "ignoreUnknown": false, + "ignore": [], + "include": ["packages/**/*"] + }, + "formatter": { + "enabled": true, + "indentStyle": "tab" + }, + "organizeImports": { + "enabled": true + }, + 
"linter": { + "enabled": true, + "rules": { + "recommended": true + } + }, + "javascript": { + "formatter": { + "quoteStyle": "double" + } + } +} diff --git a/bun.lock b/bun.lock new file mode 100644 index 00000000..1dea8b42 --- /dev/null +++ b/bun.lock @@ -0,0 +1,77 @@ +{ + "lockfileVersion": 1, + "workspaces": { + "": { + "name": "postgres_lsp", + "devDependencies": { + "@biomejs/biome": "1.9.4", + "@types/bun": "latest", + }, + "peerDependencies": { + "typescript": "^5", + }, + }, + "packages/@pglt/backend-jsonrpc": { + "name": "@pglt/backend-jsonrpc", + "optionalDependencies": { + "@pglt/cli-darwin-arm64": "", + "@pglt/cli-darwin-x64": "", + "@pglt/cli-linux-arm64": "", + "@pglt/cli-linux-arm64-musl": "", + "@pglt/cli-linux-x64": "", + "@pglt/cli-linux-x64-musl": "", + "@pglt/cli-win32-arm64": "", + "@pglt/cli-win32-x64": "", + }, + }, + "packages/@pglt/pglt": { + "name": "pglt", + "bin": { + "pglt": "bin/pglt", + }, + "optionalDependencies": { + "pglt-aarch64-apple-darwin": "", + "pglt-aarch64-linux-gnu": "", + "pglt-aarch64-windows-msvc": "", + "pglt-x86_64-apple-darwin": "", + "pglt-x86_64-linux-gnu": "", + "pglt-x86_64-windows-msvc": "", + }, + }, + }, + "packages": { + "@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", "@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="], + + "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.9.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw=="], + + "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.9.4", "", { "os": 
"darwin", "cpu": "x64" }, "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg=="], + + "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g=="], + + "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA=="], + + "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg=="], + + "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg=="], + + "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.9.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg=="], + + "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.9.4", "", { "os": "win32", "cpu": "x64" }, "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA=="], + + "@pglt/backend-jsonrpc": ["@pglt/backend-jsonrpc@workspace:packages/@pglt/backend-jsonrpc"], + + "@types/bun": ["@types/bun@1.2.5", "", { "dependencies": { "bun-types": "1.2.5" } }, "sha512-w2OZTzrZTVtbnJew1pdFmgV99H0/L+Pvw+z1P67HaR18MHOzYnTYOi6qzErhK8HyT+DB782ADVPPE92Xu2/Opg=="], + + "@types/node": ["@types/node@22.13.10", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-I6LPUvlRH+O6VRUqYOcMudhaIdUVWfsjnZavnsraHvpBwaEyMN29ry+0UVJhImYL16xsscu0aske3yA+uPOWfw=="], + + "@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, 
"sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], + + "bun-types": ["bun-types@1.2.5", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-3oO6LVGGRRKI4kHINx5PIdIgnLRb7l/SprhzqXapmoYkFl5m4j6EvALvbDVuuBFaamB46Ap6HCUxIXNLCGy+tg=="], + + "pglt": ["pglt@workspace:packages/@pglt/pglt"], + + "typescript": ["typescript@5.8.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ=="], + + "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], + } +} diff --git a/crates/pglt_completions/Cargo.toml b/crates/pglt_completions/Cargo.toml index 4c8e6ef2..f89aa56b 100644 --- a/crates/pglt_completions/Cargo.toml +++ b/crates/pglt_completions/Cargo.toml @@ -19,6 +19,7 @@ pglt_text_size.workspace = true pglt_schema_cache.workspace = true pglt_treesitter_queries.workspace = true +schemars = { workspace = true, optional = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } tree-sitter.workspace = true @@ -33,3 +34,6 @@ pglt_test_utils.workspace = true [lib] doctest = false + +[features] +schema = ["dep:schemars"] diff --git a/crates/pglt_completions/src/complete.rs b/crates/pglt_completions/src/complete.rs index 1a1fd59e..d077ef6a 100644 --- a/crates/pglt_completions/src/complete.rs +++ b/crates/pglt_completions/src/complete.rs @@ -19,6 +19,7 @@ pub struct CompletionParams<'a> { } #[derive(Debug, Default, Serialize, Deserialize)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct CompletionResult { pub(crate) items: Vec, } diff --git a/crates/pglt_completions/src/item.rs b/crates/pglt_completions/src/item.rs index d14485c2..8f0e3b95 100644 --- a/crates/pglt_completions/src/item.rs +++ b/crates/pglt_completions/src/item.rs @@ -1,6 +1,8 @@ use 
serde::{Deserialize, Serialize}; #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[serde(rename_all = "camelCase")] pub enum CompletionItemKind { Table, Function, @@ -8,6 +10,7 @@ pub enum CompletionItemKind { } #[derive(Debug, Serialize, Deserialize)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct CompletionItem { pub label: String, pub(crate) score: i32, diff --git a/crates/pglt_diagnostics/src/diagnostic.rs b/crates/pglt_diagnostics/src/diagnostic.rs index 0914e72e..949af89e 100644 --- a/crates/pglt_diagnostics/src/diagnostic.rs +++ b/crates/pglt_diagnostics/src/diagnostic.rs @@ -118,6 +118,7 @@ pub trait Diagnostic: Debug { Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Default, )] #[serde(rename_all = "camelCase")] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] /// The severity to associate to a diagnostic. pub enum Severity { /// Reports a hint. @@ -165,6 +166,7 @@ impl Display for Severity { /// and help with the implementation of `serde` and `schemars` for tags. 
#[derive(Debug, Copy, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[bitflags] #[repr(u8)] pub(super) enum DiagnosticTag { diff --git a/crates/pglt_diagnostics/src/display/backtrace.rs b/crates/pglt_diagnostics/src/display/backtrace.rs index ff9b1d98..b3164187 100644 --- a/crates/pglt_diagnostics/src/display/backtrace.rs +++ b/crates/pglt_diagnostics/src/display/backtrace.rs @@ -91,6 +91,17 @@ impl<'de> serde::Deserialize<'de> for Backtrace { } } +#[cfg(feature = "schema")] +impl schemars::JsonSchema for Backtrace { + fn schema_name() -> String { + String::from("Backtrace") + } + + fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { + >::json_schema(r#gen) + } +} + /// Internal representation of a [Backtrace], can be either a native backtrace /// instance or a vector of serialized frames. #[derive(Clone, Debug)] @@ -292,6 +303,11 @@ pub(super) fn print_backtrace( /// Serializable representation of a backtrace frame. #[derive(Clone, Debug, Serialize, Deserialize)] +#[cfg_attr( + feature = "schema", + derive(schemars::JsonSchema), + schemars(rename = "BacktraceFrame") +)] #[cfg_attr(test, derive(Eq, PartialEq))] struct SerializedFrame { ip: u64, @@ -309,6 +325,11 @@ impl From<&'_ backtrace::BacktraceFrame> for SerializedFrame { /// Serializable representation of a backtrace frame symbol. #[derive(Clone, Debug, Serialize, Deserialize)] +#[cfg_attr( + feature = "schema", + derive(schemars::JsonSchema), + schemars(rename = "BacktraceSymbol") +)] #[cfg_attr(test, derive(Eq, PartialEq))] struct SerializedSymbol { name: Option, diff --git a/crates/pglt_diagnostics/src/location.rs b/crates/pglt_diagnostics/src/location.rs index 3b847975..390484fc 100644 --- a/crates/pglt_diagnostics/src/location.rs +++ b/crates/pglt_diagnostics/src/location.rs @@ -39,6 +39,7 @@ impl Eq for Location<'_> {} /// Represents the resource a diagnostic is associated with. 
#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(rename_all = "camelCase")] pub enum Resource

{ /// The diagnostic is related to the content of the command line arguments. diff --git a/crates/pglt_diagnostics/src/serde.rs b/crates/pglt_diagnostics/src/serde.rs index aa67ebde..c4699b6a 100644 --- a/crates/pglt_diagnostics/src/serde.rs +++ b/crates/pglt_diagnostics/src/serde.rs @@ -15,7 +15,8 @@ use crate::{ /// Serializable representation for a [Diagnostic](super::Diagnostic). #[derive(Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(not(target_arch = "wasm32"), serde(rename_all = "camelCase"))] +#[serde(rename_all = "camelCase")] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[cfg_attr(test, derive(Eq, PartialEq))] pub struct Diagnostic { category: Option<&'static Category>, @@ -137,7 +138,8 @@ impl std::fmt::Display for PrintDescription<'_, D } #[derive(Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(not(target_arch = "wasm32"), serde(rename_all = "camelCase"))] +#[serde(rename_all = "camelCase")] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[cfg_attr(test, derive(Eq, PartialEq))] struct Location { path: Option>, @@ -160,6 +162,7 @@ impl From> for Location { /// Implementation of [Visitor] collecting serializable [Advice] into a vector. #[derive(Clone, Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[cfg_attr(test, derive(Eq, PartialEq))] struct Advices { advices: Vec, @@ -246,6 +249,7 @@ impl super::Advices for Advices { /// advice types. 
#[derive(Clone, Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[cfg_attr(test, derive(Eq, PartialEq))] enum Advice { Log(LogCategory, MarkupBuf), @@ -354,6 +358,17 @@ impl<'de> Deserialize<'de> for DiagnosticTags { } } +#[cfg(feature = "schema")] +impl schemars::JsonSchema for DiagnosticTags { + fn schema_name() -> String { + String::from("DiagnosticTags") + } + + fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { + >::json_schema(r#gen) + } +} + #[cfg(test)] mod tests { use std::io; diff --git a/crates/pglt_lsp/src/handlers/completions.rs b/crates/pglt_lsp/src/handlers/completions.rs index 0e432909..c5c44ef5 100644 --- a/crates/pglt_lsp/src/handlers/completions.rs +++ b/crates/pglt_lsp/src/handlers/completions.rs @@ -26,22 +26,23 @@ pub fn get_completions( pglt_lsp_converters::negotiated_encoding(client_capabilities), )?; - let completion_result = match session - .workspace - .get_completions(workspace::CompletionParams { - path, - position: offset, - }) { - Ok(result) => result, - Err(e) => match e { - WorkspaceError::DatabaseConnectionError(_) => { - return Ok(lsp_types::CompletionResponse::Array(vec![])); - } - _ => { - return Err(e.into()); - } - }, - }; + let completion_result = + match session + .workspace + .get_completions(workspace::GetCompletionsParams { + path, + position: offset, + }) { + Ok(result) => result, + Err(e) => match e { + WorkspaceError::DatabaseConnectionError(_) => { + return Ok(lsp_types::CompletionResponse::Array(vec![])); + } + _ => { + return Err(e.into()); + } + }, + }; let items: Vec = completion_result .into_iter() diff --git a/crates/pglt_workspace/Cargo.toml b/crates/pglt_workspace/Cargo.toml index c746c0ef..c337fe9a 100644 --- a/crates/pglt_workspace/Cargo.toml +++ b/crates/pglt_workspace/Cargo.toml @@ -38,9 +38,22 @@ tracing = { workspace = true, features = ["attributes", "log"] tree-sitter.workspace = 
true tree_sitter_sql.workspace = true +biome_js_factory = { workspace = true, optional = true } +biome_js_syntax = { workspace = true, optional = true } +biome_rowan = { workspace = true, optional = true } [features] -schema = ["dep:schemars", "pglt_configuration/schema", "pglt_fs/schema"] +schema = [ + "dep:schemars", + "dep:biome_rowan", + "dep:biome_js_syntax", + "dep:biome_js_factory", + "pglt_configuration/schema", + "pglt_diagnostics/schema", + "pglt_fs/schema", + "pglt_analyse/schema", + "pglt_completions/schema", +] [dev-dependencies] tempfile = "3.15.0" diff --git a/crates/pglt_workspace/src/lib.rs b/crates/pglt_workspace/src/lib.rs index 6985d09f..7f4c221e 100644 --- a/crates/pglt_workspace/src/lib.rs +++ b/crates/pglt_workspace/src/lib.rs @@ -9,6 +9,8 @@ pub mod dome; pub mod matcher; pub mod settings; pub mod workspace; +#[cfg(feature = "schema")] +pub mod workspace_types; pub use crate::diagnostics::{TransportError, WorkspaceError}; pub use crate::workspace::Workspace; diff --git a/crates/pglt_workspace/src/workspace.rs b/crates/pglt_workspace/src/workspace.rs index 1bd67cda..d5f74d51 100644 --- a/crates/pglt_workspace/src/workspace.rs +++ b/crates/pglt_workspace/src/workspace.rs @@ -46,7 +46,7 @@ pub struct PullDiagnosticsParams { #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct CompletionParams { +pub struct GetCompletionsParams { /// The File for which a completion is requested. pub path: PgLTPath, /// The Cursor position in the file for which a completion is requested. 
@@ -117,7 +117,7 @@ pub trait Workspace: Send + Sync + RefUnwindSafe { fn get_completions( &self, - params: CompletionParams, + params: GetCompletionsParams, ) -> Result; /// Update the global settings for this workspace @@ -213,75 +213,6 @@ impl<'app, W: Workspace + ?Sized> FileGuard<'app, W> { skip, }) } - // - // pub fn pull_actions( - // &self, - // range: Option, - // only: Vec, - // skip: Vec, - // suppression_reason: Option, - // ) -> Result { - // self.workspace.pull_actions(PullActionsParams { - // path: self.path.clone(), - // range, - // only, - // skip, - // suppression_reason, - // }) - // } - // - // pub fn format_file(&self) -> Result { - // self.workspace.format_file(FormatFileParams { - // path: self.path.clone(), - // }) - // } - // - // pub fn format_range(&self, range: TextRange) -> Result { - // self.workspace.format_range(FormatRangeParams { - // path: self.path.clone(), - // range, - // }) - // } - // - // pub fn format_on_type(&self, offset: TextSize) -> Result { - // self.workspace.format_on_type(FormatOnTypeParams { - // path: self.path.clone(), - // offset, - // }) - // } - // - // pub fn fix_file( - // &self, - // fix_file_mode: FixFileMode, - // should_format: bool, - // rule_categories: RuleCategories, - // only: Vec, - // skip: Vec, - // suppression_reason: Option, - // ) -> Result { - // self.workspace.fix_file(FixFileParams { - // path: self.path.clone(), - // fix_file_mode, - // should_format, - // only, - // skip, - // rule_categories, - // suppression_reason, - // }) - // } - // - // pub fn organize_imports(&self) -> Result { - // self.workspace.organize_imports(OrganizeImportsParams { - // path: self.path.clone(), - // }) - // } - // - // pub fn search_pattern(&self, pattern: &PatternId) -> Result { - // self.workspace.search_pattern(SearchPatternParams { - // path: self.path.clone(), - // pattern: pattern.clone(), - // }) - // } } impl Drop for FileGuard<'_, W> { diff --git a/crates/pglt_workspace/src/workspace/client.rs 
b/crates/pglt_workspace/src/workspace/client.rs index fbad3f41..c8a2fd60 100644 --- a/crates/pglt_workspace/src/workspace/client.rs +++ b/crates/pglt_workspace/src/workspace/client.rs @@ -126,7 +126,7 @@ where fn get_completions( &self, - params: super::CompletionParams, + params: super::GetCompletionsParams, ) -> Result { self.request("pglt/get_completions", params) } diff --git a/crates/pglt_workspace/src/workspace/server.rs b/crates/pglt_workspace/src/workspace/server.rs index bb90a236..b49db1a1 100644 --- a/crates/pglt_workspace/src/workspace/server.rs +++ b/crates/pglt_workspace/src/workspace/server.rs @@ -397,7 +397,7 @@ impl Workspace for WorkspaceServer { #[tracing::instrument(level = "debug", skip(self))] fn get_completions( &self, - params: super::CompletionParams, + params: super::GetCompletionsParams, ) -> Result { tracing::debug!( "Getting completions for file {:?} at position {:?}", diff --git a/crates/pglt_workspace/src/workspace_types.rs b/crates/pglt_workspace/src/workspace_types.rs new file mode 100644 index 00000000..02215e79 --- /dev/null +++ b/crates/pglt_workspace/src/workspace_types.rs @@ -0,0 +1,471 @@ +//! 
Utility functions to help with generating bindings for the [Workspace] API + +use std::collections::VecDeque; + +use biome_js_syntax::{AnyJsDeclaration, AnyTsTupleTypeElement}; +use rustc_hash::FxHashSet; +use schemars::{ + JsonSchema, + r#gen::{SchemaGenerator, SchemaSettings}, + schema::{InstanceType, RootSchema, Schema, SchemaObject, SingleOrVec}, +}; +use serde_json::Value; + +use crate::{WorkspaceError, workspace::*}; +use biome_js_factory::{ + make, + syntax::{AnyJsObjectMemberName, AnyTsName, AnyTsType, AnyTsTypeMember, T}, +}; +use biome_rowan::AstSeparatedList; + +/// Manages a queue of type definitions that need to be generated +#[derive(Default)] +pub struct ModuleQueue<'a> { + /// Set of type names that have already been emitted + visited: FxHashSet<&'a str>, + /// Queue of type names and definitions that need to be generated + queue: VecDeque<(&'a str, &'a SchemaObject)>, +} + +impl<'a> ModuleQueue<'a> { + /// Add a type definition to the queue if it hasn't been emitted already + fn push_back(&mut self, item: (&'a str, &'a SchemaObject)) { + if self.visited.insert(item.0) { + self.queue.push_back(item); + } + } + + /// Pull a type name and definition from the queue + fn pop_front(&mut self) -> Option<(&'a str, &'a SchemaObject)> { + self.queue.pop_front() + } + + pub fn visited(&self) -> &FxHashSet<&'a str> { + &self.visited + } +} + +/// Generate a [TsType] node from the `instance_type` of a [SchemaObject] +fn instance_type<'a>( + queue: &mut ModuleQueue<'a>, + root_schema: &'a RootSchema, + schema: &'a SchemaObject, + ty: InstanceType, +) -> AnyTsType { + match ty { + // If the instance type is an object, generate a TS object type with the corresponding properties + InstanceType::Object => { + let object = schema.object.as_deref().unwrap(); + AnyTsType::from(make::ts_object_type( + make::token(T!['{']), + make::ts_type_member_list(object.properties.iter().map(|(property, schema)| { + let (ts_type, optional, description) = schema_type(queue, 
root_schema, schema); + assert!(!optional, "optional nested types are not supported"); + + let mut property = make::ident(property); + if let Some(description) = description { + let comment = format!("/**\n\t* {description} \n\t */"); + let trivia = vec![ + (biome_js_syntax::TriviaPieceKind::Newline, "\n"), + ( + biome_js_syntax::TriviaPieceKind::MultiLineComment, + comment.as_str(), + ), + (biome_js_syntax::TriviaPieceKind::Newline, "\n"), + ]; + property = property.with_leading_trivia(trivia); + } + + AnyTsTypeMember::from( + make::ts_property_signature_type_member(AnyJsObjectMemberName::from( + make::js_literal_member_name(property), + )) + .with_type_annotation(make::ts_type_annotation(make::token(T![:]), ts_type)) + .build(), + ) + })), + make::token(T!['}']), + )) + } + // If the instance type is an array, generate a TS array type with the corresponding item type + InstanceType::Array => { + let array = schema.array.as_deref().unwrap(); + let items = array.items.as_ref().unwrap(); + match items { + SingleOrVec::Single(schema) => { + let (ts_type, optional, _) = schema_type(queue, root_schema, schema); + assert!(!optional, "optional nested types are not supported"); + + AnyTsType::from(make::ts_array_type( + ts_type, + make::token(T!['[']), + make::token(T![']']), + )) + } + SingleOrVec::Vec(items) => AnyTsType::from(make::ts_tuple_type( + make::token(T!['[']), + make::ts_tuple_type_element_list( + items.iter().map(|schema| { + let (ts_type, optional, _) = schema_type(queue, root_schema, schema); + assert!(!optional, "optional nested types are not supported"); + AnyTsTupleTypeElement::AnyTsType(ts_type) + }), + items.iter().map(|_| make::token(T![,])), + ), + make::token(T![']']), + )), + } + } + + // Map native types to the corresponding TS type + InstanceType::Null => AnyTsType::from(make::ts_null_literal_type(make::token(T![null]))), + InstanceType::Boolean => AnyTsType::from(make::ts_boolean_type(make::token(T![boolean]))), + InstanceType::String => 
AnyTsType::from(make::ts_string_type(make::token(T![string]))), + InstanceType::Number | InstanceType::Integer => { + AnyTsType::from(make::ts_number_type(make::token(T![number]))) + } + } +} + +/// Generate a literal [TsType] from a `serde_json` [Value] +fn value_type(value: &Value) -> AnyTsType { + match value { + Value::Null => AnyTsType::from(make::ts_null_literal_type(make::token(T![null]))), + Value::Bool(true) => AnyTsType::from(make::ts_boolean_literal_type(make::token(T![true]))), + Value::Bool(false) => { + AnyTsType::from(make::ts_boolean_literal_type(make::token(T![false]))) + } + Value::Number(value) => AnyTsType::from( + make::ts_number_literal_type(make::js_number_literal(value.as_f64().unwrap())).build(), + ), + Value::String(value) => { + AnyTsType::from(make::ts_string_literal_type(make::js_string_literal(value))) + } + Value::Array(_) => unimplemented!(), + Value::Object(_) => unimplemented!(), + } +} + +/// Generate a union [TsType] node from a list of [TsType]s, +/// flattening any nested union type the iterator may emit +fn make_union_type(items: impl IntoIterator) -> AnyTsType { + let mut result = Vec::new(); + + for item in items { + if let AnyTsType::TsUnionType(union_type) = item { + for item in union_type.types().iter() { + result.push(item.unwrap()); + } + } else { + result.push(item); + } + } + + let separators = (0..result.len().saturating_sub(1)).map(|_| make::token(T![|])); + AnyTsType::from( + make::ts_union_type(make::ts_union_type_variant_list(result, separators)).build(), + ) +} + +/// Generate a [TsType] node from a [SchemaObject], returning the generated +/// TypeScript type along with a boolean flag indicating whether the type is +/// considered "optional" in the schema +fn schema_object_type<'a>( + queue: &mut ModuleQueue<'a>, + root_schema: &'a RootSchema, + schema: &'a SchemaObject, +) -> (AnyTsType, bool, Option<&'a String>) { + // Start by detecting enum types by inspecting the `enum_values` field, i + // the field is set 
return a union type generated from the literal enum values + let description = schema + .metadata + .as_ref() + .and_then(|s| s.description.as_ref()); + let ts_type = schema + .enum_values + .as_deref() + .map(|enum_values| make_union_type(enum_values.iter().map(value_type))) + // If the type isn't an enum, inspect its `instance_type` field, if the + // field is set return a type annotation for the corresponding type + .or_else(|| { + Some(match schema.instance_type.as_ref()? { + SingleOrVec::Single(ty) => instance_type(queue, root_schema, schema, **ty), + SingleOrVec::Vec(types) => make_union_type( + types + .iter() + .map(|ty| instance_type(queue, root_schema, schema, *ty)), + ), + }) + }) + // Otherwise inspect the `reference` field of the schema, if its set return + // a TS reference type and add the corresponding type to the queue + .or_else(|| { + let reference = schema.reference.as_deref()?; + let key = reference.trim_start_matches("#/components/schemas/"); + match root_schema.definitions.get(key) { + Some(Schema::Bool(_)) => unimplemented!(), + Some(Schema::Object(schema)) => queue.push_back((key, schema)), + None => panic!("definition for type {key:?} not found"), + } + + Some(AnyTsType::from( + make::ts_reference_type(AnyTsName::from(make::js_reference_identifier( + make::ident(key), + ))) + .build(), + )) + }) + // Finally try to inspect the subschemas for this type + .or_else(|| { + let subschemas = schema.subschemas.as_deref()?; + // First try to inspect the `all_of` list of subschemas, if it's + // set generate an intersection type from it + subschemas + .all_of + .as_deref() + .map(|all_of| { + AnyTsType::from( + make::ts_intersection_type(make::ts_intersection_type_element_list( + all_of.iter().map(|ty| { + let (ts_type, optional, _) = schema_type(queue, root_schema, ty); + assert!(!optional, "optional nested types are not supported"); + ts_type + }), + (0..all_of.len().saturating_sub(1)).map(|_| make::token(T![&])), + )) + .build(), + ) + }) + // 
Otherwise try to inspect the `any_of` list of subschemas, and + // generate the corresponding union type for it + .or_else(|| { + let any_of = subschemas + .any_of + .as_deref() + .or(subschemas.one_of.as_deref())?; + + Some(make_union_type(any_of.iter().map(|ty| { + let (ts_type, optional, _) = schema_type(queue, root_schema, ty); + assert!(!optional, "optional nested types are not supported"); + ts_type + }))) + }) + }) + .unwrap_or_else(|| { + // this is temporary workaround to fix the `options` field, which is not used at the moment + AnyTsType::from(make::ts_any_type(make::token(T![any]))) + }); + + // Types are considered "optional" in the serialization protocol if they + // have the `nullable` OpenAPI extension property, or if they have a default value + let is_nullable = matches!(schema.extensions.get("nullable"), Some(Value::Bool(true))); + let has_defaults = schema + .metadata + .as_ref() + .is_some_and(|metadata| metadata.default.is_some()); + + (ts_type, is_nullable || has_defaults, description) +} + +/// Generate a [TsType] node from a [Schema], returning the generated type +/// along with a boolean flag indicating whether the type is considered +/// "optional" in the schema +fn schema_type<'a>( + queue: &mut ModuleQueue<'a>, + root_schema: &'a RootSchema, + schema: &'a Schema, +) -> (AnyTsType, bool, Option<&'a String>) { + match schema { + // Types defined as `true` in the schema always pass validation, + // map them to the `any` type + Schema::Bool(true) => ( + AnyTsType::from(make::ts_any_type(make::token(T![any]))), + true, + None, + ), + // Types defined as `false` in the schema never pass validation, + // map them to the `never` type + Schema::Bool(false) => ( + AnyTsType::from(make::ts_never_type(make::token(T![never]))), + false, + None, + ), + Schema::Object(schema_object) => schema_object_type(queue, root_schema, schema_object), + } +} + +/// Generate and emit all the types defined in `root_schema` into the `module` +pub fn 
generate_type<'a>( + module: &mut Vec<(AnyJsDeclaration, Option<&'a String>)>, + queue: &mut ModuleQueue<'a>, + root_schema: &'a RootSchema, +) -> AnyTsType { + // Read the root type of the schema and push it to the queue + let root_name = root_schema + .schema + .metadata + .as_deref() + .and_then(|metadata| metadata.title.as_deref()) + .unwrap(); + + match root_name { + "Null" => return AnyTsType::TsVoidType(make::ts_void_type(make::token(T![void]))), + "Boolean" => { + return AnyTsType::TsBooleanType(make::ts_boolean_type(make::token(T![boolean]))); + } + "String" => return AnyTsType::TsStringType(make::ts_string_type(make::token(T![string]))), + _ => {} + } + + queue.push_back((root_name, &root_schema.schema)); + + while let Some((name, schema)) = queue.pop_front() { + // Detect if the type being emitted is an object, emit it as an + // interface definition if that's the case + let is_interface = schema.instance_type.as_ref().map_or_else( + || schema.object.is_some(), + |instance_type| { + if let SingleOrVec::Single(instance_type) = instance_type { + matches!(**instance_type, InstanceType::Object) + } else { + false + } + }, + ); + + if is_interface { + let mut members = Vec::new(); + + // Create a property signature member in the interface for each + // property of the corresponding schema object + let object = schema.object.as_deref().unwrap(); + for (property, schema) in &object.properties { + let (ts_type, optional, description) = schema_type(queue, root_schema, schema); + + let mut property = make::ident(property); + if let Some(description) = description { + let comment = format!("/**\n\t* {description} \n\t */"); + let trivia = vec![ + (biome_js_syntax::TriviaPieceKind::Newline, "\n"), + ( + biome_js_syntax::TriviaPieceKind::MultiLineComment, + comment.as_str(), + ), + (biome_js_syntax::TriviaPieceKind::Newline, "\n"), + ]; + property = property.with_leading_trivia(trivia); + } + + let mut builder = make::ts_property_signature_type_member( + 
AnyJsObjectMemberName::from(make::js_literal_member_name(property)), + ) + .with_type_annotation(make::ts_type_annotation(make::token(T![:]), ts_type)); + + if optional { + builder = builder.with_optional_token(make::token(T![?])); + } + + members.push(AnyTsTypeMember::from(builder.build())); + } + + let description = schema + .metadata + .as_ref() + .and_then(|s| s.description.as_ref()); + let current_module = AnyJsDeclaration::from( + make::ts_interface_declaration( + make::token(T![interface]), + make::ts_identifier_binding(make::ident(name)), + make::token(T!['{']), + make::ts_type_member_list(members), + make::token(T!['}']), + ) + .build(), + ); + module.push((current_module, description)); + } else { + // If the schema for this type is not an object, emit it as a type alias + let (ts_type, optional, description) = schema_object_type(queue, root_schema, schema); + + assert!(!optional, "optional nested types are not supported"); + + let current_module = AnyJsDeclaration::from( + make::ts_type_alias_declaration( + make::token(T![type]), + make::ts_identifier_binding(make::ident(name)), + make::token(T![=]), + ts_type, + ) + .build(), + ); + module.push((current_module, description)); + } + } + + AnyTsType::TsReferenceType( + make::ts_reference_type(AnyTsName::JsReferenceIdentifier( + make::js_reference_identifier(make::ident(root_name)), + )) + .build(), + ) +} + +/// Signature metadata for a [Workspace] method +pub struct WorkspaceMethod { + /// Name of the method + pub name: &'static str, + /// Schema for the parameters object of the method + pub params: RootSchema, + /// Schema for the result object of the method + pub result: RootSchema, +} + +impl WorkspaceMethod { + /// Construct a [WorkspaceMethod] from a name, a parameter type and a result type + fn of(name: &'static str) -> Self + where + P: JsonSchema, + R: JsonSchema, + { + let params = SchemaGenerator::from(SchemaSettings::openapi3()).root_schema_for::

(); + let result = SchemaGenerator::from(SchemaSettings::openapi3()).root_schema_for::(); + Self { + name, + params, + result, + } + } + + /// Construct a [WorkspaceMethod] from a name and a function pointer + fn from_method( + name: &'static str, + _func: fn(T, P) -> Result, + ) -> Self + where + P: JsonSchema, + R: JsonSchema, + { + Self::of::(name) + } +} + +/// Helper macro for generating an OpenAPI schema for a type implementing JsonSchema +macro_rules! workspace_method { + ($name:ident) => { + WorkspaceMethod::from_method(stringify!($name), ::$name) + }; +} + +/// Returns a list of signatures for all the methods in the [Workspace] trait +pub fn methods() -> [WorkspaceMethod; 8] { + [ + workspace_method!(is_path_ignored), + workspace_method!(get_file_content), + workspace_method!(pull_diagnostics), + workspace_method!(get_completions), + workspace_method!(update_settings), + workspace_method!(open_file), + workspace_method!(change_file), + workspace_method!(close_file), + ] +} diff --git a/justfile index 64ee96ac..fcfb93e0 100644 --- a/justfile +++ b/justfile @@ -11,33 +11,34 @@ install-tools: cargo install cargo-binstall cargo binstall cargo-insta taplo-cli cargo binstall --git "https://github.com/astral-sh/uv" uv - + bun install # Upgrades the tools needed to develop upgrade-tools: cargo install cargo-binstall --force cargo binstall cargo-insta taplo-cli --force cargo binstall --git "https://github.com/astral-sh/uv" uv --force + bun install # Generates code generated files for the linter gen-lint: cargo run -p xtask_codegen -- analyser cargo run -p xtask_codegen -- configuration - # cargo codegen-migrate - # just gen-bindings + cargo run -p xtask_codegen -- bindings cargo run -p rules_check + cargo run -p docs_codegen just format # Creates a new lint rule in the given path, with the given name. Name has to be camel case. Group should be lowercase.
new-lintrule group rulename: cargo run -p xtask_codegen -- new-lintrule --category=lint --name={{rulename}} --group={{group}} just gen-lint - # just documentation -# Format Rust files and TOML files +# Format Rust, JS and TOML files format: cargo fmt taplo format + bun biome format --write [unix] _touch file: @@ -63,10 +64,12 @@ test-doc: lint: cargo clippy cargo run -p rules_check + bun biome lint lint-fix: cargo clippy --fix cargo run -p rules_check + bun biome lint --write serve-docs: uv sync @@ -77,6 +80,7 @@ ready: git diff --exit-code --quiet cargo run -p xtask_codegen -- configuration cargo run -p docs_codegen + cargo run -p xtask_codegen -- bindings just lint-fix just format git diff --exit-code --quiet diff --git a/package.json b/package.json new file mode 100644 index 00000000..82b06731 --- /dev/null +++ b/package.json @@ -0,0 +1,17 @@ +{ + "name": "@pglt/monorepo", + "version": "0.0.0", + "private": true, + "devDependencies": { + "@biomejs/biome": "1.9.4", + "@types/bun": "latest" + }, + "peerDependencies": { + "typescript": "^5" + }, + "workspaces": ["packages/@pglt/pglt", "packages/@pglt/backend-jsonrpc"], + "keywords": [], + "author": "Supabase Community", + "license": "MIT OR Apache-2.0", + "packageManager": "bun@1" +} diff --git a/packages/@pglt/backend-jsonrpc/package.json b/packages/@pglt/backend-jsonrpc/package.json new file mode 100644 index 00000000..838f796c --- /dev/null +++ b/packages/@pglt/backend-jsonrpc/package.json @@ -0,0 +1,32 @@ +{ + "name": "@pglt/backend-jsonrpc", + "version": "", + "main": "dist/index.js", + "scripts": { + "test": "bun test", + "test:ci": "bun build && bun test", + "build": "bun build ./src/index.ts --outdir ./dist --target node" + }, + "files": ["dist/", "README.md"], + "repository": { + "type": "git", + "url": "git+https://github.com/supabase-community/postgres_lsp.git", + "directory": "packages/@pglt/backend-jsonrpc" + }, + "author": "Supabase Community", + "bugs": 
"https://github.com/supabase-community/postgres_lsp/issues", + "description": "Bindings to the JSON-RPC Workspace API of the Postgres Language Tools daemon", + "keywords": ["TypeScript", "Postgres"], + "license": "MIT", + "publishConfig": { + "provenance": true + }, + "optionalDependencies": { + "@pglt/cli-win32-x64": "", + "@pglt/cli-win32-arm64": "", + "@pglt/cli-darwin-x64": "", + "@pglt/cli-darwin-arm64": "", + "@pglt/cli-linux-x64": "", + "@pglt/cli-linux-arm64": "" + } +} diff --git a/packages/@pglt/backend-jsonrpc/src/command.ts b/packages/@pglt/backend-jsonrpc/src/command.ts new file mode 100644 index 00000000..7249002e --- /dev/null +++ b/packages/@pglt/backend-jsonrpc/src/command.ts @@ -0,0 +1,36 @@ +/** + * Gets the path of the binary for the current platform + * + * @returns Filesystem path to the binary, or null if no prebuilt distribution exists for the current platform + */ +export function getCommand(): string | null { + const { platform, arch } = process; + + type PlatformPaths = { + [P in NodeJS.Platform]?: { + [A in NodeJS.Architecture]?: string; + }; + }; + + const PLATFORMS: PlatformPaths = { + win32: { + x64: "@pglt/cli-win32-x64/pglt.exe", + arm64: "@pglt/cli-win32-arm64/pglt.exe", + }, + darwin: { + x64: "@pglt/cli-darwin-x64/pglt", + arm64: "@pglt/cli-darwin-arm64/pglt", + }, + linux: { + x64: "@pglt/cli-linux-x64/pglt", + arm64: "@pglt/cli-linux-arm64/pglt", + }, + }; + + const binPath = PLATFORMS?.[platform]?.[arch]; + if (!binPath) { + return null; + } + + return require.resolve(binPath); +} diff --git a/packages/@pglt/backend-jsonrpc/src/index.ts b/packages/@pglt/backend-jsonrpc/src/index.ts new file mode 100644 index 00000000..5d4dd41b --- /dev/null +++ b/packages/@pglt/backend-jsonrpc/src/index.ts @@ -0,0 +1,46 @@ +import { getCommand } from "./command"; +import { createSocket } from "./socket"; +import { Transport } from "./transport"; +import { type Workspace, createWorkspace as wrapTransport } from "./workspace"; + +/** + * Create
an instance of the Workspace client connected to a remote daemon + * instance through the JSON-RPC protocol + * + * @returns A Workspace client, or null if the underlying platform is not supported + */ +export async function createWorkspace(): Promise { + const command = getCommand(); + if (!command) { + return null; + } + + return createWorkspaceWithBinary(command); +} + +/** + * Create an instance of the Workspace client connected to a remote daemon + * instance through the JSON-RPC protocol, using the provided command to spawn + * the daemon if necessary + * + * @param command Path to the binary + * @returns A Workspace client, or null if the underlying platform is not supported + */ +export async function createWorkspaceWithBinary( + command: string, +): Promise { + const socket = await createSocket(command); + const transport = new Transport(socket); + + await transport.request("initialize", { + capabilities: {}, + client_info: { + name: "@pglt/backend-jsonrpc", + version: "0.0.0", + }, + }); + + return wrapTransport(transport); +} + +export * from "./workspace"; diff --git a/packages/@pglt/backend-jsonrpc/src/socket.ts b/packages/@pglt/backend-jsonrpc/src/socket.ts new file mode 100644 index 00000000..6fd2902f --- /dev/null +++ b/packages/@pglt/backend-jsonrpc/src/socket.ts @@ -0,0 +1,47 @@ +import { spawn } from "node:child_process"; +import { type Socket, connect } from "node:net"; + +function getSocket(command: string): Promise { + return new Promise((resolve, reject) => { + const process = spawn(command, ["__print_socket"], { + stdio: "pipe", + }); + + process.on("error", reject); + + let pipeName = ""; + process.stdout.on("data", (data) => { + pipeName += data.toString("utf-8"); + }); + + process.on("exit", (code) => { + if (code === 0) { + resolve(pipeName.trimEnd()); + } else { + reject( + new Error( + `Command '${command} __print_socket' exited with code ${code}`, + ), + ); + } + }); + }); +} + +/** + * Ensure the daemon server is running and create a 
Socket connected to the RPC channel + * + * @param command Path to the daemon binary + * @returns Socket instance connected to the daemon + */ +export async function createSocket(command: string): Promise { + const path = await getSocket(command); + const socket = connect(path); + + await new Promise((resolve, reject) => { + socket.once("error", reject); + socket.once("ready", resolve); + }); + + return socket; +} diff --git a/packages/@pglt/backend-jsonrpc/src/transport.ts b/packages/@pglt/backend-jsonrpc/src/transport.ts new file mode 100644 index 00000000..b1cdad44 --- /dev/null +++ b/packages/@pglt/backend-jsonrpc/src/transport.ts @@ -0,0 +1,293 @@ +interface Socket { + on(event: "data", fn: (data: Buffer) => void): void; + write(data: Buffer): void; + destroy(): void; +} + +enum ReaderStateKind { + Header = 0, + Body = 1, +} + +interface ReaderStateHeader { + readonly kind: ReaderStateKind.Header; + contentLength?: number; + contentType?: string; +} + +interface ReaderStateBody { + readonly kind: ReaderStateKind.Body; + readonly contentLength: number; + readonly contentType?: string; +} + +type ReaderState = ReaderStateHeader | ReaderStateBody; + +interface JsonRpcRequest { + jsonrpc: "2.0"; + id: number; + method: string; + params: unknown; +} + +function isJsonRpcRequest(message: JsonRpcMessage): message is JsonRpcRequest { + return ( + "id" in message && + typeof message.id === "number" && + "method" in message && + typeof message.method === "string" && + "params" in message + ); +} + +interface JsonRpcNotification { + jsonrpc: "2.0"; + method: string; + params: unknown; +} + +function isJsonRpcNotification( + message: JsonRpcMessage, +): message is JsonRpcNotification { + return ( + !("id" in message) && + "method" in message && + typeof message.method === "string" && + "params" in message + ); +} + +type JsonRpcResponse = + | { + jsonrpc: "2.0"; + id: number; + result: unknown; + } + | { + jsonrpc: "2.0"; + id: number; + error: unknown; + }; + +function 
isJsonRpcResponse( + message: JsonRpcMessage, +): message is JsonRpcResponse { + return ( + "id" in message && + typeof message.id === "number" && + !("method" in message) && + ("result" in message || "error" in message) + ); +} + +type JsonRpcMessage = JsonRpcRequest | JsonRpcNotification | JsonRpcResponse; + +function isJsonRpcMessage(message: unknown): message is JsonRpcMessage { + return ( + typeof message === "object" && + message !== null && + "jsonrpc" in message && + message.jsonrpc === "2.0" + ); +} + +interface PendingRequest { + resolve(result: unknown): void; + reject(error: unknown): void; +} + +const MIME_JSONRPC = "application/vscode-jsonrpc"; + +/** + * Implements the daemon server JSON-RPC protocol over a Socket instance + */ +export class Transport { + /** + * Counter incremented for each outgoing request to generate a unique ID + */ + private nextRequestId = 0; + + /** + * Storage for the promise resolver functions of pending requests, + * keyed by ID of the request + */ + private pendingRequests: Map = new Map(); + + constructor(private socket: Socket) { + socket.on("data", (data) => { + this.processIncoming(data); + }); + } + + /** + * Send a request to the remote server + * + * @param method Name of the remote method to call + * @param params Parameters object the remote method should be called with + * @return Promise resolving with the value returned by the remote method, or rejecting with an RPC error if the remote call failed + */ + // biome-ignore lint/suspicious/noExplicitAny: if i change it to Promise typescript breaks + request(method: string, params: unknown): Promise { + return new Promise((resolve, reject) => { + const id = this.nextRequestId++; + this.pendingRequests.set(id, { resolve, reject }); + this.sendMessage({ + jsonrpc: "2.0", + id, + method, + params, + }); + }); + } + + /** + * Send a notification message to the remote server + * + * @param method Name of the remote method to call + * @param params Parameters object the 
remote method should be called with + */ + notify(method: string, params: unknown) { + this.sendMessage({ + jsonrpc: "2.0", + method, + params, + }); + } + + /** + * Destroy the internal socket instance for this Transport + */ + destroy() { + this.socket.destroy(); + } + + private sendMessage(message: JsonRpcMessage) { + const body = Buffer.from(JSON.stringify(message)); + const headers = Buffer.from( + `Content-Length: ${body.length}\r\nContent-Type: ${MIME_JSONRPC};charset=utf-8\r\n\r\n`, + ); + this.socket.write(Buffer.concat([headers, body])); + } + + private pendingData = Buffer.from(""); + private readerState: ReaderState = { + kind: ReaderStateKind.Header, + }; + + private processIncoming(data: Buffer) { + this.pendingData = Buffer.concat([this.pendingData, data]); + + while (this.pendingData.length > 0) { + if (this.readerState.kind === ReaderStateKind.Header) { + const lineBreakIndex = this.pendingData.indexOf("\n"); + if (lineBreakIndex < 0) { + break; + } + + const header = this.pendingData.subarray(0, lineBreakIndex + 1); + this.pendingData = this.pendingData.subarray(lineBreakIndex + 1); + this.processIncomingHeader(this.readerState, header.toString("utf-8")); + } else if (this.pendingData.length >= this.readerState.contentLength) { + const body = this.pendingData.subarray( + 0, + this.readerState.contentLength, + ); + this.pendingData = this.pendingData.subarray( + this.readerState.contentLength, + ); + this.processIncomingBody(body); + + this.readerState = { + kind: ReaderStateKind.Header, + }; + } else { + break; + } + } + } + + private processIncomingHeader(readerState: ReaderStateHeader, line: string) { + if (line === "\r\n") { + const { contentLength, contentType } = readerState; + if (typeof contentLength !== "number") { + throw new Error( + "incoming message from the remote workspace is missing the Content-Length header", + ); + } + + this.readerState = { + kind: ReaderStateKind.Body, + contentLength, + contentType, + }; + return; + } + + const 
colonIndex = line.indexOf(":"); + if (colonIndex < 0) { + throw new Error(`could not find colon token in "${line}"`); + } + + const headerName = line.substring(0, colonIndex); + const headerValue = line.substring(colonIndex + 1).trim(); + + switch (headerName) { + case "Content-Length": { + const value = Number.parseInt(headerValue); + readerState.contentLength = value; + break; + } + case "Content-Type": { + if (!headerValue.startsWith(MIME_JSONRPC)) { + throw new Error( + `invalid value for Content-Type expected "${MIME_JSONRPC}", got "${headerValue}"`, + ); + } + + readerState.contentType = headerValue; + break; + } + default: + console.warn(`ignoring unknown header "${headerName}"`); + } + } + + private processIncomingBody(buffer: Buffer) { + const data = buffer.toString("utf-8"); + const body = JSON.parse(data); + + if (isJsonRpcMessage(body)) { + if (isJsonRpcRequest(body)) { + // TODO: Not implemented at the moment + return; + } + + if (isJsonRpcNotification(body)) { + // TODO: Not implemented at the moment + return; + } + + if (isJsonRpcResponse(body)) { + const pendingRequest = this.pendingRequests.get(body.id); + if (pendingRequest) { + this.pendingRequests.delete(body.id); + const { resolve, reject } = pendingRequest; + if ("result" in body) { + resolve(body.result); + } else { + reject(body.error); + } + } else { + throw new Error( + `could not find any pending request matching RPC response ID ${body.id}`, + ); + } + return; + } + } + + throw new Error( + `failed to deserialize incoming message from remote workspace, "${data}" is not a valid JSON-RPC message body`, + ); + } +} diff --git a/packages/@pglt/backend-jsonrpc/src/workspace.ts b/packages/@pglt/backend-jsonrpc/src/workspace.ts new file mode 100644 index 00000000..c110b0ef --- /dev/null +++ b/packages/@pglt/backend-jsonrpc/src/workspace.ts @@ -0,0 +1,453 @@ +// Generated file, do not edit by hand, see `xtask/codegen` +import type { Transport } from "./transport"; +export interface 
IsPathIgnoredParams { + pglt_path: PgLTPath; +} +export interface PgLTPath { + /** + * Determines the kind of the file inside PgLT. Some files are considered as configuration files, others as manifest files, and others as files to handle + */ + kind: FileKind; + path: string; + /** + * Whether this path (usually a file) was fixed as a result of a format/lint/check command with the `--write` flag. + */ + was_written: boolean; +} +export type FileKind = FileKind2[]; +/** + * The priority of the file + */ +export type FileKind2 = "Config" | "Ignore" | "Inspectable" | "Handleable"; +export interface GetFileContentParams { + path: PgLTPath; +} +export interface PullDiagnosticsParams { + categories: RuleCategories; + max_diagnostics: number; + only: RuleCode[]; + path: PgLTPath; + skip: RuleCode[]; +} +export type RuleCategories = RuleCategory[]; +export type RuleCode = string; +export type RuleCategory = "Lint" | "Action" | "Transformation"; +export interface PullDiagnosticsResult { + diagnostics: Diagnostic[]; + errors: number; + skipped_diagnostics: number; +} +/** + * Serializable representation for a [Diagnostic](super::Diagnostic). + */ +export interface Diagnostic { + advices: Advices; + category?: Category; + description: string; + location: Location; + message: MarkupBuf; + severity: Severity; + source?: Diagnostic; + tags: DiagnosticTags; + verboseAdvices: Advices; +} +/** + * Implementation of [Visitor] collecting serializable [Advice] into a vector.
+ */ +export interface Advices { + advices: Advice[]; +} +export type Category = + | "lint/safety/addingRequiredField" + | "lint/safety/banDropColumn" + | "lint/safety/banDropNotNull" + | "lint/safety/banDropTable" + | "stdin" + | "check" + | "configuration" + | "database/connection" + | "internalError/io" + | "internalError/runtime" + | "internalError/fs" + | "flags/invalid" + | "project" + | "typecheck" + | "internalError/panic" + | "syntax" + | "dummy" + | "lint" + | "lint/performance" + | "lint/safety"; +export interface Location { + path?: Resource_for_String; + sourceCode?: string; + span?: TextRange; +} +export type MarkupBuf = MarkupNodeBuf[]; +/** + * The severity to associate to a diagnostic. + */ +export type Severity = "hint" | "information" | "warning" | "error" | "fatal"; +export type DiagnosticTags = DiagnosticTag[]; +/** + * Serializable representation of a [Diagnostic](super::Diagnostic) advice + +See the [Visitor] trait for additional documentation on all the supported advice types. + */ +export type Advice = + | { log: [LogCategory, MarkupBuf] } + | { list: MarkupBuf[] } + | { frame: Location } + | { diff: TextEdit } + | { backtrace: [MarkupBuf, Backtrace] } + | { command: string } + | { group: [MarkupBuf, Advices] }; +/** + * Represents the resource a diagnostic is associated with. + */ +export type Resource_for_String = "argv" | "memory" | { file: string }; +export type TextRange = [TextSize, TextSize]; +export interface MarkupNodeBuf { + content: string; + elements: MarkupElement[]; +} +/** + * Internal enum used to automatically generate bit offsets for [DiagnosticTags] and help with the implementation of `serde` and `schemars` for tags. + */ +export type DiagnosticTag = + | "fixable" + | "internal" + | "unnecessaryCode" + | "deprecatedCode" + | "verbose"; +/** + * The category for a log advice, defines how the message should be presented to the user. 
+ */ +export type LogCategory = "none" | "info" | "warn" | "error"; +export interface TextEdit { + dictionary: string; + ops: CompressedOp[]; +} +export type Backtrace = BacktraceFrame[]; +export type TextSize = number; +/** + * Enumeration of all the supported markup elements + */ +export type MarkupElement = + | "Emphasis" + | "Dim" + | "Italic" + | "Underline" + | "Error" + | "Success" + | "Warn" + | "Info" + | "Debug" + | "Trace" + | "Inverse" + | { Hyperlink: { href: string } }; +export type CompressedOp = + | { diffOp: DiffOp } + | { equalLines: { line_count: number } }; +/** + * Serializable representation of a backtrace frame. + */ +export interface BacktraceFrame { + ip: number; + symbols: BacktraceSymbol[]; +} +export type DiffOp = + | { equal: { range: TextRange } } + | { insert: { range: TextRange } } + | { delete: { range: TextRange } }; +/** + * Serializable representation of a backtrace frame symbol. + */ +export interface BacktraceSymbol { + colno?: number; + filename?: string; + lineno?: number; + name?: string; +} +export interface GetCompletionsParams { + /** + * The File for which a completion is requested. + */ + path: PgLTPath; + /** + * The Cursor position in the file for which a completion is requested. + */ + position: TextSize; +} +export interface CompletionResult { + items: CompletionItem[]; +} +export interface CompletionItem { + description: string; + kind: CompletionItemKind; + label: string; + preselected: boolean; + score: number; +} +export type CompletionItemKind = "table" | "function" | "column"; +export interface UpdateSettingsParams { + configuration: PartialConfiguration; + gitignore_matches: string[]; + skip_db: boolean; + vcs_base_path?: string; + workspace_directory?: string; +} +/** + * The configuration that is contained inside the configuration file. 
+ */ +export interface PartialConfiguration { + /** + * A field for the [JSON schema](https://json-schema.org/) specification + */ + $schema?: string; + /** + * The configuration of the database connection + */ + db?: PartialDatabaseConfiguration; + /** + * The configuration of the filesystem + */ + files?: PartialFilesConfiguration; + /** + * The configuration for the linter + */ + linter?: PartialLinterConfiguration; + /** + * Configure migrations + */ + migrations?: PartialMigrationsConfiguration; + /** + * The configuration of the VCS integration + */ + vcs?: PartialVcsConfiguration; +} +/** + * The configuration of the database connection. + */ +export interface PartialDatabaseConfiguration { + /** + * The connection timeout in seconds. + */ + connTimeoutSecs?: number; + /** + * The name of the database. + */ + database?: string; + /** + * The host of the database. + */ + host?: string; + /** + * The password to connect to the database. + */ + password?: string; + /** + * The port of the database. + */ + port?: number; + /** + * The username to connect to the database. + */ + username?: string; +} +/** + * The configuration of the filesystem + */ +export interface PartialFilesConfiguration { + /** + * A list of Unix shell style patterns. Will ignore files/folders that will match these patterns. + */ + ignore?: StringSet; + /** + * A list of Unix shell style patterns. Will handle only those files/folders that will match these patterns. + */ + include?: StringSet; + /** + * The maximum allowed size for source code files in bytes. Files above this limit will be ignored for performance reasons. Defaults to 1 MiB + */ + maxSize?: number; +} +export interface PartialLinterConfiguration { + /** + * if `false`, it disables the feature and the linter won't be executed. `true` by default + */ + enabled?: boolean; + /** + * A list of Unix shell style patterns. The formatter will ignore files/folders that will match these patterns. 
+ */ + ignore?: StringSet; + /** + * A list of Unix shell style patterns. The formatter will include files/folders that will match these patterns. + */ + include?: StringSet; + /** + * List of rules + */ + rules?: Rules; +} +/** + * Configure migrations + */ +export interface PartialMigrationsConfiguration { + /** + * Ignore any migrations before this timestamp + */ + after?: number; + /** + * The directory where the migration files are stored + */ + migrationsDir?: string; +} +/** + * Set of properties to integrate with a VCS software. + */ +export interface PartialVcsConfiguration { + /** + * The kind of client. + */ + clientKind?: VcsClientKind; + /** + * The main branch of the project + */ + defaultBranch?: string; + /** + * Whether we should integrate itself with the VCS client + */ + enabled?: boolean; + /** + * The folder where we should check for VCS files. By default, we will use the same folder where `pglt.jsonc` was found. + +If we can't find the configuration, it will attempt to use the current working directory. If no current working directory can be found, we won't use the VCS integration, and a diagnostic will be emitted + */ + root?: string; + /** + * Whether we should use the VCS ignore file. When [true], we will ignore the files specified in the ignore file. + */ + useIgnoreFile?: boolean; +} +export type StringSet = string[]; +export interface Rules { + /** + * It enables ALL rules. The rules that belong to `nursery` won't be enabled. + */ + all?: boolean; + /** + * It enables the lint rules recommended by PgLT. `true` by default. + */ + recommended?: boolean; + safety?: Safety; +} +export type VcsClientKind = "git"; +/** + * A list of rules that belong to this group + */ +export interface Safety { + /** + * Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required. + */ + addingRequiredField?: RuleConfiguration_for_Null; + /** + * It enables ALL rules for this group.
+ */ + all?: boolean; + /** + * Dropping a column may break existing clients. + */ + banDropColumn?: RuleConfiguration_for_Null; + /** + * Dropping a NOT NULL constraint may break existing clients. + */ + banDropNotNull?: RuleConfiguration_for_Null; + /** + * Dropping a table may break existing clients. + */ + banDropTable?: RuleConfiguration_for_Null; + /** + * It enables the recommended rules for this group + */ + recommended?: boolean; +} +export type RuleConfiguration_for_Null = + | RulePlainConfiguration + | RuleWithOptions_for_Null; +export type RulePlainConfiguration = "warn" | "error" | "info" | "off"; +export interface RuleWithOptions_for_Null { + /** + * The severity of the emitted diagnostics by the rule + */ + level: RulePlainConfiguration; + /** + * Rule's options + */ + options: null; +} +export interface OpenFileParams { + content: string; + path: PgLTPath; + version: number; +} +export interface ChangeFileParams { + changes: ChangeParams[]; + path: PgLTPath; + version: number; +} +export interface ChangeParams { + /** + * The range of the file that changed. If `None`, the whole file changed. 
+ */ + range?: TextRange; + text: string; +} +export interface CloseFileParams { + path: PgLTPath; +} +export type Configuration = PartialConfiguration; +export interface Workspace { + isPathIgnored(params: IsPathIgnoredParams): Promise; + getFileContent(params: GetFileContentParams): Promise; + pullDiagnostics( + params: PullDiagnosticsParams, + ): Promise; + getCompletions(params: GetCompletionsParams): Promise; + updateSettings(params: UpdateSettingsParams): Promise; + openFile(params: OpenFileParams): Promise; + changeFile(params: ChangeFileParams): Promise; + closeFile(params: CloseFileParams): Promise; + destroy(): void; +} +export function createWorkspace(transport: Transport): Workspace { + return { + isPathIgnored(params) { + return transport.request("pglt/is_path_ignored", params); + }, + getFileContent(params) { + return transport.request("pglt/get_file_content", params); + }, + pullDiagnostics(params) { + return transport.request("pglt/pull_diagnostics", params); + }, + getCompletions(params) { + return transport.request("pglt/get_completions", params); + }, + updateSettings(params) { + return transport.request("pglt/update_settings", params); + }, + openFile(params) { + return transport.request("pglt/open_file", params); + }, + changeFile(params) { + return transport.request("pglt/change_file", params); + }, + closeFile(params) { + return transport.request("pglt/close_file", params); + }, + destroy() { + transport.destroy(); + }, + }; +} diff --git a/packages/@pglt/backend-jsonrpc/tests/transport.test.mjs b/packages/@pglt/backend-jsonrpc/tests/transport.test.mjs new file mode 100644 index 00000000..32a103ee --- /dev/null +++ b/packages/@pglt/backend-jsonrpc/tests/transport.test.mjs @@ -0,0 +1,160 @@ +import { describe, expect, it, mock } from "bun:test"; + +import { Transport } from "../src/transport"; + +function makeMessage(body) { + const content = JSON.stringify(body); + return Buffer.from( + `Content-Length: ${content.length}\r\nContent-Type: 
application/vscode-jsonrpc;charset=utf-8\r\n\r\n${content}`, + ); +} + +describe("Transport Layer", () => { + it("should encode requests into the socket", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + const result = transport.request("method", "params"); + + expect(socket.write).toHaveBeenCalledWith( + makeMessage({ + jsonrpc: "2.0", + id: 0, + method: "method", + params: "params", + }), + ); + + onData( + makeMessage({ + jsonrpc: "2.0", + id: 0, + result: "result", + }), + ); + + const response = await result; + expect(response).toBe("result"); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); + + it("should throw on missing Content-Length headers", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + expect(() => onData(Buffer.from("\r\n"))).toThrowError( + "incoming message from the remote workspace is missing the Content-Length header", + ); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); + + it("should throw on missing colon token", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + expect(() => onData(Buffer.from("Content-Length\r\n"))).toThrowError( + 'could not find colon token in "Content-Length\r\n"', + ); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); + + it("should throw on invalid Content-Type", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new 
Transport(socket); + + expect(() => + onData(Buffer.from("Content-Type: text/plain\r\n")), + ).toThrowError( + 'invalid value for Content-Type expected "application/vscode-jsonrpc", got "text/plain"', + ); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); + + it("should throw on unknown request ID", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + expect(() => + onData(makeMessage({ jsonrpc: "2.0", id: 0, result: "result" })), + ).toThrowError( + "could not find any pending request matching RPC response ID 0", + ); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); + + it("should throw on invalid messages", async () => { + let onData = null; + const socket = { + on(event, fn) { + expect(event).toBe("data"); + onData = fn; + }, + write: mock(), + destroy: mock(), + }; + + const transport = new Transport(socket); + + expect(() => onData(makeMessage({}))).toThrowError( + 'failed to deserialize incoming message from remote workspace, "{}" is not a valid JSON-RPC message body', + ); + + transport.destroy(); + expect(socket.destroy).toHaveBeenCalledOnce(); + }); +}); diff --git a/packages/@pglt/backend-jsonrpc/tests/workspace.test.mjs b/packages/@pglt/backend-jsonrpc/tests/workspace.test.mjs new file mode 100644 index 00000000..fc6c71c8 --- /dev/null +++ b/packages/@pglt/backend-jsonrpc/tests/workspace.test.mjs @@ -0,0 +1,54 @@ +import { resolve } from "node:path"; +import { fileURLToPath } from "node:url"; +import { describe, expect, it } from "vitest"; + +import { createWorkspaceWithBinary } from "../dist"; + +describe("Workspace API", () => { + it("should process remote requests", async () => { + const extension = process.platform === "win32" ? 
".exe" : ""; + const command = resolve( + fileURLToPath(import.meta.url), + "../../../../..", + `target/release/pglt${extension}`, + ); + + const workspace = await createWorkspaceWithBinary(command); + await workspace.openFile({ + path: { + path: "test.sql", + was_written: false, + kind: ["Handleable"], + }, + content: "select 1 from", + version: 0, + }); + + const { diagnostics } = await workspace.pullDiagnostics({ + only: [], + skip: [], + max_diagnostics: 100, + categories: [], + path: { + path: "test.sql", + was_written: false, + kind: ["Handleable"], + }, + }); + + expect(diagnostics).toHaveLength(1); + expect(diagnostics[0].description).toBe( + "Invalid statement: syntax error at end of input", + ); + + await workspace.closeFile({ + path: { + path: "test.sql", + was_written: false, + kind: ["Handleable"], + }, + }); + + workspace.destroy(); + }); +}); diff --git a/packages/@pglt/pglt/bin/pglt b/packages/@pglt/pglt/bin/pglt old mode 100644 new mode 100755 diff --git a/packages/@pglt/pglt/package.json b/packages/@pglt/pglt/package.json index 264ee54c..5a551807 100644 --- a/packages/@pglt/pglt/package.json +++ b/packages/@pglt/pglt/package.json @@ -1,43 +1,40 @@ { - "name": "pglt", - "version": "", - "bin": { - "pglt": "bin/pglt" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/supabase-community/postgres_lsp.git", - "directory": "packages/@pglt/pglt" - }, - "author": "Supabase Community", - "contributors": [ - { - "name": "Philipp Steinrötter", - "url": "https://github.com/psteinroe" - }, - { - "name": "Julian Domke", - "url": "https://github.com/juleswritescode" - } - ], - "license": "MIT or Apache-2.0", - "description": "A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling.", - "files": [ - "bin/pglt", - "schema.json" - ], - "engines": { - "node": ">=20" - }, - "publishConfig": { - "provenance": true - }, - "optionalDependencies": { - 
"pglt-x86_64-windows-msvc": "", - "pglt-aarch64-windows-msvc": "", - "pglt-x86_64-apple-darwin": "", - "pglt-aarch64-apple-darwin": "", - "pglt-x86_64-linux-gnu": "", - "pglt-aarch64-linux-gnu": "" - } + "name": "pglt", + "version": "", + "bin": { + "pglt": "bin/pglt" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/supabase-community/postgres_lsp.git", + "directory": "packages/@pglt/pglt" + }, + "author": "Supabase Community", + "contributors": [ + { + "name": "Philipp Steinrötter", + "url": "https://github.com/psteinroe" + }, + { + "name": "Julian Domke", + "url": "https://github.com/juleswritescode" + } + ], + "license": "MIT or Apache-2.0", + "description": "A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling.", + "files": ["bin/pglt", "schema.json"], + "engines": { + "node": ">=20" + }, + "publishConfig": { + "provenance": true + }, + "optionalDependencies": { + "pglt-x86_64-windows-msvc": "", + "pglt-aarch64-windows-msvc": "", + "pglt-x86_64-apple-darwin": "", + "pglt-aarch64-apple-darwin": "", + "pglt-x86_64-linux-gnu": "", + "pglt-aarch64-linux-gnu": "" + } } diff --git a/packages/@pglt/pglt/scripts/generate-packages.mjs b/packages/@pglt/pglt/scripts/generate-packages.mjs index 2a1fd2f4..52c6bf8f 100644 --- a/packages/@pglt/pglt/scripts/generate-packages.mjs +++ b/packages/@pglt/pglt/scripts/generate-packages.mjs @@ -9,233 +9,242 @@ const streamPipeline = promisify(pipeline); const CLI_ROOT = resolve(fileURLToPath(import.meta.url), "../.."); const PACKAGES_PGLT_ROOT = resolve(CLI_ROOT, ".."); const PGLT_ROOT = resolve(PACKAGES_PGLT_ROOT, "../.."); -const MANIFEST_PATH = resolve(CLI_ROOT, "package.json"); const SUPPORTED_PLATFORMS = [ - "pc-windows-msvc", - "apple-darwin", - "unknown-linux-gnu", + "pc-windows-msvc", + "apple-darwin", + "unknown-linux-gnu", ]; const SUPPORTED_ARCHITECTURES = ["x86_64", "aarch64"]; async function 
downloadSchema(releaseTag, githubToken) { - const assetUrl = `https://github.com/supabase-community/postgres_lsp/releases/download/${releaseTag}/schema.json`; + const assetUrl = `https://github.com/supabase-community/postgres_lsp/releases/download/${releaseTag}/schema.json`; - const response = await fetch(assetUrl.trim(), { - headers: { - Authorization: `token ${githubToken}`, - Accept: `application/octet-stream`, - }, - }); + const response = await fetch(assetUrl.trim(), { + headers: { + Authorization: `token ${githubToken}`, + Accept: "application/octet-stream", + }, + }); - if (!response.ok) { - throw new Error(`Failed to Fetch Asset from ${assetUrl}`); - } + if (!response.ok) { + throw new Error(`Failed to Fetch Asset from ${assetUrl}`); + } - // download to root. - const fileStream = fs.createWriteStream(resolve(PGLT_ROOT, "schema.json")); + // download to root. + const fileStream = fs.createWriteStream(resolve(PGLT_ROOT, "schema.json")); - await streamPipeline(response.body, fileStream); + await streamPipeline(response.body, fileStream); - console.log(`Downloaded schema for ${releaseTag}`); + console.log(`Downloaded schema for ${releaseTag}`); } async function downloadBinary(platform, arch, os, releaseTag, githubToken) { - const buildName = getBuildName(platform, arch); + const buildName = getBuildName(platform, arch); - const assetUrl = `https://github.com/supabase-community/postgres_lsp/releases/download/${releaseTag}/${buildName}`; + const assetUrl = `https://github.com/supabase-community/postgres_lsp/releases/download/${releaseTag}/${buildName}`; - const response = await fetch(assetUrl.trim(), { - headers: { - Authorization: `token ${githubToken}`, - Accept: `application/octet-stream`, - }, - }); + const response = await fetch(assetUrl.trim(), { + headers: { + Authorization: `token ${githubToken}`, + Accept: "application/octet-stream", + }, + }); - if (!response.ok) { - const error = await response.text(); - throw new Error( - `Failed to Fetch Asset from 
${assetUrl} (Reason: ${error})` - ); - } + if (!response.ok) { + const error = await response.text(); + throw new Error( + `Failed to Fetch Asset from ${assetUrl} (Reason: ${error})`, + ); + } - // just download to root. - const fileStream = fs.createWriteStream(getBinarySource(platform, arch, os)); + // just download to root. + const fileStream = fs.createWriteStream(getBinarySource(platform, arch, os)); - await streamPipeline(response.body, fileStream); + await streamPipeline(response.body, fileStream); - console.log(`Downloaded asset for ${buildName} (v${releaseTag})`); + console.log(`Downloaded asset for ${buildName} (v${releaseTag})`); } -async function overwriteManifestVersions(releaseTag, isPrerelease) { - const version = getVersion(releaseTag, isPrerelease); +async function writeManifest(packagePath, version) { + const manifestPath = resolve(PACKAGES_PGLT_ROOT, packagePath, "package.json"); - const manifestClone = structuredClone(rootManifest()); + const manifestData = JSON.parse( + fs.readFileSync(manifestPath).toString("utf-8"), + ); - manifestClone.version = version; - for (const dep in manifestClone.optionalDependencies) { - manifestClone.optionalDependencies[dep] = version; - } + const nativePackages = SUPPORTED_PLATFORMS.flatMap((platform) => + SUPPORTED_ARCHITECTURES.map((arch) => [ + `@pglt/${getName(platform, arch)}`, + version, + ]), + ); - /** - * writeFileSync seemed to not work reliably? - */ - await new Promise((res, rej) => { - fs.writeFile(MANIFEST_PATH, JSON.stringify(manifestClone, null, 2), (e) => - e ? rej(e) : res() - ); - }); + manifestData.version = version; + manifestData.optionalDependencies = Object.fromEntries(nativePackages); + + console.log(`Update manifest ${manifestPath}`); + const content = JSON.stringify(manifestData, null, 2); + + /** + * writeFileSync seemed to not work reliably? + */ + await new Promise((res, rej) => { + fs.writeFile(manifestPath, content, (e) => (e ? 
rej(e) : res())); + }); } async function makePackageDir(platform, arch) { - const buildName = getBuildName(platform, arch); - const packageRoot = resolve(PACKAGES_PGLT_ROOT, buildName); + const buildName = getBuildName(platform, arch); + const packageRoot = resolve(PACKAGES_PGLT_ROOT, buildName); - await new Promise((res, rej) => { - fs.mkdir(packageRoot, {}, (e) => (e ? rej(e) : res())); - }); + await new Promise((res, rej) => { + fs.mkdir(packageRoot, {}, (e) => (e ? rej(e) : res())); + }); } function copyBinaryToNativePackage(platform, arch, os) { - // Update the package.json manifest - const buildName = getBuildName(platform, arch); - const packageRoot = resolve(PACKAGES_PGLT_ROOT, buildName); - const packageName = getPackageName(platform, arch); - - const { version, license, repository, engines } = rootManifest(); - - /** - * We need to map rust triplets to NPM-known values. - * Otherwise, npm will abort the package installation. - */ - const npm_arch = arch === "aarch64" ? "arm64" : "x64"; - let libc = undefined; - let npm_os = undefined; - - switch (os) { - case "linux": { - libc = "gnu"; - npm_os = "linux"; - break; - } - case "windows": { - libc = "msvc"; - npm_os = "win32"; - break; - } - case "darwin": { - libc = undefined; - npm_os = "darwin"; - break; - } - default: { - throw new Error(`Unsupported os: ${os}`); - } - } - - const manifest = JSON.stringify( - { - name: packageName, - version, - license, - repository, - engines, - os: [npm_os], - cpu: [npm_arch], - libc, - }, - null, - 2 - ); - - const ext = getBinaryExt(os); - const manifestPath = resolve(packageRoot, "package.json"); - console.info(`Update manifest ${manifestPath}`); - fs.writeFileSync(manifestPath, manifest); - - // Copy the CLI binary - const binarySource = getBinarySource(platform, arch, os); - const binaryTarget = resolve(packageRoot, `pglt${ext}`); - - if (!fs.existsSync(binarySource)) { - console.error( - `Source for binary for ${buildName} not found at: ${binarySource}` - ); - 
process.exit(1); - } - - console.info(`Copy binary ${binaryTarget}`); - fs.copyFileSync(binarySource, binaryTarget); - fs.chmodSync(binaryTarget, 0o755); + // Update the package.json manifest + const buildName = getBuildName(platform, arch); + const packageRoot = resolve(PACKAGES_PGLT_ROOT, buildName); + const packageName = getPackageName(platform, arch); + + const { version, license, repository, engines } = rootManifest(); + + /** + * We need to map rust triplets to NPM-known values. + * Otherwise, npm will abort the package installation. + */ + const npm_arch = arch === "aarch64" ? "arm64" : "x64"; + let libc = undefined; + let npm_os = undefined; + + switch (os) { + case "linux": { + libc = "gnu"; + npm_os = "linux"; + break; + } + case "windows": { + libc = "msvc"; + npm_os = "win32"; + break; + } + case "darwin": { + libc = undefined; + npm_os = "darwin"; + break; + } + default: { + throw new Error(`Unsupported os: ${os}`); + } + } + + const manifest = JSON.stringify( + { + name: packageName, + version, + license, + repository, + engines, + os: [npm_os], + cpu: [npm_arch], + libc, + }, + null, + 2, + ); + + const ext = getBinaryExt(os); + const manifestPath = resolve(packageRoot, "package.json"); + console.info(`Update manifest ${manifestPath}`); + fs.writeFileSync(manifestPath, manifest); + + // Copy the CLI binary + const binarySource = getBinarySource(platform, arch, os); + const binaryTarget = resolve(packageRoot, `pglt${ext}`); + + if (!fs.existsSync(binarySource)) { + console.error( + `Source for binary for ${buildName} not found at: ${binarySource}`, + ); + process.exit(1); + } + + console.info(`Copy binary ${binaryTarget}`); + fs.copyFileSync(binarySource, binaryTarget); + fs.chmodSync(binaryTarget, 0o755); } function copySchemaToNativePackage(platform, arch) { - const buildName = getBuildName(platform, arch); - const packageRoot = resolve(PACKAGES_PGLT_ROOT, buildName); + const buildName = getBuildName(platform, arch); + const packageRoot = 
resolve(PACKAGES_PGLT_ROOT, buildName); - const schemaSrc = resolve(PGLT_ROOT, `schema.json`); - const schemaTarget = resolve(packageRoot, `schema.json`); + const schemaSrc = resolve(PGLT_ROOT, "schema.json"); + const schemaTarget = resolve(packageRoot, "schema.json"); - if (!fs.existsSync(schemaSrc)) { - console.error(`schema.json not found at: ${schemaSrc}`); - process.exit(1); - } + if (!fs.existsSync(schemaSrc)) { + console.error(`schema.json not found at: ${schemaSrc}`); + process.exit(1); + } - console.info(`Copying schema.json`); - fs.copyFileSync(schemaSrc, schemaTarget); - fs.chmodSync(schemaTarget, 0o666); + console.info("Copying schema.json"); + fs.copyFileSync(schemaSrc, schemaTarget); + fs.chmodSync(schemaTarget, 0o666); } const rootManifest = () => - JSON.parse(fs.readFileSync(MANIFEST_PATH).toString("utf-8")); + JSON.parse(fs.readFileSync(MANIFEST_PATH).toString("utf-8")); function getBinaryExt(os) { - return os === "windows" ? ".exe" : ""; + return os === "windows" ? ".exe" : ""; } function getBinarySource(platform, arch, os) { - const ext = getBinaryExt(os); - return resolve(PGLT_ROOT, `${getBuildName(platform, arch)}${ext}`); + const ext = getBinaryExt(os); + return resolve(PGLT_ROOT, `${getBuildName(platform, arch)}${ext}`); } function getBuildName(platform, arch) { - return `pglt_${arch}-${platform}`; + return `pglt_${arch}-${platform}`; } function getPackageName(platform, arch) { - // trim the "unknown" from linux and the "pc" from windows - const platformName = platform.split("-").slice(-2).join("-"); - return `pglt-${arch}-${platformName}`; + // trim the "unknown" from linux and the "pc" from windows + const platformName = platform.split("-").slice(-2).join("-"); + return `pglt-${arch}-${platformName}`; } function getOs(platform) { - return platform.split("-").find((_, idx) => idx === 1); + return platform.split("-").find((_, idx) => idx === 1); } function getVersion(releaseTag, isPrerelease) { - return releaseTag + (isPrerelease ? 
"-rc" : ""); + return releaseTag + (isPrerelease ? "-rc" : ""); } (async function main() { - const githubToken = process.env.GITHUB_TOKEN; - let releaseTag = process.env.RELEASE_TAG; - assert(githubToken, "GITHUB_TOKEN not defined!"); - assert(releaseTag, "RELEASE_TAG not defined!"); - - const isPrerelease = process.env.PRERELEASE === "true"; - - await downloadSchema(releaseTag, githubToken); - await overwriteManifestVersions(releaseTag, isPrerelease); - - for (const platform of SUPPORTED_PLATFORMS) { - const os = getOs(platform); - - for (const arch of SUPPORTED_ARCHITECTURES) { - await makePackageDir(platform, arch); - await downloadBinary(platform, arch, os, releaseTag, githubToken); - copyBinaryToNativePackage(platform, arch, os); - copySchemaToNativePackage(platform, arch); - } - } - - process.exit(0); + const githubToken = process.env.GITHUB_TOKEN; + const releaseTag = process.env.RELEASE_TAG; + assert(githubToken, "GITHUB_TOKEN not defined!"); + assert(releaseTag, "RELEASE_TAG not defined!"); + + const isPrerelease = process.env.PRERELEASE === "true"; + + await downloadSchema(releaseTag, githubToken); + const version = getVersion(releaseTag, isPrerelease); + await writeManifest("pglt", version); + await writeManifest("backend-jsonrpc", version); + + for (const platform of SUPPORTED_PLATFORMS) { + const os = getOs(platform); + + for (const arch of SUPPORTED_ARCHITECTURES) { + await makePackageDir(platform, arch); + await downloadBinary(platform, arch, os, releaseTag, githubToken); + copyBinaryToNativePackage(platform, arch, os); + copySchemaToNativePackage(platform, arch); + } + } + + process.exit(0); })(); diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..238655f2 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + // Enable latest features + "lib": ["ESNext", "DOM"], + "target": "ESNext", + "module": "ESNext", + "moduleDetection": "force", + "jsx": "react-jsx", + "allowJs": true, + + // Bundler mode + 
"moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "noEmit": true, + + // Best practices + "strict": true, + "skipLibCheck": true, + "noFallthroughCasesInSwitch": true, + + // Some stricter flags (disabled by default) + "noUnusedLocals": false, + "noUnusedParameters": false, + "noPropertyAccessFromIndexSignature": false + } +} diff --git a/xtask/codegen/Cargo.toml b/xtask/codegen/Cargo.toml index c13dba18..8ee2c73d 100644 --- a/xtask/codegen/Cargo.toml +++ b/xtask/codegen/Cargo.toml @@ -5,12 +5,17 @@ publish = false version = "0.0.0" [dependencies] -anyhow = { workspace = true } -biome_string_case = { workspace = true } -bpaf = { workspace = true, features = ["derive"] } -pglt_analyse = { workspace = true } -pglt_analyser = { workspace = true } -proc-macro2 = { workspace = true, features = ["span-locations"] } -pulldown-cmark = { version = "0.12.2" } -quote = "1.0.36" -xtask = { path = '../', version = "0.0" } +anyhow = { workspace = true } +biome_js_factory = { workspace = true } +biome_js_formatter = { workspace = true } +biome_js_syntax = { workspace = true } +biome_rowan = { workspace = true } +biome_string_case = { workspace = true } +bpaf = { workspace = true, features = ["derive"] } +pglt_analyse = { workspace = true } +pglt_analyser = { workspace = true } +pglt_workspace = { workspace = true, features = ["schema"] } +proc-macro2 = { workspace = true, features = ["span-locations"] } +pulldown-cmark = { version = "0.12.2" } +quote = "1.0.36" +xtask = { path = '../', version = "0.0" } diff --git a/xtask/codegen/src/generate_bindings.rs b/xtask/codegen/src/generate_bindings.rs new file mode 100644 index 00000000..abfc29fe --- /dev/null +++ b/xtask/codegen/src/generate_bindings.rs @@ -0,0 +1,434 @@ +use crate::update; +use biome_js_factory::make; +use biome_js_formatter::{context::JsFormatOptions, format_node}; +use biome_js_syntax::{ + AnyJsBinding, AnyJsBindingPattern, AnyJsCallArgument, AnyJsDeclaration, 
AnyJsDeclarationClause, + AnyJsExportClause, AnyJsExpression, AnyJsFormalParameter, AnyJsImportClause, + AnyJsLiteralExpression, AnyJsModuleItem, AnyJsName, AnyJsNamedImportSpecifier, + AnyJsObjectMember, AnyJsObjectMemberName, AnyJsParameter, AnyJsStatement, AnyTsName, + AnyTsReturnType, AnyTsType, AnyTsTypeMember, JsFileSource, TriviaPieceKind, T, +}; +use biome_rowan::AstNode; +use biome_string_case::Case; +use pglt_workspace::workspace_types::{generate_type, methods, ModuleQueue}; +use xtask::{project_root, Mode, Result}; + +pub fn generate_bindings(mode: Mode) -> Result<()> { + let bindings_path = project_root().join("packages/@pglt/backend-jsonrpc/src/workspace.ts"); + let methods = methods(); + + let mut declarations = Vec::new(); + let mut member_definitions = Vec::with_capacity(methods.len()); + let mut member_declarations = Vec::with_capacity(methods.len()); + let mut queue = ModuleQueue::default(); + + for method in &methods { + let params = generate_type(&mut declarations, &mut queue, &method.params); + let result = generate_type(&mut declarations, &mut queue, &method.result); + + let camel_case = Case::Camel.convert(method.name); + + member_definitions.push(AnyTsTypeMember::TsMethodSignatureTypeMember( + make::ts_method_signature_type_member( + AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name( + make::ident(&camel_case), + )), + make::js_parameters( + make::token(T!['(']), + make::js_parameter_list( + Some(AnyJsParameter::AnyJsFormalParameter( + AnyJsFormalParameter::JsFormalParameter( + make::js_formal_parameter( + make::js_decorator_list([]), + AnyJsBindingPattern::AnyJsBinding( + AnyJsBinding::JsIdentifierBinding( + make::js_identifier_binding(make::ident("params")), + ), + ), + ) + .with_type_annotation(make::ts_type_annotation( + make::token(T![:]), + params, + )) + .build(), + ), + )), + None, + ), + make::token(T![')']), + ), + ) + .with_return_type_annotation(make::ts_return_type_annotation( + make::token(T![:]), + 
AnyTsReturnType::AnyTsType(AnyTsType::TsReferenceType( + make::ts_reference_type(AnyTsName::JsReferenceIdentifier( + make::js_reference_identifier(make::ident("Promise")), + )) + .with_type_arguments(make::ts_type_arguments( + make::token(T![<]), + make::ts_type_argument_list(Some(result), None), + make::token(T![>]), + )) + .build(), + )), + )) + .build(), + )); + + member_declarations.push(AnyJsObjectMember::JsMethodObjectMember( + make::js_method_object_member( + AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name( + make::ident(&camel_case), + )), + make::js_parameters( + make::token(T!['(']), + make::js_parameter_list( + Some(AnyJsParameter::AnyJsFormalParameter( + AnyJsFormalParameter::JsFormalParameter( + make::js_formal_parameter(make::js_decorator_list([]),AnyJsBindingPattern::AnyJsBinding( + AnyJsBinding::JsIdentifierBinding(make::js_identifier_binding( + make::ident("params"), + )), + )) + .build(), + ), + )), + None, + ), + make::token(T![')']), + ), + make::js_function_body( + make::token(T!['{']), + make::js_directive_list(None), + make::js_statement_list(Some(AnyJsStatement::JsReturnStatement( + make::js_return_statement(make::token(T![return])) + .with_argument(AnyJsExpression::JsCallExpression( + make::js_call_expression( + AnyJsExpression::JsStaticMemberExpression( + make::js_static_member_expression( + AnyJsExpression::JsIdentifierExpression( + make::js_identifier_expression( + make::js_reference_identifier(make::ident( + "transport", + )), + ), + ), + make::token(T![.]), + AnyJsName::JsName(make::js_name(make::ident( + "request", + ))), + ), + ), + make::js_call_arguments( + make::token(T!['(']), + make::js_call_argument_list( + [ + AnyJsCallArgument::AnyJsExpression( + AnyJsExpression::AnyJsLiteralExpression( + AnyJsLiteralExpression::JsStringLiteralExpression(make::js_string_literal_expression(make::js_string_literal(&format!("pglt/{}", method.name)))), + ), + ), + AnyJsCallArgument::AnyJsExpression( + 
AnyJsExpression::JsIdentifierExpression( + make::js_identifier_expression( + make::js_reference_identifier(make::ident( + "params", + )), + ), + ), + ), + ], + Some(make::token(T![,])), + ), + make::token(T![')']), + ), + ) + .build(), + )) + .build(), + ))), + make::token(T!['}']), + ), + ) + .build(), + )); + } + + let leading_comment = [ + ( + TriviaPieceKind::SingleLineComment, + "// Generated file, do not edit by hand, see `xtask/codegen`", + ), + (TriviaPieceKind::Newline, "\n"), + ]; + + let mut items = vec![AnyJsModuleItem::JsImport( + make::js_import( + make::token(T![import]).with_leading_trivia(leading_comment.into_iter()), + AnyJsImportClause::JsImportNamedClause( + make::js_import_named_clause( + make::js_named_import_specifiers( + make::token(T!['{']), + make::js_named_import_specifier_list( + Some(AnyJsNamedImportSpecifier::JsShorthandNamedImportSpecifier( + make::js_shorthand_named_import_specifier( + AnyJsBinding::JsIdentifierBinding(make::js_identifier_binding( + make::ident("Transport"), + )), + ) + .build(), + )), + None, + ), + make::token(T!['}']), + ), + make::token(T![from]), + make::js_module_source(make::js_string_literal("./transport")), + ) + .with_type_token(make::token(T![type])) + .build(), + ), + ) + .build(), + )]; + + items.extend(declarations.into_iter().map(|(decl, description)| { + let mut export = make::token(T![export]); + if let Some(description) = description { + let comment = format!("/**\n\t* {} \n\t */\n", description); + let trivia = vec![ + (TriviaPieceKind::Newline, "\n"), + (TriviaPieceKind::MultiLineComment, comment.as_str()), + (TriviaPieceKind::Newline, "\n"), + ]; + export = export.with_leading_trivia(trivia); + } + AnyJsModuleItem::JsExport(make::js_export( + make::js_decorator_list([]), + export, + AnyJsExportClause::AnyJsDeclarationClause(match decl { + AnyJsDeclaration::JsClassDeclaration(decl) => { + AnyJsDeclarationClause::JsClassDeclaration(decl) + } + AnyJsDeclaration::JsFunctionDeclaration(decl) => { + 
AnyJsDeclarationClause::JsFunctionDeclaration(decl) + } + AnyJsDeclaration::JsVariableDeclaration(decl) => { + AnyJsDeclarationClause::JsVariableDeclarationClause( + make::js_variable_declaration_clause(decl).build(), + ) + } + AnyJsDeclaration::TsDeclareFunctionDeclaration(decl) => { + AnyJsDeclarationClause::TsDeclareFunctionDeclaration(decl) + } + AnyJsDeclaration::TsEnumDeclaration(decl) => { + AnyJsDeclarationClause::TsEnumDeclaration(decl) + } + AnyJsDeclaration::TsExternalModuleDeclaration(decl) => { + AnyJsDeclarationClause::TsExternalModuleDeclaration(decl) + } + AnyJsDeclaration::TsGlobalDeclaration(decl) => { + AnyJsDeclarationClause::TsGlobalDeclaration(decl) + } + AnyJsDeclaration::TsImportEqualsDeclaration(decl) => { + AnyJsDeclarationClause::TsImportEqualsDeclaration(decl) + } + AnyJsDeclaration::TsInterfaceDeclaration(decl) => { + AnyJsDeclarationClause::TsInterfaceDeclaration(decl) + } + AnyJsDeclaration::TsModuleDeclaration(decl) => { + AnyJsDeclarationClause::TsModuleDeclaration(decl) + } + AnyJsDeclaration::TsTypeAliasDeclaration(decl) => { + AnyJsDeclarationClause::TsTypeAliasDeclaration(decl) + } + }), + )) + })); + + member_definitions.push(AnyTsTypeMember::TsMethodSignatureTypeMember( + make::ts_method_signature_type_member( + AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name(make::ident( + "destroy", + ))), + make::js_parameters( + make::token(T!['(']), + make::js_parameter_list(None, None), + make::token(T![')']), + ), + ) + .with_return_type_annotation(make::ts_return_type_annotation( + make::token(T![:]), + AnyTsReturnType::AnyTsType(AnyTsType::TsVoidType(make::ts_void_type(make::token(T![ + void + ])))), + )) + .build(), + )); + + member_declarations.push(AnyJsObjectMember::JsMethodObjectMember( + make::js_method_object_member( + AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name(make::ident( + "destroy", + ))), + make::js_parameters( + make::token(T!['(']), + make::js_parameter_list(None, 
None), + make::token(T![')']), + ), + make::js_function_body( + make::token(T!['{']), + make::js_directive_list(None), + make::js_statement_list(Some(AnyJsStatement::JsExpressionStatement( + make::js_expression_statement(AnyJsExpression::JsCallExpression( + make::js_call_expression( + AnyJsExpression::JsStaticMemberExpression( + make::js_static_member_expression( + AnyJsExpression::JsIdentifierExpression( + make::js_identifier_expression( + make::js_reference_identifier(make::ident("transport")), + ), + ), + make::token(T![.]), + AnyJsName::JsName(make::js_name(make::ident("destroy"))), + ), + ), + make::js_call_arguments( + make::token(T!['(']), + make::js_call_argument_list(None, None), + make::token(T![')']), + ), + ) + .build(), + )) + .build(), + ))), + make::token(T!['}']), + ), + ) + .build(), + )); + + // Export `PartialConfiguration` as `Configuration` for backwards compatibility. + items.push(AnyJsModuleItem::JsExport(make::js_export( + make::js_decorator_list([]), + make::token(T![export]), + AnyJsExportClause::AnyJsDeclarationClause(AnyJsDeclarationClause::TsTypeAliasDeclaration( + make::ts_type_alias_declaration( + make::token(T![type]), + make::ts_identifier_binding(make::ident("Configuration")), + make::token(T![=]), + AnyTsType::TsReferenceType( + make::ts_reference_type(AnyTsName::JsReferenceIdentifier( + make::js_reference_identifier(make::ident("PartialConfiguration")), + )) + .build(), + ), + ) + .build(), + )), + ))); + + items.push(AnyJsModuleItem::JsExport(make::js_export( + make::js_decorator_list([]), + make::token(T![export]), + AnyJsExportClause::AnyJsDeclarationClause(AnyJsDeclarationClause::TsInterfaceDeclaration( + make::ts_interface_declaration( + make::token(T![interface]), + make::ts_identifier_binding(make::ident("Workspace")), + make::token(T!['{']), + make::ts_type_member_list(member_definitions), + make::token(T!['}']), + ) + .build(), + )), + ))); + + let member_separators = (0..member_declarations.len()).map(|_| 
make::token(T![,])); + + items.push(AnyJsModuleItem::JsExport(make::js_export( + make::js_decorator_list([]), + make::token(T![export]), + AnyJsExportClause::AnyJsDeclarationClause(AnyJsDeclarationClause::JsFunctionDeclaration( + make::js_function_declaration( + make::token(T![function]), + AnyJsBinding::JsIdentifierBinding(make::js_identifier_binding(make::ident( + "createWorkspace", + ))), + make::js_parameters( + make::token(T!['(']), + make::js_parameter_list( + Some(AnyJsParameter::AnyJsFormalParameter( + AnyJsFormalParameter::JsFormalParameter( + make::js_formal_parameter( + make::js_decorator_list([]), + AnyJsBindingPattern::AnyJsBinding( + AnyJsBinding::JsIdentifierBinding( + make::js_identifier_binding(make::ident("transport")), + ), + ), + ) + .with_type_annotation(make::ts_type_annotation( + make::token(T![:]), + AnyTsType::TsReferenceType( + make::ts_reference_type(AnyTsName::JsReferenceIdentifier( + make::js_reference_identifier(make::ident("Transport")), + )) + .build(), + ), + )) + .build(), + ), + )), + None, + ), + make::token(T![')']), + ), + make::js_function_body( + make::token(T!['{']), + make::js_directive_list(None), + make::js_statement_list(Some(AnyJsStatement::JsReturnStatement( + make::js_return_statement(make::token(T![return])) + .with_argument(AnyJsExpression::JsObjectExpression( + make::js_object_expression( + make::token(T!['{']), + make::js_object_member_list( + member_declarations, + member_separators, + ), + make::token(T!['}']), + ), + )) + .build(), + ))), + make::token(T!['}']), + ), + ) + .with_return_type_annotation(make::ts_return_type_annotation( + make::token(T![:]), + AnyTsReturnType::AnyTsType(AnyTsType::TsReferenceType( + make::ts_reference_type(AnyTsName::JsReferenceIdentifier( + make::js_reference_identifier(make::ident("Workspace")), + )) + .build(), + )), + )) + .build(), + )), + ))); + + let module = make::js_module( + make::js_directive_list(None), + make::js_module_item_list(items), + make::eof(), + ) + .build(); 
+ + let formatted = format_node(JsFormatOptions::new(JsFileSource::ts()), module.syntax()).unwrap(); + let printed = formatted.print().unwrap(); + let code = printed.into_code(); + + update(&bindings_path, &code, &mode)?; + + Ok(()) +} diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs index 8cf07590..61ae5e4f 100644 --- a/xtask/codegen/src/lib.rs +++ b/xtask/codegen/src/lib.rs @@ -1,11 +1,13 @@ //! Codegen tools. Derived from Biome's codegen mod generate_analyser; +mod generate_bindings; mod generate_configuration; mod generate_crate; mod generate_new_analyser_rule; pub use self::generate_analyser::generate_analyser; +pub use self::generate_bindings::generate_bindings; pub use self::generate_configuration::generate_rules_configuration; pub use self::generate_crate::generate_crate; pub use self::generate_new_analyser_rule::generate_new_analyser_rule; @@ -52,6 +54,9 @@ pub fn to_capitalized(s: &str) -> String { #[derive(Debug, Clone, Bpaf)] #[bpaf(options)] pub enum TaskCommand { + /// Generate TypeScript definitions for the JavaScript bindings to the Workspace API + #[bpaf(command)] + Bindings, /// Generate factory functions for the analyser and the configuration of the analysers #[bpaf(command)] Analyser, diff --git a/xtask/codegen/src/main.rs b/xtask/codegen/src/main.rs index c432c16e..8e0e6cd8 100644 --- a/xtask/codegen/src/main.rs +++ b/xtask/codegen/src/main.rs @@ -2,8 +2,8 @@ use xtask::Mode::Overwrite; use xtask::{project_root, pushd, Result}; use xtask_codegen::{ - generate_analyser, generate_crate, generate_new_analyser_rule, generate_rules_configuration, - task_command, TaskCommand, + generate_analyser, generate_bindings, generate_crate, generate_new_analyser_rule, + generate_rules_configuration, task_command, TaskCommand, }; fn main() -> Result<()> { @@ -27,6 +27,9 @@ fn main() -> Result<()> { TaskCommand::Configuration => { generate_rules_configuration(Overwrite)?; } + TaskCommand::Bindings => { + generate_bindings(Overwrite)?; + } } Ok(())