diff --git a/.cargo/config.toml b/.cargo/config.toml
new file mode 100644
index 00000000..9609e08a
--- /dev/null
+++ b/.cargo/config.toml
@@ -0,0 +1,2 @@
+[registries.pragma]
+index = "sparse+https://registry.devnet.pragma.build/api/v1/crates/"
\ No newline at end of file
diff --git a/.env.example b/.env.example
new file mode 100644
index 00000000..7d196866
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,21 @@
+# Mode, can be dev or production
+MODE=dev
+
+# Starknet RPCs
+MAINNET_RPC_URL=https://free-rpc.nethermind.io/mainnet-juno
+SEPOLIA_RPC_URL=https://free-rpc.nethermind.io/sepolia-juno
+
+# Databases info
+OFFCHAIN_DATABASE_URL="postgres://postgres:test-password@0.0.0.0:5432/pragma"
+ONCHAIN_DATABASE_URL="postgres://postgres:test-password@0.0.0.0:5433/pragma"
+DATABASE_MAX_CONN=5
+
+# API info
+HOST="0.0.0.0"
+PORT=3000
+METRICS_PORT=8080
+
+# Kafka info
+KAFKA_BROKERS=localhost:29092
+TOPIC="pragma-data"
+OTEL_EXPORTER_OTLP_ENDPOINT=localhost:4317
diff --git a/.github/workflows/pragma.yaml b/.github/workflows/pragma.yaml
index 23437496..f083662c 100644
--- a/.github/workflows/pragma.yaml
+++ b/.github/workflows/pragma.yaml
@@ -3,19 +3,11 @@ name: Pragma CI
 on:
   workflow_dispatch:
   pull_request:
-    branches: [main]
+    branches: [main, dev]
   push:
-    branches: [main]
+    branches: [main, dev]
 
 jobs:
-  prettier:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Run prettier
-        run: |-
-          npx prettier --check .
-
   toml-lint:
     runs-on: ubuntu-latest
     steps:
@@ -38,7 +30,7 @@ jobs:
         with:
           fetch-depth: 0
 
-      - uses: astraly-labs/workflows/rust/test@v1.0.5
+      - uses: astraly-labs/workflows/rust/test@v1.0.6
        with:
          fail_ci: false
 
@@ -55,7 +47,7 @@
       with:
         fetch-depth: 0
 
      - name: Rust Lint
-        uses: astraly-labs/workflows/rust/lint@v1.0.5
+        uses: astraly-labs/workflows/rust/lint@v1.0.6
       with:
         token: ${{ secrets.GITHUB_TOKEN }}
@@ -72,6 +64,6 @@
       with:
         fetch-depth: 0
      - name: Rust Scanner
-        uses: astraly-labs/workflows/rust/scan@v1.0.5
+        uses: astraly-labs/workflows/rust/scan@v1.0.6
       with:
         codeql_upload: false
diff --git a/.gitignore b/.gitignore
index 08d75c51..3a7c6f21 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,6 @@
 /target
 .idea
 .env
-db/
 infra/db/password.txt
 
 # Vscode settings
diff --git a/Cargo.lock b/Cargo.lock
index da2841d6..ad83d08d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3 +version = 4 [[package]] name = "addr2line" @@ -17,6 +17,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + [[package]] name = "aes" version = "0.8.4" @@ -60,9 +66,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.15" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ -90,28 +96,29 @@ dependencies = [ [[package]] name = "anstyle-query" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.4" +version = "3.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "once_cell", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" [[package]] name = "arbitrary" @@ -130,137 +137,9 @@ checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "arrayvec" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" - -[[package]] -name = "ascii-canvas" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" -dependencies = [ - "term", -] - -[[package]] -name = "assert-json-diff" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" -dependencies = [ - "serde", - "serde_json", -] - -[[package]] -name = "async-attributes" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3203e79f4dd9bdda415ed03cf14dae5a2bf775c683a00f94e9cd1faf0f596e5" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener 2.5.3", - "futures-core", -] - -[[package]] -name = "async-channel" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" -dependencies = [ - "concurrent-queue", - 
"event-listener-strategy", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-executor" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7ebdfa2ebdab6b1760375fa7d6f382b9f486eac35fc994625a00e89280bdbb7" -dependencies = [ - "async-task", - "concurrent-queue", - "fastrand 2.1.0", - "futures-lite 2.3.0", - "slab", -] - -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.3.1", - "async-executor", - "async-io 2.3.3", - "async-lock 3.4.0", - "blocking", - "futures-lite 2.3.0", - "once_cell", -] - -[[package]] -name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-lite 1.13.0", - "log", - "parking", - "polling 2.8.0", - "rustix 0.37.27", - "slab", - "socket2 0.4.10", - "waker-fn", -] - -[[package]] -name = "async-io" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" -dependencies = [ - "async-lock 3.4.0", - "cfg-if", - "concurrent-queue", - "futures-io", - "futures-lite 2.3.0", - "parking", - "polling 3.7.2", - "rustix 0.38.34", - "slab", - "tracing", - "windows-sys 0.52.0", -] - -[[package]] -name = "async-lock" -version = "2.8.0" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" -dependencies = [ - "event-listener 2.5.3", -] +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "async-lock" @@ -268,88 +147,16 @@ version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" dependencies = [ - "event-listener 5.3.1", + "event-listener", "event-listener-strategy", "pin-project-lite", ] -[[package]] -name = "async-object-pool" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aeb901c30ebc2fc4ab46395bbfbdba9542c16559d853645d75190c3056caf3bc" -dependencies = [ - "async-std", -] - -[[package]] -name = "async-process" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" -dependencies = [ - "async-io 1.13.0", - "async-lock 2.8.0", - "async-signal", - "blocking", - "cfg-if", - "event-listener 3.1.0", - "futures-lite 1.13.0", - "rustix 0.38.34", - "windows-sys 0.48.0", -] - -[[package]] -name = "async-signal" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfb3634b73397aa844481f814fad23bbf07fdb0eabec10f2eb95e58944b1ec32" -dependencies = [ - "async-io 2.3.3", - "async-lock 3.4.0", - "atomic-waker", - "cfg-if", - "futures-core", - "futures-io", - "rustix 0.38.34", - "signal-hook-registry", - "slab", - "windows-sys 0.52.0", -] - -[[package]] -name = "async-std" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" -dependencies = [ - 
"async-attributes", - "async-channel 1.9.0", - "async-global-executor", - "async-io 1.13.0", - "async-lock 2.8.0", - "async-process", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite 1.13.0", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - [[package]] name = "async-stream" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" dependencies = [ "async-stream-impl", "futures-core", @@ -358,21 +165,15 @@ dependencies = [ [[package]] name = "async-stream-impl" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] -[[package]] -name = "async-task" -version = "4.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" - [[package]] name = "async-trait" version = "0.1.86" @@ -381,7 +182,7 @@ checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -392,26 +193,26 @@ checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "auto_impl" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" +checksum = "e12882f59de5360c748c4cbf569a042d5fb0eb515f7bea9c1f470b47f6ffbd73" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "aws-config" -version = "1.5.11" +version = "1.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5d1c2c88936a73c699225d0bc00684a534166b0cebc2659c3cdf08de8edc64c" +checksum = "490aa7465ee685b2ced076bb87ef654a47724a7844e2c7d3af4e749ce5b875dd" dependencies = [ "aws-credential-types", "aws-runtime", @@ -426,7 +227,7 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "fastrand 2.1.0", + "fastrand", "hex", "http 0.2.12", "ring", @@ -449,38 +250,11 @@ dependencies = [ "zeroize", ] -[[package]] -name = "aws-lc-rs" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae74d9bd0a7530e8afd1770739ad34b36838829d6ad61818f9230f683f5ad77" -dependencies = [ - "aws-lc-sys", - "mirai-annotations", - "paste", - "zeroize", -] - -[[package]] -name = "aws-lc-sys" -version = "0.20.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f0e249228c6ad2d240c2dc94b714d711629d52bad946075d8e9b2f5391f0703" -dependencies = [ - "bindgen", - "cc", - "cmake", - "dunce", - "fs_extra", - "libc", - "paste", -] - [[package]] name = "aws-runtime" -version = "1.5.4" +version = "1.5.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee7643696e7fdd74c10f9eb42848a87fe469d35eae9c3323f80aa98f350baac" +checksum = "76dd04d39cc12844c0994f2c9c5a6f5184c22e9188ec1ff723de41910a21dcad" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -491,21 +265,21 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "fastrand 2.1.0", + "fastrand", "http 0.2.12", "http-body 0.4.6", "once_cell", "percent-encoding", "pin-project-lite", "tracing", - "uuid 1.8.0", + "uuid 1.15.0", ] [[package]] name = "aws-sdk-secretsmanager" -version = "1.61.0" +version = "1.64.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81527c73092c505408ffe82988c937fd3c031258b950e1e2d8960209c26de72c" +checksum = "f9d043d816535461c5d5289b53c03a4458aae3e9c4d47e3e901722590c8d6fcb" dependencies = [ "aws-credential-types", "aws-runtime", @@ -517,7 +291,7 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "fastrand 2.1.0", + "fastrand", "http 0.2.12", "once_cell", "regex-lite", @@ -526,9 +300,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.57.0" +version = "1.60.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c54bab121fe1881a74c338c5f723d1592bf3b53167f80268a1274f404e1acc38" +checksum = "60186fab60b24376d3e33b9ff0a43485f99efd470e3b75a9160c849741d63d56" dependencies = [ "aws-credential-types", "aws-runtime", @@ -548,9 +322,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.58.0" +version = "1.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c8234fd024f7ac61c4e44ea008029bde934250f371efe7d4a39708397b1080c" +checksum = "7033130ce1ee13e6018905b7b976c915963755aef299c1521897679d6cd4f8ef" dependencies = [ "aws-credential-types", "aws-runtime", @@ -570,9 +344,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.58.0" +version = "1.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba60e1d519d6f23a9df712c04fdeadd7872ac911c84b2f62a8bda92e129b7962" +checksum = "c5c1cac7677179d622b4448b0d31bcb359185295dc6fca891920cfb17e2b5156" dependencies = [ "aws-credential-types", "aws-runtime", @@ -593,9 +367,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.2.8" +version = "1.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bc5bbd1e4a2648fd8c5982af03935972c24a2f9846b396de661d351ee3ce837" +checksum = "9bfe75fad52793ce6dec0dc3d4b1f388f038b5eb866c8d4d7f3a8e21b5ea5051" dependencies = [ "aws-credential-types", "aws-smithy-http", @@ -606,7 +380,7 @@ dependencies = [ "hex", "hmac", "http 0.2.12", - "http 1.1.0", + "http 1.2.0", "once_cell", "percent-encoding", "sha2", @@ -666,22 +440,22 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.7.7" +version = "1.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "865f7050bbc7107a6c98a397a9fcd9413690c27fa718446967cf03b2d3ac517e" +checksum = "d526a12d9ed61fadefda24abe2e682892ba288c2018bcb38b1b4c111d13f6d92" dependencies = [ "aws-smithy-async", "aws-smithy-http", "aws-smithy-runtime-api", "aws-smithy-types", "bytes", - "fastrand 2.1.0", + "fastrand", "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "http-body 1.0.0", + "http-body 1.0.1", "httparse", - "hyper 0.14.29", + "hyper 0.14.32", "hyper-rustls 0.24.2", "once_cell", "pin-project-lite", @@ -701,7 +475,7 @@ dependencies = [ "aws-smithy-types", "bytes", "http 0.2.12", - "http 1.1.0", + "http 1.2.0", "pin-project-lite", 
"tokio", "tracing", @@ -719,9 +493,9 @@ dependencies = [ "bytes-utils", "futures-core", "http 0.2.12", - "http 1.1.0", + "http 1.2.0", "http-body 0.4.6", - "http-body 1.0.0", + "http-body 1.0.1", "http-body-util", "itoa", "num-integer", @@ -771,7 +545,7 @@ dependencies = [ "futures-util", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.29", + "hyper 0.14.32", "itoa", "matchit 0.7.3", "memchr", @@ -801,8 +575,8 @@ dependencies = [ "axum-core 0.4.5", "bytes", "futures-util", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "itoa", "matchit 0.7.3", @@ -812,9 +586,7 @@ dependencies = [ "pin-project-lite", "rustversion", "serde", - "serde_json", - "serde_path_to_error", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tower 0.5.2", "tower-layer", "tower-service", @@ -832,10 +604,10 @@ dependencies = [ "bytes", "form_urlencoded", "futures-util", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "itoa", "matchit 0.8.4", @@ -849,9 +621,9 @@ dependencies = [ "serde_path_to_error", "serde_urlencoded", "sha1", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tokio", - "tokio-tungstenite 0.26.1", + "tokio-tungstenite 0.26.2", "tower 0.5.2", "tower-layer", "tower-service", @@ -884,13 +656,13 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", "rustversion", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tower-layer", "tower-service", ] @@ -903,13 +675,13 @@ checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733" dependencies = [ "bytes", "futures-util", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", "rustversion", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tower-layer", "tower-service", "tracing", @@ -924,14 +696,17 @@ dependencies = [ "axum 0.8.1", "axum-core 0.5.0", "bytes", + "form_urlencoded", "futures-util", "headers", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", "serde", + "serde_html_form", + "serde_path_to_error", "tower 0.5.2", "tower-layer", "tower-service", @@ -946,7 +721,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -957,7 +732,7 @@ checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -969,7 +744,7 @@ dependencies = [ "axum 0.7.9", "futures-core", "futures-util", - "http 1.1.0", + "http 1.2.0", "opentelemetry", "pin-project-lite", "tower 0.5.2", @@ -988,7 +763,7 @@ dependencies = [ "cc", "cfg-if", "libc", - "miniz_oxide", + "miniz_oxide 0.7.4", "object", "rustc-demangle", ] @@ -1022,21 +797,21 @@ dependencies = [ ] [[package]] -name = "basic-cookies" -version = "0.1.5" +name = "bigdecimal" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67bd8fd42c16bdb08688243dc5f0cc117a3ca9efeeaba3a345a18a6159ad96f7" +checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" dependencies = [ - "lalrpop", - "lalrpop-util", - "regex", + "num-bigint", + "num-integer", + "num-traits", ] [[package]] name = "bigdecimal" -version = "0.4.3" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9324c8014cd04590682b34f1e9448d38f0674d0f7b2dc553331016ef0e4e9ebc" +checksum = "7f31f3af01c5c65a07985c804d3366560e6fa7883d640a122819b14ec327482c" dependencies = [ "autocfg", "libm", @@ -1046,44 +821,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bindgen" -version = "0.69.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" -dependencies = [ - "bitflags 2.5.0", - "cexpr", - "clang-sys", - "itertools", - "lazy_static", - "lazycell", - "log", - "prettyplease", - "proc-macro2", - "quote", - "regex", - "rustc-hash", - "shlex", - "syn 2.0.87", - "which", -] - -[[package]] -name = "bit-set" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" - [[package]] name = "bitflags" version = "1.3.2" @@ -1092,9 +829,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" [[package]] name = "bitvec" @@ -1117,24 +854,11 @@ dependencies = [ "generic-array", ] -[[package]] -name = "blocking" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" -dependencies = [ - "async-channel 2.3.1", - "async-task", - "futures-io", - "futures-lite 2.3.0", - "piper", -] - [[package]] name = "bollard" -version = "0.17.0" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a063d51a634c7137ecd9f6390ec78e1c512e84c9ded80198ec7df3339a16a33" +checksum = "d41711ad46fda47cd701f6908e59d1bd6b9a2b7464c0d0aeab95c6d37096ff8a" dependencies = [ "base64 0.22.1", "bollard-stubs", @@ -1143,25 +867,25 @@ dependencies = [ "futures-util", "hex", "home", - "http 1.1.0", + "http 1.2.0", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-named-pipe", - "hyper-rustls 0.27.2", + "hyper-rustls 0.27.5", "hyper-util", "hyperlocal", "log", "pin-project-lite", - "rustls 0.23.12", - "rustls-native-certs 0.7.1", - "rustls-pemfile 2.1.2", + "rustls 0.23.23", + "rustls-native-certs 0.7.3", + "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", "serde_derive", "serde_json", "serde_repr", "serde_urlencoded", - "thiserror 1.0.61", + "thiserror 1.0.69", "tokio", "tokio-util", "tower-service", @@ -1177,14 +901,14 @@ checksum = "6d7c5415e3a6bc6d3e99eff6268e488fd4ee25e7b28c10f08fa6760bd9de16e4" dependencies = [ "serde", "serde_repr", - "serde_with 3.9.0", + "serde_with 3.12.0", ] [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "byte-slice-cast" @@ -1216,8 +940,9 @@ dependencies = [ [[package]] name = "cainome" -version = "0.4.5" -source = 
"git+https://github.com/cartridge-gg/cainome?tag=v0.4.5#c3857b13cbf2ed3abda9d07297326ea4ad8e93da" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d5d94fa57317b42e011a2715a07ce8cadeb80ca2a471167cbd85f48ea4d0934" dependencies = [ "anyhow", "async-trait", @@ -1232,9 +957,9 @@ dependencies = [ "convert_case", "serde", "serde_json", - "starknet 0.12.0", + "starknet 0.13.0", "starknet-types-core", - "thiserror 1.0.61", + "thiserror 1.0.69", "tracing", "tracing-subscriber", "url", @@ -1243,41 +968,47 @@ dependencies = [ [[package]] name = "cainome-cairo-serde" version = "0.1.0" -source = "git+https://github.com/cartridge-gg/cainome?tag=v0.4.5#c3857b13cbf2ed3abda9d07297326ea4ad8e93da" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2b487395a270c6f683a72ecb1f7848cfae81a269015a4a9328fbfa012448fe9" dependencies = [ + "num-bigint", "serde", - "starknet 0.12.0", - "thiserror 1.0.61", + "serde_with 3.12.0", + "starknet 0.13.0", + "thiserror 1.0.69", ] [[package]] name = "cainome-cairo-serde-derive" version = "0.1.0" -source = "git+https://github.com/cartridge-gg/cainome?tag=v0.4.5#c3857b13cbf2ed3abda9d07297326ea4ad8e93da" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d272424141f0ced49ca5f40bc4b756235ee6e7c9cf6ab01f7ef5ac010f5f8864" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", "unzip-n", ] [[package]] name = "cainome-parser" -version = "0.1.0" -source = "git+https://github.com/cartridge-gg/cainome?tag=v0.4.5#c3857b13cbf2ed3abda9d07297326ea4ad8e93da" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8053124dfe40eacb6c78ffe44e4199fc0547b22b3a1e506431b1cd554812ef1c" dependencies = [ "convert_case", "quote", "serde_json", - "starknet 0.12.0", - "syn 2.0.87", - "thiserror 1.0.61", + "starknet 0.13.0", + "syn 2.0.98", + "thiserror 1.0.69", ] [[package]] name = "cainome-rs" version = "0.1.0" -source = "git+https://github.com/cartridge-gg/cainome?tag=v0.4.5#c3857b13cbf2ed3abda9d07297326ea4ad8e93da" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0e16da812c3df59d3093df7dd7cfe7fd1ff051c870aae3807dee2180c511557" dependencies = [ "anyhow", "cainome-cairo-serde", @@ -1287,15 +1018,16 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "starknet 0.12.0", - "syn 2.0.87", - "thiserror 1.0.61", + "starknet 0.13.0", + "syn 2.0.98", + "thiserror 1.0.69", ] [[package]] name = "cainome-rs-macro" version = "0.1.0" -source = "git+https://github.com/cartridge-gg/cainome?tag=v0.4.5#c3857b13cbf2ed3abda9d07297326ea4ad8e93da" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71009b935b7b3056c8032c2369d022beb629ea903f167e37bff0a2c84dd43675" dependencies = [ "anyhow", "cainome-cairo-serde", @@ -1305,9 +1037,9 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "starknet 0.12.0", - "syn 2.0.87", - "thiserror 1.0.61", + "starknet 0.13.0", + "syn 2.0.98", + "thiserror 1.0.69", ] [[package]] @@ -1320,35 +1052,36 @@ dependencies = [ ] [[package]] -name = "cassowary" -version = "0.3.0" +name = "capnp" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" +checksum = "b1d1b4a00e80b7c4b1a49e845365f25c9d8fd0a19c9cd8d66f68afea47b1f020" +dependencies = [ + "embedded-io", +] [[package]] -name = "castaway" -version = "0.1.2" +name = "capnpc" +version = "0.21.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" +checksum = "5af589f7a7f3e6d920120b913345bd9a2fc65dfd76c5053a142852a5ea2e8609" +dependencies = [ + "capnp", +] [[package]] -name = "cc" -version = "1.0.99" +name = "cassowary" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695" -dependencies = [ - "jobserver", - "libc", - "once_cell", -] +checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" [[package]] -name = "cexpr" -version = "0.6.0" +name = "cc" +version = "1.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +checksum = "c736e259eea577f443d5c86c304f9f4ae0295c43f3ba05c21f1d66b5f06001af" dependencies = [ - "nom", + "shlex", ] [[package]] @@ -1357,11 +1090,17 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" dependencies = [ "android-tzdata", "iana-time-zone", @@ -1369,7 +1108,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -1382,22 +1121,11 @@ dependencies = [ "inout", ] -[[package]] -name = "clang-sys" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" -dependencies = [ - "glob", - "libc", - "libloading", -] - [[package]] name = "clap" -version = "4.5.21" +version = "4.5.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" +checksum = "027bb0d98429ae334a8698531da7077bdf906419543a35a55c2cb1b66437d767" dependencies = [ "clap_builder", "clap_derive", @@ -1405,9 +1133,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.21" +version = "4.5.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" +checksum = "5589e0cba072e0f3d23791efac0fd8627b49c829c196a492e88168e6a669d863" dependencies = [ "anstream", "anstyle", @@ -1417,94 +1145,64 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.38" +version = "4.5.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9647a559c112175f17cf724dc72d3645680a883c58481332779192b0d8e7a01" +checksum = "f5c5508ea23c5366f77e53f5a0070e5a84e51687ec3ef9e0464c86dc8d13ce98" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.18" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" dependencies = [ "heck 0.5.0", 
"proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "clap_lex" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] -name = "cmake" -version = "0.1.50" +name = "colorchoice" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31c789563b815f77f4250caee12365734369f942439b7defd71e18a48197130" -dependencies = [ - "cc", -] - -[[package]] -name = "color-eyre" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5" -dependencies = [ - "backtrace", - "color-spantrace", - "eyre", - "indenter", - "once_cell", - "owo-colors", - "tracing-error", -] +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] -name = "color-spantrace" -version = "0.2.1" +name = "concurrent-queue" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" dependencies = [ - "once_cell", - "owo-colors", - "tracing-core", - "tracing-error", + "crossbeam-utils", ] [[package]] -name = "colorchoice" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" - -[[package]] -name = "combine" -version = "4.6.7" +name = "const_format" +version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd" dependencies = [ - "bytes", - "futures-core", - "memchr", - "pin-project-lite", - "tokio", - "tokio-util", + "const_format_proc_macros", ] [[package]] -name = "concurrent-queue" -version = "2.5.0" +name = "const_format_proc_macros" +version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" dependencies = [ - "crossbeam-utils", + "proc-macro2", + "quote", + "unicode-xid", ] [[package]] @@ -1526,17 +1224,27 @@ dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" -version = "0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.12" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] @@ -1570,9 +1278,9 @@ dependencies = [ [[package]] name = 
"crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crossterm" @@ -1580,7 +1288,7 @@ version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", "crossterm_winapi", "libc", "mio 0.8.11", @@ -1601,9 +1309,9 @@ dependencies = [ [[package]] name = "crunchy" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" +checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" [[package]] name = "crypto-bigint" @@ -1635,42 +1343,11 @@ dependencies = [ "cipher", ] -[[package]] -name = "curl" -version = "0.4.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6" -dependencies = [ - "curl-sys", - "libc", - "openssl-probe", - "openssl-sys", - "schannel", - "socket2 0.5.7", - "windows-sys 0.52.0", -] - -[[package]] -name = "curl-sys" -version = "0.4.74+curl-8.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8af10b986114528fcdc4b63b6f5f021b7057618411046a4de2ba0f0149a097bf" -dependencies = [ - "cc", - "libc", - "libnghttp2-sys", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", - "windows-sys 0.52.0", -] - [[package]] name = "darling" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83b2eb4d90d12bdda5ed17de686c2acb4c57914f8f921b8da7e112b5a36f3fe1" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ "darling_core", "darling_macro", @@ -1678,27 +1355,27 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622687fe0bac72a04e5599029151f5796111b90f1baaa9b544d807a5e31cd120" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "darling_macro" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -1730,9 +1407,9 @@ dependencies = [ [[package]] name = "data-encoding" -version = "2.6.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" +checksum = "575f75dfd25738df5b91b8e43e14d44bda14637a58fae779fd2b064f8bf3e010" [[package]] name = "deadpool" @@ -1794,7 +1471,7 @@ checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -1803,8 +1480,8 @@ version = "2.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ff236accb9a5069572099f0b350a92e9560e8e63a9b8d546162f4a5e03026bb2" dependencies = [ - "bigdecimal", - "bitflags 2.5.0", + "bigdecimal 0.4.7", + "bitflags 2.8.0", "byteorder", "chrono", "diesel_derives 2.1.4", @@ -1818,7 +1495,7 @@ dependencies = [ "r2d2", "serde_json", "time", - "uuid 1.8.0", + "uuid 1.15.0", ] [[package]] @@ -1856,7 +1533,7 @@ dependencies = [ "diesel_table_macro_syntax", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -1876,7 +1553,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc5557efc453706fed5e4fa85006fe9817c224c3f480a34c7e5959fd700921c5" dependencies = [ - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -1905,16 +1582,6 @@ dependencies = [ "dirs-sys", ] -[[package]] -name = "dirs-next" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" -dependencies = [ - "cfg-if", - "dirs-sys-next", -] - [[package]] name = "dirs-sys" version = "0.4.1" @@ -1927,17 +1594,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "dirs-sys-next" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" -dependencies = [ - "libc", - "redox_users", - "winapi", -] - [[package]] name = "displaydoc" version = "0.2.5" @@ -1946,7 +1602,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -1972,46 +1628,37 @@ version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" -[[package]] -name = "dunce" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" - [[package]] name = "either" -version = "1.12.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" +checksum = "b7914353092ddf589ad78f25c5c1c21b7f80b0ff8621e7c814c3485b5306da9d" [[package]] -name = "ena" -version = "0.14.3" +name = "embedded-io" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" -dependencies = [ - "log", -] +checksum = "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d" [[package]] name = "encoding_rs" -version = "0.8.34" +version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] [[package]] name = "enum-as-inner" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" +checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" dependencies = [ - "heck 0.4.1", + "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -2038,18 +1685,18 @@ dependencies = [ [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2070,7 +1717,7 @@ dependencies = [ "serde_json", "sha2", "sha3", - "thiserror 1.0.61", + "thiserror 1.0.69", "uuid 0.8.2", ] @@ -2103,26 +1750,9 @@ dependencies = [ [[package]] name = "event-listener" -version = "2.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" - -[[package]] -name = "event-listener" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - -[[package]] -name = "event-listener" -version = "5.3.1" +version = "5.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae" dependencies = [ "concurrent-queue", "parking", @@ -2131,24 +1761,14 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" +checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" dependencies = [ - "event-listener 5.3.1", + "event-listener", "pin-project-lite", ] -[[package]] -name = "eyre" -version = "0.6.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" -dependencies = [ - "indenter", - "once_cell", -] - [[package]] name = "fallible-iterator" version = "0.2.0" @@ -2157,18 +1777,20 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" [[package]] name = "fastrand" -version = "1.9.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] -name = "fastrand" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +name = "faucon-rs" +version = "0.1.7" +source = "sparse+https://registry.devnet.pragma.build/api/v1/crates/" +checksum = "88ec94858b4e5d0ad02004347796ff3ed17817a2b6dc6c7adb3c60c67159676d" +dependencies = [ + "pragma-common", + "rdkafka", + "thiserror 2.0.12", +] [[package]] name = "fixed-hash" @@ -2182,20 +1804,14 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "fixedbitset" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" - [[package]] name = "flate2" -version = "1.0.30" +version = "1.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" +checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" dependencies = [ "crc32fast", - "miniz_oxide", + "miniz_oxide 0.8.5", ] [[package]] @@ -2234,12 +1850,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "fs_extra" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" - [[package]] name = "funty" version = "2.0.0" @@ -2294,34 +1904,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" -[[package]] -name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand 1.9.0", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] - -[[package]] -name = "futures-lite" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" -dependencies = [ - "fastrand 2.1.0", - "futures-core", - "futures-io", - "parking", - "pin-project-lite", -] - [[package]] name = "futures-macro" version = "0.3.31" @@ -2330,7 +1912,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -2425,20 +2007,51 @@ checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "glob" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] -name = "gloo-timers" -version = "0.2.6" +name = "google-apis-common" +version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +checksum = "7530ee92a7e9247c3294ae1b84ea98474dbc27563c49a14d3938e816499bf38f" dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", + "base64 0.22.1", + "chrono", + "http 1.2.0", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "itertools 0.13.0", + "mime", + "percent-encoding", + "serde", + "serde_json", + "serde_with 3.12.0", + "tokio", + "url", + "yup-oauth2", +] + +[[package]] +name = "google-secretmanager1" +version = "6.0.0+20240621" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5efdd2a31532b28fa4bcc44aa13ba4499a1a944e3b5bc22207b0191760cc4d3d" +dependencies = [ + "chrono", + "google-apis-common", + "hyper 1.6.0", + "hyper-rustls 0.27.5", + "hyper-util", + "mime", + "serde", + "serde_json", + "serde_with 3.12.0", + "tokio", + "url", + "yup-oauth2", ] [[package]] @@ -2473,7 +2086,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.2.6", + "indexmap 2.7.1", "slab", "tokio", "tokio-util", @@ -2482,17 +2095,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.5" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" +checksum = "5017294ff4bb30944501348f6f8e42e6ad28f42c8bbef7a74029aff064a4e3c2" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.1.0", - "indexmap 2.2.6", + "http 1.2.0", + "indexmap 2.7.1", "slab", "tokio", "tokio-util", @@ -2531,7 +2144,7 @@ dependencies = [ "base64 0.21.7", "bytes", "headers-core", - "http 1.1.0", + "http 1.2.0", "httpdate", "mime", "sha1", @@ -2543,7 +2156,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" dependencies = [ - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -2578,9 +2191,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hickory-proto" -version = "0.24.1" +version = "0.24.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07698b8420e2f0d6447a436ba999ec85d8fbf2a398bbd737b82cac4a2e96e512" +checksum = "92652067c9ce6f66ce53cc38d1169daa36e6e7eb7dd3b63b5103bd9d97117248" dependencies = [ "async-trait", "cfg-if", @@ -2589,11 +2202,11 @@ dependencies = [ "futures-channel", "futures-io", "futures-util", - "idna 0.4.0", + "idna", "ipnet", "once_cell", "rand 0.8.5", - "thiserror 1.0.61", + "thiserror 1.0.69", "tinyvec", "tokio", "tracing", @@ -2602,9 +2215,9 @@ dependencies = [ [[package]] name = "hickory-resolver" -version = "0.24.1" +version = "0.24.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28757f23aa75c98f254cf0405e6d8c25b831b32921b050a66692427679b1f243" +checksum = "cbb117a1ca520e111743ab2f6688eddee69db4e0ea242545a604dce8a66fd22e" dependencies = [ "cfg-if", "futures-util", @@ -2616,7 +2229,7 @@ dependencies = [ "rand 0.8.5", "resolv-conf", "smallvec", - "thiserror 1.0.61", + "thiserror 1.0.69", "tokio", "tracing", ] @@ -2632,11 +2245,11 @@ dependencies = [ [[package]] name = "home" -version = "0.5.9" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2663,9 +2276,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -2685,38 +2298,38 @@ dependencies = [ [[package]] name = "http-body" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] name = "http-body-util" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", - "futures-core", - "http 1.1.0", - "http-body 1.0.0", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", "pin-project-lite", ] [[package]] name = 
"http-range-header" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08a397c49fec283e3d6211adbe480be95aae5f304cfb923e9970e08956d5168a" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" [[package]] name = "httparse" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "f2d708df4e7140240a16cd6ab0ab65c972d7433ab77819ea693fde9c43811e2a" [[package]] name = "httpdate" @@ -2724,35 +2337,6 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" -[[package]] -name = "httpmock" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08ec9586ee0910472dec1a1f0f8acf52f0fdde93aea74d70d4a3107b4be0fd5b" -dependencies = [ - "assert-json-diff", - "async-object-pool", - "async-std", - "async-trait", - "base64 0.21.7", - "basic-cookies", - "crossbeam-utils", - "form_urlencoded", - "futures-util", - "hyper 0.14.29", - "isahc", - "lazy_static", - "levenshtein", - "log", - "regex", - "serde", - "serde_json", - "serde_regex", - "similar", - "tokio", - "url", -] - [[package]] name = "humantime" version = "2.1.0" @@ -2761,9 +2345,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.29" +version = "0.14.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" dependencies = [ "bytes", "futures-channel", @@ -2776,7 +2360,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.7", + "socket2", "tokio", "tower-service", "tracing", @@ -2785,16 +2369,16 @@ dependencies = [ [[package]] name = "hyper" -version = "1.4.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.5", - "http 1.1.0", - "http-body 1.0.0", + "h2 0.4.8", + "http 1.2.0", + "http-body 1.0.1", "httparse", "httpdate", "itoa", @@ -2811,7 +2395,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73b7d8abf35697b81a825e386fc151e0d503e8cb5fcb93cc8669c376dfd6f278" dependencies = [ "hex", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "pin-project-lite", "tokio", @@ -2827,7 +2411,7 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper 0.14.29", + "hyper 0.14.32", "log", "rustls 0.21.12", "rustls-native-certs 0.6.3", @@ -2837,22 +2421,21 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.2" +version = "0.27.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" dependencies = [ "futures-util", - "http 1.1.0", - "hyper 1.4.1", + "http 1.2.0", + "hyper 1.6.0", "hyper-util", - "log", - "rustls 0.23.12", - "rustls-native-certs 0.7.1", + "rustls 0.23.23", + 
"rustls-native-certs 0.8.1", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", "tower-service", - "webpki-roots 0.26.3", + "webpki-roots 0.26.8", ] [[package]] @@ -2861,7 +2444,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "pin-project-lite", "tokio", @@ -2875,7 +2458,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.29", + "hyper 0.14.32", "native-tls", "tokio", "tokio-native-tls", @@ -2889,7 +2472,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "native-tls", "tokio", @@ -2906,11 +2489,11 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "hyper 1.4.1", + "http 1.2.0", + "http-body 1.0.1", + "hyper 1.6.0", "pin-project-lite", - "socket2 0.5.7", + "socket2", "tokio", "tower-service", "tracing", @@ -2924,7 +2507,7 @@ checksum = "986c5ce3b994526b3cd75578e62554abd09f0899d6206de48b3e96ab34ccc8c7" dependencies = [ "hex", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "pin-project-lite", "tokio", @@ -2933,9 +2516,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.60" +version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -2955,29 +2538,148 @@ dependencies = [ ] [[package]] -name = "ident_case" -version = "1.0.1" +name = "icu_collections" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] [[package]] -name = "idna" -version = "0.4.0" +name = "icu_locid" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", ] [[package]] -name = "idna" -version = "0.5.0" +name = "icu_locid_transform" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", ] [[package]] @@ -3009,21 +2711,15 @@ dependencies = [ [[package]] name = "impl-trait-for-tuples" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.98", ] -[[package]] -name = "indenter" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" - [[package]] name = "indexmap" version = "1.9.3" @@ -3037,12 +2733,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.6" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ "equivalent", - "hashbrown 0.14.5", + "hashbrown 0.15.2", 
"serde", ] @@ -3054,50 +2750,30 @@ checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" [[package]] name = "inout" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" dependencies = [ "generic-array", ] -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.9", - "libc", - "windows-sys 0.48.0", -] - [[package]] name = "ipconfig" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2 0.5.7", + "socket2", "widestring", "windows-sys 0.48.0", - "winreg 0.50.0", + "winreg", ] [[package]] name = "ipnet" -version = "2.9.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "ipnetwork" @@ -3116,7 +2792,7 @@ checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37" dependencies = [ "hermit-abi 0.4.0", "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3126,62 +2802,45 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] -name = "isahc" -version = "1.7.2" +name = "itertools" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" dependencies = [ - "async-channel 1.9.0", - "castaway", - "crossbeam-utils", - "curl", - "curl-sys", - "encoding_rs", - "event-listener 2.5.3", - "futures-lite 1.13.0", - "http 0.2.12", - "log", - "mime", - "once_cell", - "polling 2.8.0", - "slab", - "sluice", - "tracing", - "tracing-futures", - "url", - "waker-fn", + "either", ] [[package]] name = "itertools" -version = "0.11.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] [[package]] -name = "itoa" -version = "1.0.11" +name = "itertools" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] [[package]] -name = "jobserver" -version = "0.1.32" +name = "itoa" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" -dependencies = [ - "libc", -] +checksum = 
"d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "js-sys" -version = "0.3.69" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ + "once_cell", "wasm-bindgen", ] @@ -3194,46 +2853,6 @@ dependencies = [ "cpufeatures", ] -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log", -] - -[[package]] -name = "lalrpop" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" -dependencies = [ - "ascii-canvas", - "bit-set", - "ena", - "itertools", - "lalrpop-util", - "petgraph", - "pico-args", - "regex", - "regex-syntax 0.8.3", - "string_cache", - "term", - "tiny-keccak", - "unicode-xid", - "walkdir", -] - -[[package]] -name = "lalrpop-util" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" -dependencies = [ - "regex-automata 0.4.6", -] - [[package]] name = "lambdaworks-crypto" version = "0.10.0" @@ -3258,53 +2877,21 @@ dependencies = [ [[package]] name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - -[[package]] -name = "levenshtein" -version = "1.0.5" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.169" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" - -[[package]] -name = "libloading" -version = "0.8.5" +version = "0.2.170" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" -dependencies = [ - "cfg-if", - "windows-targets 0.52.6", -] +checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" [[package]] name = "libm" -version = "0.2.8" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" - -[[package]] -name = "libnghttp2-sys" -version = "0.1.10+1.61.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "959c25552127d2e1fa72f0e52548ec04fc386e827ba71a7bd01db46a447dc135" -dependencies = [ - "cc", - "libc", -] +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libredox" @@ -3312,15 +2899,15 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", "libc", ] 
[[package]] name = "libz-sys" -version = "1.1.18" +version = "1.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c15da26e5af7e25c90b37a2d75cdbf940cf4a55316de9d84c679c9b8bfabf82e" +checksum = "df9b68e50e6e0b26f672573834882eb57759f6db9b3be2ea3c35c91188bb4eaa" dependencies = [ "cc", "libc", @@ -3336,15 +2923,15 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.3.8" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] -name = "linux-raw-sys" -version = "0.4.14" +name = "litemap" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" [[package]] name = "lock_api" @@ -3364,12 +2951,9 @@ checksum = "9374ef4228402d4b7e403e5838cb880d9ee663314b0a900d5a6aabf0c213552e" [[package]] name = "log" -version = "0.4.21" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" -dependencies = [ - "value-bag", -] +checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" [[package]] name = "loom" @@ -3441,9 +3025,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.2" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "migrations_internals" @@ -3474,27 +3058,30 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "mime_guess" -version = "2.0.4" +version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" dependencies = [ "mime", "unicase", ] [[package]] -name = "minimal-lexical" -version = "0.2.1" +name = "miniz_oxide" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +dependencies = [ + "adler", +] [[package]] name = "miniz_oxide" -version = "0.7.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" +checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ - "adler", + "adler2", ] [[package]] @@ -3520,23 +3107,17 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "mirai-annotations" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" - [[package]] name = "moka" version = "0.12.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9321642ca94a4282428e6ea4af8cc2ca4eac48ac7a6a4ea8f33f76d0ce70926" dependencies = [ - "async-lock 3.4.0", + "async-lock", 
"crossbeam-channel", "crossbeam-epoch", "crossbeam-utils", - "event-listener 5.3.1", + "event-listener", "futures-util", "loom", "parking_lot", @@ -3544,8 +3125,8 @@ dependencies = [ "rustc_version", "smallvec", "tagptr", - "thiserror 1.0.61", - "uuid 1.8.0", + "thiserror 1.0.69", + "uuid 1.15.0", ] [[package]] @@ -3568,9 +3149,9 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.12" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" dependencies = [ "libc", "log", @@ -3578,33 +3159,17 @@ dependencies = [ "openssl-probe", "openssl-sys", "schannel", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "tempfile", ] -[[package]] -name = "new_debug_unreachable" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" - [[package]] name = "no-std-compat" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c" -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - [[package]] name = "nonzero_ext" version = "0.3.0" @@ -3667,23 +3232,32 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.5.11" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" +checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" -version = "0.5.11" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" +checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ - "proc-macro-crate 1.3.1", + "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.98", +] + +[[package]] +name = "num_threads" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" +dependencies = [ + "libc", ] [[package]] @@ -3697,17 +3271,17 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "openssl" -version = "0.10.64" +version = "0.10.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" +checksum = "5e14130c6a98cd258fdcb0fb6d744152343ff729cbfcb28c656a9d12b999fbcd" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", "cfg-if", "foreign-types", "libc", @@ -3724,20 +3298,20 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "openssl-probe" -version = 
"0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" -version = "0.9.102" +version = "0.9.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" +checksum = "8bb61ea9811cc39e3c2069f40b8b8e2e70d8569b361f879786cc7ed48b777cdd" dependencies = [ "cc", "libc", @@ -3756,7 +3330,7 @@ dependencies = [ "js-sys", "once_cell", "pin-project-lite", - "thiserror 1.0.61", + "thiserror 1.0.69", ] [[package]] @@ -3779,12 +3353,12 @@ checksum = "29e1f9c8b032d4f635c730c0efcf731d5e2530ea13fa8bef7939ddc8420696bd" dependencies = [ "async-trait", "futures-core", - "http 1.1.0", + "http 1.2.0", "opentelemetry", "opentelemetry-proto", "opentelemetry_sdk", "prost", - "thiserror 1.0.61", + "thiserror 1.0.69", "tokio", "tonic", ] @@ -3823,7 +3397,7 @@ dependencies = [ "percent-encoding", "rand 0.8.5", "serde_json", - "thiserror 1.0.61", + "thiserror 1.0.69", "tokio", "tokio-stream", ] @@ -3846,43 +3420,39 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" -[[package]] -name = "owo-colors" -version = "3.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" - [[package]] name = "parity-scale-codec" -version = "3.6.12" +version = "3.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" +checksum = "c9fde3d0718baf5bc92f577d652001da0f8d54cd03a7974e118d04fc888dc23d" dependencies = [ "arrayvec", "bitvec", "byte-slice-cast", + "const_format", "impl-trait-for-tuples", "parity-scale-codec-derive", + "rustversion", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "3.6.12" +version = "3.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" +checksum = "581c837bb6b9541ce7faa9377c20616e4fb7650f6b0f68bc93c827ee504fb7b3" dependencies = [ - "proc-macro-crate 3.1.0", + "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.98", ] [[package]] name = "parking" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" @@ -3902,7 +3472,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.1", + "redox_syscall", "smallvec", "windows-targets 0.52.6", ] @@ -3915,7 +3485,7 @@ checksum = "914a1c2265c98e2446911282c6ac86d8524f495792c38c5bd884f80499c7538a" dependencies = [ "parse-display-derive", "regex", - "regex-syntax 0.8.3", + "regex-syntax 0.8.5", ] [[package]] @@ -3927,9 +3497,9 @@ dependencies = [ "proc-macro2", "quote", "regex", - "regex-syntax 0.8.3", + "regex-syntax 0.8.5", "structmeta", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -3953,24 +3523,14 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" -[[package]] -name = "petgraph" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" -dependencies = [ - "fixedbitset", - "indexmap 2.2.6", -] - [[package]] name = "phf" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ "phf_macros", - "phf_shared 0.11.2", + "phf_shared", ] [[package]] @@ -3979,7 +3539,7 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ - "phf_shared 0.11.2", + "phf_shared", "rand 0.8.5", ] @@ -3990,61 +3550,46 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" dependencies = [ "phf_generator", - "phf_shared 0.11.2", + "phf_shared", "proc-macro2", "quote", - "syn 2.0.87", -] - -[[package]] -name = "phf_shared" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" -dependencies = [ - "siphasher", + "syn 2.0.98", ] [[package]] name = "phf_shared" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ "siphasher", ] -[[package]] -name = "pico-args" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" - [[package]] name = "pin-project" -version = "1.1.5" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +checksum = "dfe2e71e1471fe07709406bf725f710b02927c9c54b2b5b2ec0e8087d97c327d" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.5" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +checksum = "f6e859e6e5bd50440ab63c47e3ebabc90f26251f7c73c3d3e837b74a1cc3fa67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -4052,83 +3597,41 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "piper" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1d5c74c9876f070d3e8fd503d748c7d974c3e48da8f41350fa5222ef9b4391" -dependencies = [ - "atomic-waker", - "fastrand 2.1.0", - "futures-io", -] - [[package]] name = "pkg-config" -version 
= "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" - -[[package]] -name = "polling" -version = "2.8.0" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if", - "concurrent-queue", - "libc", - "log", - "pin-project-lite", - "windows-sys 0.48.0", -] - -[[package]] -name = "polling" -version = "3.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" -dependencies = [ - "cfg-if", - "concurrent-queue", - "hermit-abi 0.4.0", - "pin-project-lite", - "rustix 0.38.34", - "tracing", - "windows-sys 0.52.0", -] +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "portable-atomic" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" +checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" [[package]] name = "postgres-protocol" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b6c5ef183cd3ab4ba005f1ca64c21e8bd97ce4699cfea9e8d9a2c4958ca520" +checksum = "76ff0abab4a9b844b93ef7b81f1efc0a366062aaef2cd702c76256b5dc075c54" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "byteorder", "bytes", "fallible-iterator", "hmac", "md-5", "memchr", - "rand 0.8.5", + "rand 0.9.0", "sha2", "stringprep", ] [[package]] name = "postgres-types" -version = "0.2.6" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d2234cdee9408b523530a9b6d2d6b373d1db34f6a8e51dc03ded1828d7fb67c" +checksum = "613283563cd90e1dfc3518d548caee47e0e725455ed619881f5cf21f36de4b48" dependencies = [ "bytes", "fallible-iterator", @@ -4143,9 +3646,12 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy 0.7.35", +] [[package]] name = "pq-sys" @@ -4158,48 +3664,33 @@ dependencies = [ [[package]] name = "pragma-common" -version = "0.1.0" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "246c2fc24dd4124b73552e456204b79c444f6bd61d14bf501b6b8ba19c3cca4c" dependencies = [ - "axum 0.8.1", - "bigdecimal", + "anyhow", + "async-trait", "cainome", - "chrono", - "color-eyre", - "deadpool-diesel", - "futures-util", - "indexmap 2.2.6", + "capnp", + "capnpc", + "futures", + "indexmap 2.7.1", "opentelemetry", "opentelemetry-appender-tracing", "opentelemetry-otlp", "opentelemetry-semantic-conventions", "opentelemetry_sdk", - "rstest", "serde", "serde_json", - "starknet 0.12.0", + "starknet 0.14.0", "starknet-crypto", - "strum 0.26.3", - "thiserror 1.0.61", + "thiserror 2.0.12", "tokio", + "tokio-util", "tracing", "tracing-opentelemetry", "tracing-subscriber", "utoipa", - "uuid 1.8.0", -] - -[[package]] -name = "pragma-consumer" -version = "0.1.1" -dependencies = [ - "httpmock", - "pragma-common", - 
"reqwest 0.12.5", - "rstest", - "serde_json", - "starknet 0.12.0", - "thiserror 1.0.61", - "tokio", ] [[package]] @@ -4207,35 +3698,38 @@ name = "pragma-entities" version = "0.1.0" dependencies = [ "axum 0.8.1", - "bigdecimal", + "bigdecimal 0.4.7", "chrono", "deadpool-diesel", "diesel", "diesel_migrations", "pragma-common", - "redis", "serde", "serde_json", - "starknet 0.12.0", - "thiserror 1.0.61", + "starknet 0.14.0", + "thiserror 2.0.12", "utoipa", - "uuid 1.8.0", + "uuid 1.15.0", ] [[package]] name = "pragma-ingestor" version = "0.1.0" dependencies = [ + "chrono", + "clap", "deadpool-diesel", "dotenvy", "envy", + "faucon-rs", + "futures-util", "lazy_static", "pragma-common", "pragma-entities", "rdkafka", "serde", "serde_json", - "thiserror 1.0.61", + "thiserror 2.0.12", "tokio", "tracing", ] @@ -4248,7 +3742,7 @@ dependencies = [ "arc-swap", "axum 0.6.20", "axum-macros 0.3.8", - "bigdecimal", + "bigdecimal 0.4.7", "chrono", "deadpool", "diesel", @@ -4258,7 +3752,7 @@ dependencies = [ "dotenv", "env_logger", "futures", - "hyper 0.14.29", + "hyper 0.14.32", "lazy_static", "moka", "num-bigint", @@ -4270,12 +3764,12 @@ dependencies = [ "serde_json", "starknet 0.11.0", "strum 0.25.0", - "thiserror 2.0.11", + "thiserror 2.0.12", "tokio", "tracing", "tracing-subscriber", "url", - "uuid 1.8.0", + "uuid 1.15.0", ] [[package]] @@ -4289,8 +3783,8 @@ dependencies = [ "axum-extra", "axum-macros 0.5.0", "axum-tracing-opentelemetry", - "bigdecimal", - "cainome", + "bigdecimal 0.3.1", + "bigdecimal 0.4.7", "chrono", "crossterm", "dashmap 6.1.0", @@ -4300,8 +3794,8 @@ dependencies = [ "envy", "futures", "futures-util", + "google-secretmanager1", "governor", - "indexmap 2.2.6", "lazy_static", "moka", "nonzero_ext", @@ -4311,38 +3805,31 @@ dependencies = [ "pragma-monitoring", "ratatui", "rdkafka", - "redis", - "reqwest 0.12.5", "rstest", "serde", "serde_json", - "starknet 0.12.0", + "starknet 0.14.0", "starknet-crypto", "strum 0.26.3", - "thiserror 1.0.61", + "thiserror 2.0.12", "tokio", "tokio-stream", "tokio-tungstenite 0.20.1", + "tokio-util", "tower-http", "tracing", "url", "utoipa", "utoipa-swagger-ui", "utoipauto", - "uuid 1.8.0", + "uuid 1.15.0", ] -[[package]] -name = "precomputed-hash" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" - [[package]] name = "pretty_assertions" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", @@ -4350,12 +3837,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.20" +version = "0.2.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e" +checksum = "6924ced06e1f7dfe3fa48d57b9f74f55d8915f5036121bef647ef4b204895fac" dependencies = [ "proc-macro2", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -4363,31 +3850,21 @@ name = "primitive-types" version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" -dependencies = [ - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "uint", -] - -[[package]] -name = "proc-macro-crate" -version = "1.3.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" -dependencies = [ - "once_cell", - "toml_edit 0.19.15", +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "uint", ] [[package]] name = "proc-macro-crate" -version = "3.1.0" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "toml_edit 0.21.1", + "toml_edit 0.22.24", ] [[package]] @@ -4416,9 +3893,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] @@ -4429,11 +3906,11 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "731e0d9356b0c25f16f33b5be79b1c57b562f141ebfcdb0ad8ac2c13a24293b4" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", "hex", "lazy_static", "procfs-core", - "rustix 0.38.34", + "rustix", ] [[package]] @@ -4442,7 +3919,7 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d3554923a69f4ce04c4a754260c338f505ce22642d3830e049a399fc2059a29" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", "hex", ] @@ -4460,14 +3937,14 @@ dependencies = [ "parking_lot", "procfs", "protobuf", - "thiserror 1.0.61", + "thiserror 1.0.69", ] [[package]] name = "prost" -version = "0.13.3" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" dependencies = [ "bytes", "prost-derive", @@ -4475,15 +3952,15 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.13.3" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools", + "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -4515,55 +3992,61 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quinn" -version = "0.11.2" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4ceeeeabace7857413798eb1ffa1e9c905a9946a57d81fb69b4b71c4d8eb3ad" +checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef" dependencies = [ "bytes", "pin-project-lite", "quinn-proto", "quinn-udp", "rustc-hash", - "rustls 0.23.12", - "thiserror 1.0.61", + "rustls 0.23.23", + "socket2", + "thiserror 2.0.12", "tokio", "tracing", ] [[package]] name = "quinn-proto" -version = "0.11.3" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddf517c03a109db8100448a4be38d498df8a210a99fe0e1b9eaf39e78c640efe" +checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d" dependencies = [ "bytes", + "getrandom 0.2.15", "rand 0.8.5", "ring", "rustc-hash", - "rustls 0.23.12", + "rustls 0.23.23", + 
"rustls-pki-types", "slab", - "thiserror 1.0.61", + "thiserror 2.0.12", "tinyvec", "tracing", + "web-time", ] [[package]] name = "quinn-udp" -version = "0.5.3" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25a78e6f726d84fcf960409f509ae354a32648f090c8d32a2ea8b1a1bc3bab14" +checksum = "e46f3055866785f6b92bc6164b76be02ca8f2eb4b002c0354b28cf4c119e5944" dependencies = [ + "cfg_aliases", "libc", "once_cell", - "socket2 0.5.7", - "windows-sys 0.52.0", + "socket2", + "tracing", + "windows-sys 0.59.0", ] [[package]] name = "quote" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] @@ -4604,7 +4087,7 @@ checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.2", - "zerocopy", + "zerocopy 0.8.20", ] [[package]] @@ -4643,7 +4126,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a509b1a2ffbe92afab0e55c8fd99dea1c280e8171bd2d88682bb20bc41cbc2c" dependencies = [ "getrandom 0.3.1", - "zerocopy", + "zerocopy 0.8.20", ] [[package]] @@ -4652,11 +4135,11 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ebc917cfb527a566c37ecb94c7e3fd098353516fb4eb6bea17015ade0182425" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", "cassowary", "crossterm", "indoc", - "itertools", + "itertools 0.11.0", "lru", "paste", "strum 0.25.0", @@ -4666,18 +4149,18 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "11.3.0" +version = "11.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6928fa44c097620b706542d428957635951bade7143269085389d42c8a4927e" +checksum = "529468c1335c1c03919960dfefdb1b3648858c20d7ec2d0663e728e4a717efbc" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", ] [[package]] name = "rdkafka" -version = "0.36.2" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1beea247b9a7600a81d4cc33f659ce1a77e1988323d7d2809c7ed1c21f4c316d" +checksum = "14b52c81ac3cac39c9639b95c20452076e74b8d9a71bc6fc4d83407af2ea6fff" dependencies = [ "futures-channel", "futures-util", @@ -4693,9 +4176,9 @@ dependencies = [ [[package]] name = "rdkafka-sys" -version = "4.7.0+2.3.0" +version = "4.8.0+2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55e0d2f9ba6253f6ec72385e453294f8618e9e15c2c6aba2a5c01ccf9622d615" +checksum = "ced38182dc436b3d9df0c77976f37a67134df26b050df1f0006688e46fc4c8be" dependencies = [ "libc", "libz-sys", @@ -4703,70 +4186,36 @@ dependencies = [ "pkg-config", ] -[[package]] -name = "redis" -version = "0.26.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e902a69d09078829137b4a5d9d082e0490393537badd7c91a3d69d14639e115f" -dependencies = [ - "arc-swap", - "async-trait", - "bytes", - "combine", - "futures-util", - "itoa", - "num-bigint", - "percent-encoding", - "pin-project-lite", - "ryu", - "serde", - "serde_json", - "sha1_smol", - "socket2 0.5.7", - "tokio", - "tokio-util", - "url", -] - -[[package]] -name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags 
1.3.2", -] - [[package]] name = "redox_syscall" -version = "0.5.1" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" +checksum = "82b568323e98e49e2a0899dcee453dd679fae22d69adf9b11dd508d1549b7e2f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", ] [[package]] name = "redox_users" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom 0.2.15", "libredox", - "thiserror 1.0.61", + "thiserror 1.0.69", ] [[package]] name = "regex" -version = "1.10.4" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.6", - "regex-syntax 0.8.3", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", ] [[package]] @@ -4780,13 +4229,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.6" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.3", + "regex-syntax 0.8.5", ] [[package]] @@ -4803,9 +4252,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "relative-path" @@ -4827,7 +4276,7 @@ dependencies = [ "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.29", + "hyper 0.14.32", "hyper-rustls 0.24.2", "hyper-tls 0.5.0", "ipnet", @@ -4844,7 +4293,7 @@ dependencies = [ "serde_json", "serde_urlencoded", "sync_wrapper 0.1.2", - "system-configuration", + "system-configuration 0.5.1", "tokio", "tokio-native-tls", "tokio-rustls 0.24.1", @@ -4854,14 +4303,14 @@ dependencies = [ "wasm-bindgen-futures", "web-sys", "webpki-roots 0.25.4", - "winreg 0.50.0", + "winreg", ] [[package]] name = "reqwest" -version = "0.12.5" +version = "0.12.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" +checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" dependencies = [ "base64 0.22.1", "bytes", @@ -4869,13 +4318,13 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.4.5", + "h2 0.4.8", "hickory-resolver", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", - "hyper-rustls 0.27.2", + "hyper 1.6.0", + "hyper-rustls 0.27.5", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -4887,25 +4336,26 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.12", - "rustls-native-certs 0.7.1", - "rustls-pemfile 2.1.2", + "rustls 0.23.23", + "rustls-native-certs 0.8.1", + "rustls-pemfile 2.2.0", "rustls-pki-types", 
"serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.1", - "system-configuration", + "sync_wrapper 1.0.2", + "system-configuration 0.6.1", "tokio", "tokio-native-tls", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", + "tower 0.5.2", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots 0.26.3", - "winreg 0.52.0", + "webpki-roots 0.26.8", + "windows-registry", ] [[package]] @@ -4936,15 +4386,14 @@ dependencies = [ [[package]] name = "ring" -version = "0.17.8" +version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = "da5349ae27d3887ca812fb375b45a4fbb36d8d12d2df394968cd86e35683fe73" dependencies = [ "cc", "cfg-if", "getrandom 0.2.15", "libc", - "spin", "untrusted", "windows-sys 0.52.0", ] @@ -4984,15 +4433,15 @@ dependencies = [ "regex", "relative-path", "rustc_version", - "syn 2.0.87", + "syn 2.0.98", "unicode-ident", ] [[package]] name = "rust-embed" -version = "8.4.0" +version = "8.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19549741604902eb99a7ed0ee177a0663ee1eda51a29f71401f166e47e77806a" +checksum = "0b3aba5104622db5c9fc61098de54708feb732e7763d7faa2fa625899f00bf6f" dependencies = [ "rust-embed-impl", "rust-embed-utils", @@ -5001,22 +4450,22 @@ dependencies = [ [[package]] name = "rust-embed-impl" -version = "8.4.0" +version = "8.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb9f96e283ec64401f30d3df8ee2aaeb2561f34c824381efa24a35f79bf40ee4" +checksum = "1f198c73be048d2c5aa8e12f7960ad08443e56fd39cc26336719fdb4ea0ebaae" dependencies = [ "proc-macro2", "quote", "rust-embed-utils", - "syn 2.0.87", + "syn 2.0.98", "walkdir", ] [[package]] name = "rust-embed-utils" -version = "8.4.0" +version = "8.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38c74a686185620830701348de757fd36bef4aa9680fd23c49fc539ddcc1af32" +checksum = "5a2fcdc9f40c8dc2922842ca9add611ad19f332227fc651d015881ad1552bd9a" dependencies = [ "sha2", "walkdir", @@ -5030,9 +4479,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" -version = "1.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc-hex" @@ -5042,38 +4491,24 @@ checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustix" -version = "0.37.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustix" -version = "0.38.34" +version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = 
"fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", "errno", "libc", - "linux-raw-sys 0.4.14", - "windows-sys 0.52.0", + "linux-raw-sys", + "windows-sys 0.59.0", ] [[package]] @@ -5090,16 +4525,14 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.12" +version = "0.23.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044" +checksum = "47796c98c480fce5406ef69d1c76378375492c3b0a0de587be0c1d9feb12f395" dependencies = [ - "aws-lc-rs", - "log", "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.102.6", + "rustls-webpki 0.102.8", "subtle", "zeroize", ] @@ -5113,20 +4546,32 @@ dependencies = [ "openssl-probe", "rustls-pemfile 1.0.4", "schannel", - "security-framework", + "security-framework 2.11.1", ] [[package]] name = "rustls-native-certs" -version = "0.7.1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "schannel", + "security-framework 2.11.1", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a88d6d420651b496bdd98684116959239430022a115c1240e6c3993be0b15fba" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" dependencies = [ "openssl-probe", - "rustls-pemfile 2.1.2", "rustls-pki-types", "schannel", - "security-framework", + "security-framework 3.2.0", ] [[package]] @@ -5140,19 +4585,21 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" dependencies = [ - "base64 0.22.1", "rustls-pki-types", ] [[package]] name = "rustls-pki-types" -version = "1.7.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" +dependencies = [ + "web-time", +] [[package]] name = "rustls-webpki" @@ -5166,11 +4613,10 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.102.6" +version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ - "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", @@ -5178,15 +4624,15 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "salsa20" @@ -5217,11 +4663,11 @@ 
dependencies = [ [[package]] name = "schannel" -version = "0.1.23" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -5235,12 +4681,11 @@ dependencies = [ [[package]] name = "scoped-futures" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1473e24c637950c9bd38763220bea91ec3e095a89f672bbd7a10d03e77ba467" +checksum = "1b24aae2d0636530f359e9d5ef0c04669d11c5e756699b27a6a6d845d8329091" dependencies = [ - "cfg-if", - "pin-utils", + "pin-project-lite", ] [[package]] @@ -5283,14 +4728,33 @@ version = "3.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b07779b9b918cc05650cb30f404d4d7835d26df37c235eded8a6832e2fb82cca" +[[package]] +name = "seahash" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" + [[package]] name = "security-framework" -version = "2.11.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.5.0", - "core-foundation", + "bitflags 2.8.0", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" +dependencies = [ + "bitflags 2.8.0", + "core-foundation 0.10.0", "core-foundation-sys", "libc", "security-framework-sys", @@ -5298,9 +4762,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.11.0" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" dependencies = [ "core-foundation-sys", "libc", @@ -5308,35 +4772,48 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.23" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03" [[package]] name = "serde" -version = "1.0.204" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" +checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.204" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" +checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", +] + +[[package]] +name = "serde_html_form" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "9d2de91cf02bbc07cde38891769ccd5d4f073d22a40683aa4bc7a95781aaa2c4" +dependencies = [ + "form_urlencoded", + "indexmap 2.7.1", + "itoa", + "ryu", + "serde", ] [[package]] name = "serde_json" -version = "1.0.138" +version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" +checksum = "44f86c3acccc9c65b153fe1b85a3be07fe5515274ec9f0653b4a0875731c72a6" dependencies = [ "itoa", "memchr", @@ -5365,16 +4842,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_regex" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8136f1a4ea815d7eac4101cfd0b16dc0cb5e1fe1b8609dfd728058656b7badf" -dependencies = [ - "regex", - "serde", -] - [[package]] name = "serde_repr" version = "0.1.19" @@ -5383,14 +4850,14 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "serde_spanned" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] @@ -5425,19 +4892,19 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.9.0" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cecfa94848272156ea67b2b1a53f20fc7bc638c4a46d2f8abde08f05f4b857" +checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.2.6", + "indexmap 2.7.1", "serde", "serde_derive", "serde_json", - "serde_with_macros 3.9.0", + "serde_with_macros 3.12.0", "time", ] @@ -5450,19 +4917,19 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "serde_with_macros" -version = "3.9.0" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8fee4991ef4f274617a51ad4af30519438dacb2f56ac773b08a1922ff743350" +checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -5487,7 +4954,7 @@ checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -5501,12 +4968,6 @@ dependencies = [ "digest", ] -[[package]] -name = "sha1_smol" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" - [[package]] name = "sha2" version = "0.10.8" @@ -5579,17 +5040,11 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" -[[package]] -name = "similar" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" - [[package]] name = "siphasher" -version = "0.3.11" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +checksum = 
"56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" @@ -5600,38 +5055,17 @@ dependencies = [ "autocfg", ] -[[package]] -name = "sluice" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" -dependencies = [ - "async-channel 1.9.0", - "futures-core", - "futures-io", -] - [[package]] name = "smallvec" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" - -[[package]] -name = "socket2" -version = "0.4.10" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] +checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -5652,6 +5086,12 @@ dependencies = [ "lock_api", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "starknet" version = "0.11.0" @@ -5669,17 +5109,34 @@ dependencies = [ [[package]] name = "starknet" -version = "0.12.0" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc9b221c99a1ea1d65fb130e5b0dbaa6d362698430232902ebeb2a898a1ab531" +dependencies = [ + "starknet-accounts 0.12.0", + "starknet-contract 0.12.0", + "starknet-core 0.12.1", + "starknet-core-derive", + "starknet-crypto", + "starknet-macros", + "starknet-providers 0.12.1", + "starknet-signers 0.10.2", +] + +[[package]] +name = "starknet" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0c9ac3809cc7630784e8c8565fa3013af819d83c97aa2720d566016d439011" +checksum = "61e2e53e7705c9a9aad7f118a4bac7386afeb8db272b3eb445a464ca4c3dfee5" dependencies = [ - "starknet-accounts 0.11.0", - "starknet-contract 0.11.0", - "starknet-core 0.12.0", + "starknet-accounts 0.13.0", + "starknet-contract 0.13.0", + "starknet-core 0.13.0", + "starknet-core-derive", "starknet-crypto", "starknet-macros", - "starknet-providers 0.12.0", - "starknet-signers 0.10.0", + "starknet-providers 0.13.0", + "starknet-signers 0.11.0", ] [[package]] @@ -5694,22 +5151,37 @@ dependencies = [ "starknet-crypto", "starknet-providers 0.11.0", "starknet-signers 0.9.0", - "thiserror 1.0.61", + "thiserror 1.0.69", ] [[package]] name = "starknet-accounts" -version = "0.11.0" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3fc4364f5684e4a5dcb100847a9ea023deae3815f45526721a6fa94ab595651" +dependencies = [ + "async-trait", + "auto_impl", + "starknet-core 0.12.1", + "starknet-crypto", + "starknet-providers 0.12.1", + "starknet-signers 0.10.2", + "thiserror 1.0.69", +] + +[[package]] +name = "starknet-accounts" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ee27ded58ade61da410fccafd57ed5429b0e79a9d62a4ae8b65818cb9d6f400" +checksum = 
"eca52534db01eda3bf3250f398bd4597aed3856d0d17d84070efbc7919abad71" dependencies = [ "async-trait", "auto_impl", - "starknet-core 0.12.0", + "starknet-core 0.13.0", "starknet-crypto", - "starknet-providers 0.12.0", - "starknet-signers 0.10.0", - "thiserror 1.0.61", + "starknet-providers 0.13.0", + "starknet-signers 0.11.0", + "thiserror 1.0.69", ] [[package]] @@ -5724,22 +5196,37 @@ dependencies = [ "starknet-accounts 0.10.0", "starknet-core 0.11.1", "starknet-providers 0.11.0", - "thiserror 1.0.61", + "thiserror 1.0.69", ] [[package]] name = "starknet-contract" -version = "0.11.0" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f2102b8f763477a1bdece683da51514bc73829d5dcc3bbe75ff1b6aca6d4e02" +dependencies = [ + "serde", + "serde_json", + "serde_with 3.12.0", + "starknet-accounts 0.12.0", + "starknet-core 0.12.1", + "starknet-providers 0.12.1", + "thiserror 1.0.69", +] + +[[package]] +name = "starknet-contract" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6ee5762d24c4f06ab7e9406550925df406712e73719bd2de905c879c674a87" +checksum = "67d8d5a5306527eedcb4bd70afecfc6824add631a08eac8fd1cf9c2bdfd21e77" dependencies = [ "serde", "serde_json", - "serde_with 3.9.0", - "starknet-accounts 0.11.0", - "starknet-core 0.12.0", - "starknet-providers 0.12.0", - "thiserror 1.0.61", + "serde_with 3.12.0", + "starknet-accounts 0.13.0", + "starknet-core 0.13.0", + "starknet-providers 0.13.0", + "thiserror 1.0.69", ] [[package]] @@ -5763,28 +5250,66 @@ dependencies = [ [[package]] name = "starknet-core" -version = "0.12.0" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37abf0af45a3b866dd108880ace9949ae7830f6830adb8963024302ae9e82c24" +dependencies = [ + "base64 0.21.7", + "crypto-bigint", + "flate2", + "foldhash", + "hex", + "indexmap 2.7.1", + "num-traits", + "serde", + "serde_json", + "serde_json_pythonic", + "serde_with 3.12.0", + "sha3", + "starknet-core-derive", + "starknet-crypto", + "starknet-types-core", +] + +[[package]] +name = "starknet-core" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538240cbe6663c673fe77465f294da707080f39678dd7066761554899e46100" +checksum = "b53a16799e4a75173839d868a1a48ff5d3e10456febd4dec91b04ba6521741d5" dependencies = [ "base64 0.21.7", "crypto-bigint", "flate2", + "foldhash", "hex", + "indexmap 2.7.1", + "num-traits", "serde", "serde_json", "serde_json_pythonic", - "serde_with 3.9.0", + "serde_with 3.12.0", "sha3", + "starknet-core-derive", "starknet-crypto", "starknet-types-core", ] +[[package]] +name = "starknet-core-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b08520b7d80eda7bf1a223e8db4f9bb5779a12846f15ebf8f8d76667eca7f5ad" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", +] + [[package]] name = "starknet-crypto" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ded22ccf4cb9e572ce3f77de6066af53560cd2520d508876c83bb1e6b29d5cbc" +checksum = "039a3bad70806b494c9e6b21c5238a6c8a373d66a26071859deb0ccca6f93634" dependencies = [ "crypto-bigint", "hex", @@ -5810,12 +5335,12 @@ dependencies = [ [[package]] name = "starknet-macros" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8986a940af916fc0a034f4e42c6ba76d94f1e97216d75447693dfd7aefaf3ef2" +checksum = 
"bb14b6714e7625aca063e91022e574ee0bca863df98071dd7191e24919a367b0" dependencies = [ - "starknet-core 0.12.0", - "syn 2.0.87", + "starknet-core 0.13.0", + "syn 2.0.98", ] [[package]] @@ -5835,15 +5360,15 @@ dependencies = [ "serde_json", "serde_with 2.3.3", "starknet-core 0.11.1", - "thiserror 1.0.61", + "thiserror 1.0.69", "url", ] [[package]] name = "starknet-providers" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60e8e69ba7a36dea2d28333be82b4011f8784333d3ae5618482b6587c1ffb66c" +checksum = "a9256247f718564b3e4c73cc941735012691c14903fbc25cea306745bcbfa384" dependencies = [ "async-trait", "auto_impl", @@ -5854,9 +5379,30 @@ dependencies = [ "reqwest 0.11.27", "serde", "serde_json", - "serde_with 3.9.0", - "starknet-core 0.12.0", - "thiserror 1.0.61", + "serde_with 3.12.0", + "starknet-core 0.12.1", + "thiserror 1.0.69", + "url", +] + +[[package]] +name = "starknet-providers" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c74c3850a661fa1ffd3c3e2cb9db6e28c94ab9aaaa0496503014a814f09cd455" +dependencies = [ + "async-trait", + "auto_impl", + "ethereum-types", + "flate2", + "getrandom 0.2.15", + "log", + "reqwest 0.11.27", + "serde", + "serde_json", + "serde_with 3.12.0", + "starknet-core 0.13.0", + "thiserror 1.0.69", "url", ] @@ -5874,14 +5420,31 @@ dependencies = [ "rand 0.8.5", "starknet-core 0.11.1", "starknet-crypto", - "thiserror 1.0.61", + "thiserror 1.0.69", ] [[package]] name = "starknet-signers" -version = "0.10.0" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "518c7de189cd4158d45d33067c580094ad5cd2f027a90ac76d49a81c89fa37bc" +dependencies = [ + "async-trait", + "auto_impl", + "crypto-bigint", + "eth-keystore", + "getrandom 0.2.15", + "rand 0.8.5", + "starknet-core 0.12.1", + "starknet-crypto", + "thiserror 1.0.69", +] + +[[package]] +name = "starknet-signers" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70b9e01b61ae51d722e2b100d6ef913c5a2e70d1ea672733d385f7296d6055ef" +checksum = "c2aeca13b8c61165b69d4775880d74ff9bbb9bafa36a297899e0f160619631b3" dependencies = [ "async-trait", "auto_impl", @@ -5889,9 +5452,9 @@ dependencies = [ "eth-keystore", "getrandom 0.2.15", "rand 0.8.5", - "starknet-core 0.12.0", + "starknet-core 0.13.0", "starknet-crypto", - "thiserror 1.0.61", + "thiserror 1.0.69", ] [[package]] @@ -5914,19 +5477,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -[[package]] -name = "string_cache" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" -dependencies = [ - "new_debug_unreachable", - "once_cell", - "parking_lot", - "phf_shared 0.10.0", - "precomputed-hash", -] - [[package]] name = "stringprep" version = "0.1.5" @@ -5953,7 +5503,7 @@ dependencies = [ "proc-macro2", "quote", "structmeta-derive", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -5964,7 +5514,7 @@ checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -5995,7 +5545,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -6008,14 +5558,14 @@ dependencies = [ "proc-macro2", "quote", 
"rustversion", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "subtle" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" @@ -6030,9 +5580,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.87" +version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", @@ -6047,9 +5597,23 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", +] [[package]] name = "system-configuration" @@ -6058,8 +5622,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", - "core-foundation", - "system-configuration-sys", + "core-foundation 0.9.4", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.8.0", + "core-foundation 0.9.4", + "system-configuration-sys 0.6.0", ] [[package]] @@ -6072,6 +5647,16 @@ dependencies = [ "libc", ] +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tagptr" version = "0.2.0" @@ -6086,25 +5671,16 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.10.1" +version = "3.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230" dependencies = [ "cfg-if", - "fastrand 2.1.0", - "rustix 0.38.34", - "windows-sys 0.52.0", -] - -[[package]] -name = "term" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" -dependencies = [ - "dirs-next", - "rustversion", - "winapi", + "fastrand", + "getrandom 0.3.1", + "once_cell", + "rustix", + "windows-sys 0.59.0", ] [[package]] @@ -6134,11 +5710,11 @@ dependencies = [ "memchr", "parse-display", "pin-project-lite", - "reqwest 0.12.5", + "reqwest 0.12.12", "serde", "serde_json", - "serde_with 3.9.0", - "thiserror 1.0.61", + "serde_with 3.12.0", + 
"thiserror 1.0.69", "tokio", "tokio-stream", "tokio-util", @@ -6158,16 +5734,17 @@ dependencies = [ name = "tests" version = "0.1.0" dependencies = [ - "bigdecimal", + "bigdecimal 0.4.7", "chrono", "deadpool-diesel", "diesel", "diesel-async", "futures-util", "pragma-common", + "pragma-node", "pretty_assertions", "rand 0.9.0", - "reqwest 0.12.5", + "reqwest 0.12.12", "rstest", "serde", "serde_json", @@ -6178,47 +5755,47 @@ dependencies = [ "tracing", "tracing-subscriber", "tracing-test", - "uuid 1.8.0", + "uuid 1.15.0", ] [[package]] name = "thiserror" -version = "1.0.61" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl 1.0.61", + "thiserror-impl 1.0.69", ] [[package]] name = "thiserror" -version = "2.0.11" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" dependencies = [ - "thiserror-impl 2.0.11", + "thiserror-impl 2.0.12", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "thiserror-impl" -version = "2.0.11" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -6233,13 +5810,15 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", + "libc", "num-conv", + "num_threads", "powerfmt", "serde", "time-core", @@ -6254,9 +5833,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", @@ -6271,11 +5850,21 @@ dependencies = [ "crunchy", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" dependencies = [ "tinyvec_macros", ] @@ -6299,7 +5888,7 @@ 
dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.7", + "socket2", "tokio-macros", "windows-sys 0.52.0", ] @@ -6312,7 +5901,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -6327,9 +5916,9 @@ dependencies = [ [[package]] name = "tokio-postgres" -version = "0.7.10" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8" +checksum = "6c95d533c83082bb6490e0189acaa0bbeef9084e60471b696ca6988cd0541fb0" dependencies = [ "async-trait", "byteorder", @@ -6344,8 +5933,8 @@ dependencies = [ "pin-project-lite", "postgres-protocol", "postgres-types", - "rand 0.8.5", - "socket2 0.5.7", + "rand 0.9.0", + "socket2", "tokio", "tokio-util", "whoami", @@ -6363,12 +5952,11 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" dependencies = [ - "rustls 0.23.12", - "rustls-pki-types", + "rustls 0.23.23", "tokio", ] @@ -6399,21 +5987,21 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.26.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4bf6fecd69fcdede0ec680aaf474cdab988f9de6bc73d3758f0160e3b7025a" +checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" dependencies = [ "futures-util", "log", "tokio", - "tungstenite 0.26.1", + "tungstenite 0.26.2", ] [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "6b9590b93e6fcc1739458317cccd391ad3955e2bde8913edf6f95f9e65a8f034" dependencies = [ "bytes", "futures-core", @@ -6436,9 +6024,9 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] @@ -6449,22 +6037,22 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.2.6", + "indexmap 2.7.1", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.21.1" +version = "0.22.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" +checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ - "indexmap 2.2.6", + "indexmap 2.7.1", "toml_datetime", - "winnow", + "winnow 0.7.3", ] [[package]] @@ -6478,17 +6066,17 @@ dependencies = [ "axum 0.7.9", "base64 0.22.1", "bytes", - "h2 0.4.5", - "http 1.1.0", - "http-body 1.0.0", + "h2 0.4.8", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-timeout", "hyper-util", "percent-encoding", "pin-project", "prost", - "socket2 0.5.7", + 
"socket2", "tokio", "tokio-stream", "tower 0.4.13", @@ -6526,7 +6114,7 @@ dependencies = [ "futures-core", "futures-util", "pin-project-lite", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tokio", "tower-layer", "tower-service", @@ -6539,11 +6127,11 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", "bytes", "futures-util", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "http-range-header", "httpdate", @@ -6572,9 +6160,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -6584,45 +6172,25 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", ] -[[package]] -name = "tracing-error" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d686ec1c0f384b1277f097b2f279a2ecc11afe8c133c1aabf036a27cb4cd206e" -dependencies = [ - "tracing", - "tracing-subscriber", -] - -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -6658,7 +6226,7 @@ version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e23ebe790a95c3105f5a7ec8e7c33884ecc8462f751f7f719d2356a3cfa122b" dependencies = [ - "http 1.1.0", + "http 1.2.0", "opentelemetry", "tracing", "tracing-opentelemetry", @@ -6666,9 +6234,9 @@ dependencies = [ [[package]] name = "tracing-serde" -version = "0.1.3" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" dependencies = [ "serde", "tracing-core", @@ -6676,9 +6244,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "matchers", "nu-ansi-term", @@ -6713,7 +6281,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" dependencies = [ "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -6737,34 +6305,33 @@ dependencies = [ "native-tls", "rand 0.8.5", "sha1", - "thiserror 1.0.61", + "thiserror 1.0.69", "url", "utf-8", ] [[package]] name = "tungstenite" -version = "0.26.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413083a99c579593656008130e29255e54dcaae495be556cc26888f211648c24" +checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" dependencies = [ - "byteorder", "bytes", "data-encoding", - "http 1.1.0", + "http 1.2.0", "httparse", "log", - "rand 0.8.5", + "rand 0.9.0", "sha1", - "thiserror 2.0.11", + "thiserror 2.0.12", "utf-8", ] [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "uint" @@ -6780,39 +6347,36 @@ dependencies = [ [[package]] name = "unicase" -version = "2.7.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-bidi" -version = "0.3.15" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" [[package]] name = "unicode-normalization" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" dependencies = [ "tinyvec", ] [[package]] name = "unicode-properties" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" [[package]] name = "unicode-segmentation" @@ -6828,9 +6392,9 @@ checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-xid" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] name = "untrusted" @@ -6851,12 +6415,12 @@ dependencies = [ [[package]] name = "url" -version = "2.5.0" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", - "idna 0.5.0", + "idna", 
"percent-encoding", "serde", ] @@ -6873,6 +6437,18 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -6881,11 +6457,11 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "utoipa" -version = "5.2.0" +version = "5.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "514a48569e4e21c86d0b84b5612b5e73c0b2cf09db63260134ba426d4e8ea714" +checksum = "435c6f69ef38c9017b4b4eea965dfb91e71e53d869e896db40d1cf2441dd75c0" dependencies = [ - "indexmap 2.2.6", + "indexmap 2.7.1", "serde", "serde_json", "utoipa-gen", @@ -6893,24 +6469,25 @@ dependencies = [ [[package]] name = "utoipa-gen" -version = "5.2.0" +version = "5.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5629efe65599d0ccd5d493688cbf6e03aa7c1da07fe59ff97cf5977ed0637f66" +checksum = "a77d306bc75294fd52f3e99b13ece67c02c1a2789190a6f31d32f736624326f7" dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.87", - "uuid 1.8.0", + "syn 2.0.98", + "uuid 1.15.0", ] [[package]] name = "utoipa-swagger-ui" -version = "8.0.3" +version = "9.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5c80b4dd79ea382e8374d67dcce22b5c6663fa13a82ad3886441d1bbede5e35" +checksum = "161166ec520c50144922a625d8bc4925cc801b2dda958ab69878527c0e5c5d61" dependencies = [ - "axum 0.7.9", + "axum 0.8.1", + "base64 0.22.1", "mime_guess", "regex", "rust-embed", @@ -6938,7 +6515,7 @@ checksum = "39449c1c0079e06bca01fd954736a9cd8a1c999540c9c2c404eb74ce63e8eb73" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -6949,7 +6526,7 @@ checksum = "d1521871bcd9cb5024e0ec86437e014f8ac8cf36c15a177d4b8f97560a1699fa" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", "utoipauto-core", ] @@ -6965,26 +6542,20 @@ dependencies = [ [[package]] name = "uuid" -version = "1.8.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" +checksum = "bd8dcafa1ca14750d8d7a05aa05988c17aab20886e1f3ae33a40223c58d92ef7" dependencies = [ - "getrandom 0.2.15", - "rand 0.8.5", + "getrandom 0.3.1", + "rand 0.9.0", "serde", ] [[package]] name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "value-bag" -version = "1.9.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "vcpkg" @@ -6994,9 +6565,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "vsimd" @@ -7004,12 +6575,6 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" -[[package]] -name = "waker-fn" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7" - [[package]] name = "walkdir" version = "2.5.0" @@ -7052,46 +6617,48 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.92" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", + "once_cell", + "rustversion", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.92" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.42" +version = "0.4.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.92" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -7099,28 +6666,31 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.92" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.92" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] name = "web-sys" -version = "0.3.69" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" dependencies = [ "js-sys", "wasm-bindgen", @@ -7144,32 +6714,20 @@ checksum = 
"5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] name = "webpki-roots" -version = "0.26.3" +version = "0.26.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd7c23921eeb1713a4e851530e9b9756e4fb0e89978582942612524cf09f01cd" +checksum = "2210b291f7ea53617fbafcc4939f10914214ec15aace5ba62293a668f322c5c9" dependencies = [ "rustls-pki-types", ] -[[package]] -name = "which" -version = "4.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" -dependencies = [ - "either", - "home", - "once_cell", - "rustix 0.38.34", -] - [[package]] name = "whoami" -version = "1.5.1" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" +checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" dependencies = [ - "redox_syscall 0.4.1", + "redox_syscall", "wasite", "web-sys", ] @@ -7198,11 +6756,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -7251,7 +6809,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] @@ -7262,7 +6820,24 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", +] + +[[package]] +name = "windows-link" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3" + +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result", + "windows-strings", + "windows-targets 0.52.6", ] [[package]] @@ -7302,6 +6877,15 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -7433,20 +7017,19 @@ dependencies = [ ] [[package]] -name = "winreg" -version = "0.50.0" +name = "winnow" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" dependencies = [ - "cfg-if", - "windows-sys 0.48.0", + "memchr", ] [[package]] name = "winreg" -version = "0.52.0" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ "cfg-if", "windows-sys 0.48.0", @@ -7458,9 +7041,21 @@ 
version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.8.0", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "wyz" version = "0.5.1" @@ -7478,9 +7073,70 @@ checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" [[package]] name = "yansi" -version = "0.5.1" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", + "synstructure", +] + +[[package]] +name = "yup-oauth2" +version = "11.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ed5f19242090128c5809f6535cc7b8d4e2c32433f6c6005800bbc20a644a7f0" +dependencies = [ + "anyhow", + "async-trait", + "base64 0.22.1", + "futures", + "http 1.2.0", + "http-body-util", + "hyper 1.6.0", + "hyper-rustls 0.27.5", + "hyper-util", + "log", + "percent-encoding", + "rustls 0.23.23", + "rustls-pemfile 2.2.0", + "seahash", + "serde", + "serde_json", + "time", + "tokio", + "url", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive 0.7.35", +] [[package]] name = "zerocopy" @@ -7488,7 +7144,18 @@ version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dde3bb8c68a8f3f1ed4ac9221aad6b10cece3e60a8e2ea54a6a2dec806d0084c" dependencies = [ - "zerocopy-derive", + "zerocopy-derive 0.8.20", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", ] [[package]] @@ -7499,7 +7166,28 @@ checksum = "eea57037071898bf96a6da35fd626f4f27e9cee3ead2a6c703cf09d472b2e700" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", + "synstructure", ] [[package]] @@ -7507,35 +7195,43 @@ name = "zeroize" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" dependencies = [ - "zeroize_derive", + "yoke", + "zerofrom", + "zerovec-derive", ] [[package]] -name = "zeroize_derive" -version = "1.4.2" +name = "zerovec-derive" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.98", ] [[package]] name = "zip" -version = "2.1.6" +version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40dd8c92efc296286ce1fbd16657c5dbefff44f1b4ca01cc5f517d8b7b3d3e2e" +checksum = "b280484c454e74e5fff658bbf7df8fdbe7a07c6b2de4a53def232c15ef138f3a" dependencies = [ "arbitrary", "crc32fast", "crossbeam-utils", "displaydoc", "flate2", - "indexmap 2.2.6", + "indexmap 2.7.1", "memchr", - "thiserror 1.0.61", + "thiserror 2.0.12", "zopfli", ] diff --git a/Cargo.toml b/Cargo.toml index c944b8a6..616f847d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,14 +4,12 @@ members = [ "pragma-node", "pragma-ingestor", "pragma-entities", - "pragma-common", - "pragma-consumer", "tests", ] [workspace.package] version = "0.1.0" -edition = "2021" +edition = "2024" repository = "https://github.com/astraly-labs/pragma-node/" authors = ["Pragma Labs "] @@ -46,17 +44,29 @@ module_name_repetitions = "allow" needless_pass_by_value = "allow" or_fun_call = "allow" redundant_pub_crate = "allow" +missing_const_for_fn = "allow" [workspace.dependencies] -pragma-common = { path = "./pragma-common" } +pragma-common = { version = "0.2.12", features = [ + "capnp", + "serde", + "services", + "task-group", + "telemetry", + "utoipa", + "starknet", +] } pragma-entities = { path = "./pragma-entities" } pragma-ingestor = { path = "./pragma-ingestor" } pragma-node = { path = "./pragma-node" } -color-eyre = "0.6.3" +faucon-rs = { version = "0.1.5", registry = "pragma" } + +anyhow = "1.0.97" aws-config = { version = "1.5.1", features = ["behavior-version-latest"] } aws-sdk-secretsmanager = "1.32.0" +google-secretmanager1 = "6.0.0" axum = { version = "0.8", features = ["macros", "ws", "tokio"] } -axum-extra = { version = "0.10.0", features = ["typed-header"] } +axum-extra = { version = "0.10.0", features = ["typed-header", "query"] } axum-macros = "0.5" async-trait = "0.1.86" cainome = { git = "https://github.com/cartridge-gg/cainome", tag = "v0.4.5", features = [ @@ -86,24 +96,20 @@ serde = { version = "1.0.204", features = ["derive"] } moka = { version = "0.12", features = ["future"] } nonzero_ext = { version = "0.3.0" } serde_json = { version = "1.0.122", features = ["arbitrary_precision"] } -starknet = "0.12.0" -starknet-crypto = "0.7.3" -quote = "1.0.37" -redis = { version = "0.26.1", features = ["json", "tokio-comp"] } +starknet = "0.14.0" +starknet-crypto = "0.7.4" +clap = { version = "4.4", features = ["derive", "env"] } reqwest = { version = "0.12.5", features = 
["blocking"] } -rdkafka = "0.36.2" -time = "0.3.29" -thiserror = "1.0.49" +rdkafka = "0.37.0" +thiserror = "2.0.12" strum = { version = "0.26", features = ["derive"] } -tracing-test = "0.2.5" url = "2.5.0" -tower = { version = "0.5" } tower-http = { version = "0.6.2", features = ["fs", "trace", "cors"] } tokio = { version = "^1.43.0", features = ["full"] } -toml = "0.8.8" -utoipa = { version = "5.0.0", features = ["axum_extras", "chrono", "uuid"] } +tokio-util = "0.7.14" +utoipa = { version = "5", features = ["axum_extras", "chrono", "uuid"] } utoipauto = "0.1.14" -utoipa-swagger-ui = { version = "8.0.3", features = ["axum"] } +utoipa-swagger-ui = { version = "9", features = ["axum"] } uuid = { version = "1.4", features = ["fast-rng", "v4", "serde"] } # David Bernard (sacre jojo) @@ -132,8 +138,8 @@ pragma-monitoring = { git = "https://github.com/astraly-labs/pragma-monitoring" # Test dependencies rstest = "0.18.2" serial_test = "3.2.0" -httpmock = { version = "0.7.0" } testcontainers = { version = "0.21.1" } +tracing-test = "0.2.5" testcontainers-modules = { version = "0.9.0", features = [ "postgres", "kafka", diff --git a/Dockerfile.ingestor b/Dockerfile.ingestor new file mode 100644 index 00000000..da29f99e --- /dev/null +++ b/Dockerfile.ingestor @@ -0,0 +1,80 @@ +# syntax=docker/dockerfile-upstream:master + +FROM lukemathwalker/cargo-chef:latest-rust-slim-bullseye AS cargo-chef +WORKDIR /app + +FROM cargo-chef AS planner +COPY . . + +RUN apt-get update && apt-get install -y curl +RUN cargo chef prepare --recipe-path recipe.json + +FROM cargo-chef AS builder + +COPY --from=planner /app/recipe.json recipe.json + +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ + libpq-dev \ + pkg-config \ + libssl-dev \ + bash \ + ca-certificates \ + curl \ + wget \ + libclang-dev \ + cmake \ + build-essential \ + capnproto \ + libcapnp-dev \ + && rm -rf /var/lib/apt/lists/* + +RUN cargo chef cook --profile release --recipe-path recipe.json + +COPY . . + +RUN cargo build --locked --release --workspace --exclude tests + +FROM debian:bullseye-slim AS final +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ + libpq-dev \ + libssl1.1 \ + procps \ + libclang-dev \ + cmake \ + bash \ + build-essential \ + pkg-config \ + capnproto \ + libcapnp-dev \ + && rm -rf /var/lib/apt/lists/* + +ARG APP_NAME=pragma-ingestor +ENV APP_NAME $APP_NAME + +# Create app directory +RUN mkdir /app + +# Copy the binary, certificates, config, and OpenAPI spec +COPY --from=builder /app/target/release/${APP_NAME} /bin/server +COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ + +RUN adduser \ + --disabled-password \ + --gecos "" \ + --home "/nonexistent" \ + --shell "/sbin/nologin" \ + --no-create-home \ + --uid 10001 \ + appuser + +# Set permissions for all required directories +RUN chown -R appuser:appuser /app && \ + chmod -R 755 /app + +USER appuser + +WORKDIR /app + +ENV RUST_LOG=info + +CMD ["/bin/server"] \ No newline at end of file diff --git a/Dockerfile.node b/Dockerfile.node new file mode 100644 index 00000000..51a95619 --- /dev/null +++ b/Dockerfile.node @@ -0,0 +1,84 @@ +# syntax=docker/dockerfile-upstream:master + +FROM lukemathwalker/cargo-chef:latest-rust-slim-bullseye AS cargo-chef +WORKDIR /app + +FROM cargo-chef AS planner +COPY . . 
+ +RUN apt-get update && apt-get install -y curl +RUN cargo chef prepare --recipe-path recipe.json + +FROM cargo-chef AS builder + +COPY --from=planner /app/recipe.json recipe.json + +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ + libpq-dev \ + pkg-config \ + libssl-dev \ + bash \ + ca-certificates \ + curl \ + wget \ + libclang-dev \ + cmake \ + build-essential \ + capnproto \ + libcapnp-dev \ + && rm -rf /var/lib/apt/lists/* + +RUN cargo chef cook --profile release --recipe-path recipe.json + +COPY . . + +RUN cargo build --locked --release --workspace --exclude tests + +FROM debian:bullseye-slim AS final +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ + libpq-dev \ + libssl1.1 \ + procps \ + libclang-dev \ + cmake \ + bash \ + build-essential \ + pkg-config \ + capnproto \ + libcapnp-dev \ + && rm -rf /var/lib/apt/lists/* + +ARG APP_NAME=pragma-node +ENV APP_NAME $APP_NAME + +# Create app directory +RUN mkdir /app + +# Copy the binary +COPY --from=builder /app/target/release/${APP_NAME} /bin/server + +# Copy the certificates +COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ + +RUN adduser \ + --disabled-password \ + --gecos "" \ + --home "/nonexistent" \ + --shell "/sbin/nologin" \ + --no-create-home \ + --uid 10001 \ + appuser + +# Set permissions for all required directories +RUN chown -R appuser:appuser /app && \ + chmod -R 755 /app + +USER appuser + +WORKDIR /app + +EXPOSE 3000 + +ENV RUST_LOG=info + +CMD ["/bin/server"] \ No newline at end of file diff --git a/README.md b/README.md index 0ed68271..3436fd29 100644 --- a/README.md +++ b/README.md @@ -1,148 +1,64 @@ # Pragma Node 🧩 -This repository contains the source code of the Pragma Node, which comprises several services and libraries: +This repository contains the source code of the Pragma Node, a highly accurate, readily available, and fast API built on an extensive network of data providers. Pragma empowers developers to drive the evolution of next-generation applications with reliable prices and financial data at high frequency. -- **Pragma Node**: Service for querying and storing data. -- **Pragma Ingestor**: Service running to ingest data from Data Sources. -- **Pragma Common**: Library containing common models and functions. -- **Pragma Entities**: Library containing models/Data Transfer Objects (DTOs) and functions for entities. -- **Pragma Offchain Service**: Offchain service aggregating and storing data from Data Providers. +## Documentation -## Getting Started +For detailed information about API endpoints and usage, visit our documentation at [docs.pragma.build/api/overview](https://docs.pragma.build/api/overview). -Follow these steps to get started with your Rust backend project based on this template: +The Pragma Node swagger documentation is available at `http://localhost:3000/node/swagger-ui` when running locally. -1. Clone this repository: +The full spec is also available at [openapi.json](./openapi.json). - ```bash - git clone https://github.com/astraly-labs/pragma-node.git - ``` +## Development Setup -2. Choose a specific module/framework branch or work with the default configuration. +### Quick Setup -3. Customize the project to your needs. +> [!NOTE] +> The script is still minimal and does not include `pulse` or other pushing services for the offchain database. +> But it will work fine for the onchain database. -4. 
Build and run your Rust backend: - - ```bash - cargo run - ``` - -# Services description - -### Pragma Node - -The Pragma Node service allows querying and storing data within the Pragma database. It retrieves, verifies, and sends data to the Kafka service. It also provides the ability to query data stored in the database. - -### Pragma Ingestor - -This service listens on the Kafka service and stores the retrieved data in the database. It performs certain checks on the collected data. - -### Pragma Common - -This library contains the models and functions common to different services. - -### Pragma Entities - -This library contains models and DTOs related to the entities used in the services and Pragma's database. - -### Pragma Consumer - -SDK allowing a user to retrieve data from our Merkle feeds. See [the repository](./pragma-consumer) for more information. - -# Services Structure - -The project follows a modular structure to keep the code organized and maintainable. Here's a brief overview of the project structure: - -### Pragma Node - -- `src/`: Contains the main source code of the application. - - `handlers/`: Define your API handlers. - - `infra/`: Define your infrastructure logic. - - `kafka/`: Kafka logic. - - `repositories`: Repositories logic. - - `utils`: Defines utility functions. - - `config.rs`: File containing the configuration structure. - - `errors.rs`: Contains error kinds and error formatting logic. - - `main.rs`: Application's entry point. - - `routes.rs`: Defines application routes. - -### Pragma Ingestor - -- `src/`: Contains the main source code of the application. - - `main.rs`: Application's entry point. - - `config.rs`: File containing the configuration structure. - - `consumer.rs`: Defines message consumption logic. - - `errors.rs`: Contains error kinds and error formatting logic. - -### Pragma Entities - -- `migrations`: Contains database migrations. -- `src/`: Contains the main source code of the application. - - `models/`: Defines application models. - - `dto/`: Defines application DTOs. - - `errors.rs`: Contains error kinds and error formatting logic. - - `schema.rs`: Defines the database schema. - - `connection.rs`: Defines the database connection. - - `db.rs`: Defines the logic for executing migrations (@TODO: To be moved). - - `lib.rs`: Defines the library's entry point. - -### Pragma Common - -- `src/`: Contains the main source code of the application. - - `lib.rs`: Defines the library's entry point. - - `tracing.rs`: Defines common tracing logic. - -### Pragma Consumer - -- `src/`: Contains the main source code of the SDK. - - `lib.rs`: Defines the SDK's entry point. - - `builder.rs`: Defines how the Pragma Consumer client is built, - - `constants.rs`: General constants, mainly for endpoints, - - `consumer.rs`: Main logic that consumes data from our API, - - `types.rs`: Contains the `MerkleFeedCalldata` definition. -- `examples/`: Contains a crate example showcasing how to use the SDK. - -## Development - -Simply run the setup script using: +Run the setup script: ```bash -# Running the script with only "sh" will fail because of syntax issues. +# Running the script with only "sh" will fail bash scripts/run_dev.sh ``` You will be prompted to either use the `indexer-service` repository or use a backup file. -When using the `indexer` option, make sure you've clone the indexer-service repository at the same level than this repo. +When using the `indexer` option, make sure you've cloned the indexer-service repository at the same level as this repo. 
```bash
git clone git@github.com:astraly-labs/indexer-service.git
```

-You can optionally set:
+Optional environment variables:

+- `APIBARA_KEY`: will be used as your Apibara API key instead of asking for it.
+- `STARTING_BLOCK`: will be used as the indexer starting block.
-- `APIBARA_KEY` : will be used as your Apibara API key instead of asking for it,
-- `STARTING_BLOCK` : will be used as the indexer starting block.

+### Manual Setup
-If you want to do the full flow manually, do the following
+#### 1. Start Services
-### 1. Start the services:
+We have a `compose` file for dev purposes. It only spins up the services required by `pragma-node` and lets you run it locally using `cargo`.

```bash
docker compose -f compose.dev.yaml up -d --build
```

-### 2. Fill the onchain database
+#### 2. Kafka Setup

-The database tables are created automatically using our migrations:
+Just make sure the topics are correctly created:

-- offchain migrations are in the `pragma-entities/migrations` folder.
-- onchain migrations are in the `infra/pragma-node/postgres_migrations` folder.
+```sh
+make init-kafka-topics
+```

-To fill the onchain tables with data you can either run the indexer or use a backup:
+#### 3. Database Setup

-#### Run the indexer:
+#### Onchain Database
+**Option 1: Using the indexer**
```bash
git clone git@github.com:astraly-labs/indexer-service.git
cd indexer-service
@@ -150,10 +66,9 @@ cd indexer-service
apibara run examples/pragma/testnet/sepolia-script-spot.js -A [YOUR_APIBARA_API_KEY] --connection-string postgres://postgres:test-password@localhost:5432/pragma --table-name spot_entry --timeout-duration-seconds=240
```

-#### Use the backup (ask for a file):
-
+**Option 2: Using a backup file**
```bash
-# copy the backup file to the container
+# copy the backup file to the container
docker cp /path/to/the/backup.sql pragma-node-postgre-db-1:/backup.sql
# connect to the container
docker exec -it pragma-node-postgre-db-1 bash
@@ -161,7 +76,48 @@ docker exec -it pragma-node-postgre-db-1 bash
PGPASSWORD=test-password pg_restore -h postgre-db -U postgres -d pragma /backup.sql
```

-### 3. Export the required environment variables:
+#### Offchain Database
+
+First, make sure that you're correctly registered as a publisher before pushing prices.
+
+You can simply run some SQL directly against the offchain database, for example:
+
+```sql
+INSERT INTO PUBLISHERS
+(
+    name,
+    master_key,
+    active_key,
+    active,
+    account_address
+) VALUES
+(
+    'YOUR_PUBLISHER_NAME', -- or any other name you want
+
+    -- For the keys below, make sure they correspond to a correct Starknet Account.
+    -- You can generate keys using any starknet wallet.
+    -- This is needed for publishing later, since you will need your private key.
+    '0x0257a51cd27e950a2ba767795446b4c6ed86116f297c820e5a7159c6b00c6ac9',
+    '0x0257a51cd27e950a2ba767795446b4c6ed86116f297c820e5a7159c6b00c6ac9',
+    true,
+    '0x012322c5EA7A94cC027970694ee70e45434f1F71050e0e2D0d9DE83f1DE66945'
+);
+```
+
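+If you don't have a Starknet wallet at hand, you can also generate a keypair programmatically. The snippet below is a minimal, hypothetical sketch assuming the `starknet` crate pinned in this workspace; it only produces a fresh keypair, and `account_address` must still point to a real deployed account.
+
+```rust
+// Hypothetical helper: generate a random Starknet keypair for the
+// PUBLISHERS row above. Assumes the `starknet` workspace dependency;
+// nothing here deploys or validates an account.
+use starknet::signers::SigningKey;
+
+fn main() {
+    let signing_key = SigningKey::from_random();
+    // Keep the private key secret; it is what you will sign prices with.
+    println!("private key: {:#x}", signing_key.secret_scalar());
+    // The matching public key of the keypair.
+    println!("public key:  {:#x}", signing_key.verifying_key().scalar());
+}
+```
+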
+Now, you can, for example, use `pulse`, Pragma's price-pushing service:
+
+```bash
+git clone https://github.com/astraly-labs/pulse.git
+cd pulse
+cp .env.example .env # and fill the values
+cargo run -- --config ./pulse.config.yaml
+```
+
+We also have the [python price-pusher](https://github.com/astraly-labs/pragma-sdk/tree/master/price-pusher), which should work with the API.
+
+#### 4. Environment Setup
+
+Either create a `.env` file following the `.env.example` or export the required variables:

```bash
export MODE=dev
@@ -173,13 +129,15 @@ export HOST="0.0.0.0"
export PORT=3000
export METRICS_PORT=8080
export KAFKA_BROKERS=localhost:29092
+# Optional, but allows you to export OTEL logs anywhere
export OTEL_EXPORTER_OTLP_ENDPOINT=localhost:4317
```
-### 4. Start the Pragma Node service:
+
+#### 5. Start Pragma Node
+
+Now that all services are running correctly, you can start the server:

```bash
cargo run --bin pragma-node
```
-
-The pragma-node swagger documentation is available at `http://localhost:3000/node/swagger-ui`.
diff --git a/cloudbuild.yaml b/cloudbuild.yaml
new file mode 100644
index 00000000..5ac6f64f
--- /dev/null
+++ b/cloudbuild.yaml
@@ -0,0 +1,30 @@
+timeout: 3600s
+
+steps:
+  - name: 'gcr.io/cloud-builders/docker'
+    id: docker-build-ingestor
+    args: [ 'build',
+      '--tag=${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPO_NAME}/pragma-ingestor:latest',
+      '--file=Dockerfile.ingestor',
+      '.' ]
+
+  - name: 'gcr.io/cloud-builders/docker'
+    id: docker-build-node
+    args: [ 'build',
+      '--tag=${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPO_NAME}/pragma-node:latest',
+      '--file=Dockerfile.node',
+      '.' ]
+
+  - name: 'gcr.io/cloud-builders/docker'
+    id: docker-push-ingestor
+    args: ['push', '${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPO_NAME}/pragma-ingestor:latest']
+    waitFor: ['docker-build-ingestor']
+
+  - name: 'gcr.io/cloud-builders/docker'
+    id: docker-push-node
+    args: ['push', '${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPO_NAME}/pragma-node:latest']
+    waitFor: ['docker-build-node']
+
+options:
+  logging: CLOUD_LOGGING_ONLY
+  machineType: 'E2_HIGHCPU_8'
\ No newline at end of file
diff --git a/compose.dev.yaml b/compose.dev.yaml
index f9342951..92ddb65f 100644
--- a/compose.dev.yaml
+++ b/compose.dev.yaml
@@ -1,86 +1,23 @@
-# Comments are provided throughout this file to help you get started.
-# If you need more help, visit the Docker compose reference guide at
-# https://docs.docker.com/compose/compose-file/
-
-# Here the instructions define your application as a service called "server".
-# This service is built from the Dockerfile in the current directory.
-# You can add other services your application may depend on here, such as a
-# database or a cache. For examples, see the Awesome Compose repository:
-# https://github.com/docker/awesome-compose
services:
-  # server:
-  #   environment:
-  #     - PORT=3000
-  #     - POSTGRES_DATABASE_URL=postgres://postgres:postgres@host.docker.internal:5432/project_f
-  #     - HOST=0.0.0.0
-  #     - RUST_LOG=info
-  #   build:
-  #     context: .
-  #     target: final
-  #   ports:
-  #     - 3000:3000
-  # The commented out section below is an example of how to define a PostgreSQL
-  # database that your application can use. `depends_on` tells Docker Compose to
-  # start the database before your application. The `db-data` volume persists the
-  # database data between container restarts. The `db-password` secret is used
-  # to set the database password. You must create `db/password.txt` and add
-  # a password of your choosing to it before running `docker compose up`.
- # depends_on: - # db: - # condition: service_healthy - # pragma-node: - # container_name: "pragma-node" - # environment: - # - DATABASE_MAX_CONN=25 - # - TOPIC=pragma-data - # - KAFKA_BROKERS=pragma-kafka:9092 - # - OFFCHAIN_DATABASE_URL=postgres://postgres:test-password@offchain-db:5432/pragma - # - ONCHAIN_DATABASE_URL=postgres://postgres:test-password@onchain-db:5433/pragma - # - METRICS_PORT=8080 - # depends_on: - # pragma-kafka: - # condition: service_healthy - # offchain-db: - # condition: service_healthy - # onchain-db: - # condition: service_healthy - # networks: - # - pragma-kafka-network - # - pragma-db-network - # ports: - # - "3000:3000" - # - "8080:8080" - # build: - # dockerfile: infra/pragma-node/Dockerfile - onchain-db: - image: timescale/timescaledb-ha:pg14-latest + image: timescale/timescaledb-ha:pg17.4-ts2.18.2 restart: always user: postgres - secrets: - - db-password networks: - pragma-db-network volumes: - - ./infra/pragma-node/postgres_migrations/01-init.sql:/docker-entrypoint-initdb.d/01-init.sql - - ./infra/pragma-node/postgres_migrations/02-add-publishers.sql:/docker-entrypoint-initdb.d/02-add-publishers.sql - - ./infra/pragma-node/postgres_migrations/03-create-publishers-index.sql:/docker-entrypoint-initdb.d/03-create-publishers-index.sql - - ./infra/pragma-node/postgres_migrations/04-create-timescale-hypertables.sql:/docker-entrypoint-initdb.d/04-create-timescale-hypertables.sql - - ./infra/pragma-node/postgres_migrations/05-create-timescale-median-aggregates-spot.sql:/docker-entrypoint-initdb.d/05-create-timescale-median-aggregates-spot.sql - - ./infra/pragma-node/postgres_migrations/06-create-timescale-median-aggregates-future.sql:/docker-entrypoint-initdb.d/06-create-timescale-median-aggregates-future.sql - - ./infra/pragma-node/postgres_migrations/07-create-timescale-median-aggregates-mainnet-spot.sql:/docker-entrypoint-initdb.d/07-create-timescale-median-aggregates-mainnet-spot.sql - - ./infra/pragma-node/postgres_migrations/08-create-timescale-median-aggregates-mainnet-future.sql:/docker-entrypoint-initdb.d/08-create-timescale-median-aggregates-mainnet-future.sql - - ./infra/pragma-node/postgres_migrations/09-create-timescale-ohlc-aggregates-spot.sql:/docker-entrypoint-initdb.d/09-create-timescale-ohlc-aggregates-spot.sql - - ./infra/pragma-node/postgres_migrations/10-create-timescale-ohlc-aggregates-future.sql:/docker-entrypoint-initdb.d/10-create-timescale-ohlc-aggregates-future.sql - - ./infra/pragma-node/postgres_migrations/11-create-timescale-ohlc-aggregates-mainnet-spot.sql:/docker-entrypoint-initdb.d/11-create-timescale-ohlc-aggregates-mainnet-spot.sql - - ./infra/pragma-node/postgres_migrations/12-create-timescale-ohlc-aggregates-mainnet-future.sql:/docker-entrypoint-initdb.d/12-create-timescale-ohlc-aggregates-mainnet-future.sql - - ./infra/pragma-node/postgres_migrations/13-add-weekly-and-daily-median-aggregates.sql:/docker-entrypoint-initdb.d/13-add-weekly-and-daily-median-aggregates.sql - - ./infra/pragma-node/postgres_migrations/14-add-weekly-and-daily-ohlc-aggregates.sql:/docker-entrypoint-initdb.d/14-add-weekly-and-daily-ohlc-aggregates.sql - - ./infra/pragma-node/postgres_migrations/15-create-indexes.sql:/docker-entrypoint-initdb.d/15-create-indexes.sql + - ./sql/01-init.sql:/docker-entrypoint-initdb.d/01-init.sql + - ./sql/02-create-publishers.sql:/docker-entrypoint-initdb.d/02-add-publishers.sql + - ./sql/03-create-timescale-hypertables.sql:/docker-entrypoint-initdb.d/03-create-timescale-hypertables.sql + - 
./sql/04-create-timescale-median-aggregates.sql:/docker-entrypoint-initdb.d/04-create-timescale-median-aggregates.sql + - ./sql/05-create-timescale-ohlc-aggregates.sql:/docker-entrypoint-initdb.d/05-create-timescale-ohlc-aggregates.sql + - ./sql/06-create-timescale-twap-aggregates.sql:/docker-entrypoint-initdb.d/06-create-timescale-twap-aggregates.sql + - ./sql/07-add-compression.sql:/docker-entrypoint-initdb.d/07-add-compression.sql + - ./sql/08-create-indexes.sql:/docker-entrypoint-initdb.d/08-create-indexes.sql - onchain-db-data:/var/lib/postgresql/data environment: - POSTGRES_DB=pragma - - POSTGRES_PASSWORD_FILE=/run/secrets/db-password + - POSTGRES_PASSWORD=test-password - PGPORT=5433 ports: - 5433:5433 @@ -93,18 +30,16 @@ services: retries: 5 offchain-db: - image: timescale/timescaledb-ha:pg14-latest + image: timescale/timescaledb-ha:pg17.4-ts2.18.2 restart: always user: postgres - secrets: - - db-password networks: - pragma-db-network volumes: - offchain-db-data:/var/lib/postgresql/data environment: - POSTGRES_DB=pragma - - POSTGRES_PASSWORD_FILE=/run/secrets/db-password + - POSTGRES_PASSWORD=test-password ports: - 5432:5432 healthcheck: @@ -157,21 +92,15 @@ services: KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://pragma-kafka:9092,PLAINTEXT_E://localhost:29092 KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,PLAINTEXT_E:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT - #KAFKA_SOCKET_REQUEST_MAX_BYTES: "10000000" - #KAFKA_MESSAGE_MAX_BYTES: "10000000" - #KAFKA_TOPIC_MAX_MESSAGE_BYTES: "15728640" - #KAFKA_REPLICA_FETCH_MAX_BYTES: "15728640" KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true" KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 pragma-ingestor-1: container_name: "pragma-ingestor-1" environment: + - MODE=dev - OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 - DATABASE_MAX_CONN=25 - - BROKERS=pragma-kafka:9092 - - TOPIC=pragma-data - - GROUP_ID=pragma-data - OFFCHAIN_DATABASE_URL=postgres://postgres:test-password@offchain-db:5432/pragma - ONCHAIN_DATABASE_URL=postgres://postgres:test-password@onchain-db:5433/pragma depends_on: @@ -183,28 +112,15 @@ services: - pragma-kafka-network - pragma-db-network build: - dockerfile: infra/pragma-ingestor/Dockerfile + dockerfile: Dockerfile.ingestor networks: pragma-db-network: - # production - #driver: bridge - #internal: true pragma-kafka-network: - # production - #driver: bridge - #internal: true pragma-zookeeper-network: - # production - #driver: bridge - #internal: true volumes: offchain-db-data: onchain-db-data: pragma_zookeeper_data: pragma_zookeeper_logs: - -secrets: - db-password: - file: infra/db/password.txt diff --git a/compose.yaml b/compose.yaml index b09beffa..1b868781 100644 --- a/compose.yaml +++ b/compose.yaml @@ -54,7 +54,7 @@ services: dockerfile: infra/pragma-node/Dockerfile onchain-db: - image: timescale/timescaledb-ha:pg14-latest + image: timescale/timescaledb-ha:pg17.4-ts2.18.2 restart: always user: postgres secrets: @@ -62,20 +62,14 @@ services: networks: - pragma-db-network volumes: - - ./infra/pragma-node/postgres_migrations/01-init.sql:/docker-entrypoint-initdb.d/01-init.sql - - ./infra/pragma-node/postgres_migrations/02-add-publishers.sql:/docker-entrypoint-initdb.d/02-add-publishers.sql - - ./infra/pragma-node/postgres_migrations/03-create-publishers-index.sql:/docker-entrypoint-initdb.d/03-create-publishers-index.sql - - ./infra/pragma-node/postgres_migrations/04-create-timescale-hypertables.sql:/docker-entrypoint-initdb.d/04-create-timescale-hypertables.sql - - 
./infra/pragma-node/postgres_migrations/05-create-timescale-median-aggregates-spot.sql:/docker-entrypoint-initdb.d/05-create-timescale-median-aggregates-spot.sql - - ./infra/pragma-node/postgres_migrations/06-create-timescale-median-aggregates-future.sql:/docker-entrypoint-initdb.d/06-create-timescale-median-aggregates-future.sql - - ./infra/pragma-node/postgres_migrations/07-create-timescale-median-aggregates-mainnet-spot.sql:/docker-entrypoint-initdb.d/07-create-timescale-median-aggregates-mainnet-spot.sql - - ./infra/pragma-node/postgres_migrations/08-create-timescale-median-aggregates-mainnet-future.sql:/docker-entrypoint-initdb.d/08-create-timescale-median-aggregates-mainnet-future.sql - - ./infra/pragma-node/postgres_migrations/09-create-timescale-ohlc-aggregates-spot.sql:/docker-entrypoint-initdb.d/09-create-timescale-ohlc-aggregates-spot.sql - - ./infra/pragma-node/postgres_migrations/10-create-timescale-ohlc-aggregates-future.sql:/docker-entrypoint-initdb.d/10-create-timescale-ohlc-aggregates-future.sql - - ./infra/pragma-node/postgres_migrations/11-create-timescale-ohlc-aggregates-mainnet-spot.sql:/docker-entrypoint-initdb.d/11-create-timescale-ohlc-aggregates-mainnet-spot.sql - - ./infra/pragma-node/postgres_migrations/12-create-timescale-ohlc-aggregates-mainnet-future.sql:/docker-entrypoint-initdb.d/12-create-timescale-ohlc-aggregates-mainnet-future.sql - - ./infra/pragma-node/postgres_migrations/13-add-weekly-and-daily-median-aggregates.sql:/docker-entrypoint-initdb.d/13-add-weekly-and-daily-median-aggregates.sql - - ./infra/pragma-node/postgres_migrations/14-add-weekly-and-daily-ohlc-aggregates.sql:/docker-entrypoint-initdb.d/14-add-weekly-and-daily-ohlc-aggregates.sql + - ./sql/01-init.sql:/docker-entrypoint-initdb.d/01-init.sql + - ./sql/02-create-publishers.sql:/docker-entrypoint-initdb.d/02-add-publishers.sql + - ./sql/03-create-timescale-hypertables.sql:/docker-entrypoint-initdb.d/03-create-timescale-hypertables.sql + - ./sql/04-create-timescale-median-aggregates.sql:/docker-entrypoint-initdb.d/04-create-timescale-median-aggregates.sql + - ./sql/05-create-timescale-ohlc-aggregates.sql:/docker-entrypoint-initdb.d/05-create-timescale-ohlc-aggregates.sql + - ./sql/06-create-timescale-twap-aggregates.sql:/docker-entrypoint-initdb.d/06-create-timescale-twap-aggregates.sql + - ./sql/07-add-compression.sql:/docker-entrypoint-initdb.d/07-add-compression.sql + - ./sql/08-create-indexes.sql:/docker-entrypoint-initdb.d/08-create-indexes.sql - onchain-db-data:/var/lib/postgresql/data environment: - POSTGRES_DB=pragma @@ -92,7 +86,7 @@ services: retries: 5 offchain-db: - image: timescale/timescaledb-ha:pg14-latest + image: timescale/timescaledb-ha:pg17.4-ts2.18.2 restart: always user: postgres secrets: diff --git a/helm/Chart.yaml b/helm/Chart.yaml new file mode 100644 index 00000000..7bc34c84 --- /dev/null +++ b/helm/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v2 +name: pragma-node +description: node, a Pragma price-pushing service. +version: 0.1.0 +appVersion: "1.0.0" +icon: https://raw.githubusercontent.com/kubernetes/kubernetes/refs/heads/master/logo/logo.svg \ No newline at end of file diff --git a/helm/templates/_helpers.tpl b/helm/templates/_helpers.tpl new file mode 100644 index 00000000..a8226ce6 --- /dev/null +++ b/helm/templates/_helpers.tpl @@ -0,0 +1,60 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "pragma-node.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. 
+*/}} +{{- define "pragma-node.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "pragma-node.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "pragma-node.labels" -}} +helm.sh/chart: {{ include "pragma-node.chart" . }} +{{ include "pragma-node.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "pragma-node.selectorLabels" -}} +app.kubernetes.io/name: {{ include "pragma-node.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create the name of the service account to use +*/}} +{{- define "pragma-node.serviceAccountName" -}} +{{- if .Values.serviceAccount.create }} +{{- default (include "pragma-node.fullname" .) .Values.serviceAccount.name }} +{{- else }} +{{- default "default" .Values.serviceAccount.name }} +{{- end }} +{{- end }} \ No newline at end of file diff --git a/helm/templates/deployment.yaml b/helm/templates/deployment.yaml new file mode 100644 index 00000000..1c302721 --- /dev/null +++ b/helm/templates/deployment.yaml @@ -0,0 +1,90 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "pragma-node.fullname" . }} + annotations: + argocd.argoproj.io/sync-wave: "2" + labels: + {{- include "pragma-node.labels" . | nindent 4 }} +spec: + {{- if not .Values.autoscaling.enabled }} + replicas: {{ .Values.replicaCount }} + {{- end }} + strategy: + type: {{ .Values.deploymentStrategy}} + selector: + matchLabels: + {{- include "pragma-node.selectorLabels" . | nindent 6 }} + template: + metadata: + {{- with .Values.podAnnotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "pragma-node.selectorLabels" . | nindent 8 }} + spec: + {{- with .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + serviceAccountName: {{ include "pragma-node.serviceAccountName" . }} + securityContext: + {{- toYaml .Values.podSecurityContext | nindent 8 }} + containers: + - name: {{ include "pragma-node.fullname" . }} + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + ports: + {{- range .Values.service.ports }} + - containerPort: {{ .containerPort }} + name: {{ .portName }} + protocol: {{ .protocol }} + {{- end }} + + {{- with .Values.env }} + env: + {{- toYaml . | nindent 12 }} + {{- end }} + {{- if .Values.envFromSecret }} + envFrom: + - secretRef: + name: {{ .Values.envFromSecret.secretName }} + {{- end }} + {{- with .Values.livenessProbe }} + livenessProbe: + {{- toYaml . | nindent 12 }} + {{- end }} + {{- with .Values.readinessProbe }} + readinessProbe: + {{- toYaml . 
| nindent 12 }} + {{- end }} + resources: + {{- toYaml .Values.resources | nindent 12 }} + volumeMounts: + - name: openapi-volume + mountPath: /app/openapi + {{- if .Values.deployment.extraVolumeMounts }} + {{- toYaml .Values.deployment.extraVolumeMounts | nindent 12 }} + {{- end }} + terminationGracePeriodSeconds: {{ .Values.terminationGracePeriodSeconds }} + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} + volumes: + - name: openapi-volume + emptyDir: {} + {{- if .Values.deployment.extraVolumes }} + {{- toYaml .Values.deployment.extraVolumes | nindent 8 }} + {{- end }} diff --git a/helm/templates/externalSecret.yaml b/helm/templates/externalSecret.yaml new file mode 100644 index 00000000..1f7aed82 --- /dev/null +++ b/helm/templates/externalSecret.yaml @@ -0,0 +1,18 @@ +{{- if .Values.gcpSecrets.enabled }} +apiVersion: external-secrets.io/v1beta1 +kind: ExternalSecret +metadata: + name: {{ .Values.gcpSecrets.externalSecret.name }} + annotations: + argocd.argoproj.io/sync-wave: "1" +spec: + refreshInterval: {{ .Values.gcpSecrets.externalSecret.refreshInterval }} + secretStoreRef: + name: {{ .Values.gcpSecrets.secretStore.name }} + kind: SecretStore + target: + name: {{ .Values.envFromSecret.secretName }} + dataFrom: + - extract: + key: {{ .Values.gcpSecrets.externalSecret.secretPath }} +{{- end }} \ No newline at end of file diff --git a/helm/templates/hpa.yaml b/helm/templates/hpa.yaml new file mode 100644 index 00000000..b131f6ff --- /dev/null +++ b/helm/templates/hpa.yaml @@ -0,0 +1,34 @@ +{{- if .Values.autoscaling.enabled }} +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: {{ include "pragma-node.fullname" . }} + annotations: + argocd.argoproj.io/sync-wave: "2" + labels: + {{- include "pragma-node.labels" . | nindent 4 }} +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: {{ include "pragma-node.fullname" . }} + minReplicas: {{ .Values.autoscaling.minReplicas }} + maxReplicas: {{ .Values.autoscaling.maxReplicas }} + metrics: + {{- if .Values.autoscaling.targetCPUUtilizationPercentage }} + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }} + {{- end }} + {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }} + - type: Resource + resource: + name: memory + target: + type: Utilization + averageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/helm/templates/ingress.yaml b/helm/templates/ingress.yaml new file mode 100644 index 00000000..b08abe21 --- /dev/null +++ b/helm/templates/ingress.yaml @@ -0,0 +1,50 @@ +{{- $fullname := include "pragma-node.fullname" . }} +{{- if .Values.ingress }} +{{- range .Values.ingress }} +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + {{- if .name }} + name: {{ .name }} + {{- else if .suffix }} + name: {{ $fullname }}-{{ .suffix }}-ingress + {{- else }} + name: {{ $fullname }}-ingress + {{- end }} + annotations: + argocd.argoproj.io/sync-wave: "2" + {{- with .annotations }} + {{- toYaml . 
| nindent 4 }} + {{- end }} +spec: + {{- if .className }} + ingressClassName: {{ .className }} + {{- end }} + {{- if .tls }} + tls: + {{- range .tls }} + - hosts: + {{- range .hosts }} + - {{ . | quote }} + {{- end }} + secretName: {{ .secretName }} + {{- end }} + {{- end }} + rules: + {{- range .hosts }} + - host: {{ .host | quote }} + http: + paths: + {{- range .paths }} + - path: {{ .path }} + pathType: {{ .pathType }} + backend: + service: + name: {{ $fullname }} + port: + number: {{ .servicePort }} + {{- end }} + {{- end }} +{{- end }} +{{- end }} diff --git a/helm/templates/pdb.yaml b/helm/templates/pdb.yaml new file mode 100644 index 00000000..70c39b64 --- /dev/null +++ b/helm/templates/pdb.yaml @@ -0,0 +1,20 @@ +{{- if .Values.podDisruptionBudget }} +apiVersion: policy/v1 +kind: PodDisruptionBudget +metadata: + annotations: + argocd.argoproj.io/sync-wave: "2" + name: {{ include "pragma-node.fullname" . }} + labels: + {{- include "pragma-node.labels" . | nindent 4 }} +spec: + {{- with .Values.podDisruptionBudget.minAvailable }} + minAvailable: {{ . }} + {{- end }} + {{- with .Values.podDisruptionBudget.maxUnavailable }} + maxUnavailable: {{ . }} + {{- end }} + selector: + matchLabels: + {{- include "pragma-node.selectorLabels" . | nindent 6 }} +{{- end }} \ No newline at end of file diff --git a/helm/templates/secretStore.yaml b/helm/templates/secretStore.yaml new file mode 100644 index 00000000..28fc484b --- /dev/null +++ b/helm/templates/secretStore.yaml @@ -0,0 +1,12 @@ +{{- if .Values.gcpSecrets.enabled }} +apiVersion: external-secrets.io/v1beta1 +kind: SecretStore +metadata: + name: {{ .Values.gcpSecrets.externalSecret.name }}-store + annotations: + argocd.argoproj.io/sync-wave: "0" +spec: + provider: + gcpsm: + projectID: {{ .Values.gcpSecrets.projectId }} +{{- end }} \ No newline at end of file diff --git a/helm/templates/service.yaml b/helm/templates/service.yaml new file mode 100644 index 00000000..a183dc69 --- /dev/null +++ b/helm/templates/service.yaml @@ -0,0 +1,27 @@ +{{- if .Values.service.enabled }} +apiVersion: v1 +kind: Service +metadata: + name: {{ include "pragma-node.fullname" . }} + labels: + {{- include "pragma-node.labels" . | nindent 4 }} + {{- with .Values.service.labels }} + {{- toYaml . | nindent 4 }} + {{- end }} + annotations: + argocd.argoproj.io/sync-wave: "2" + {{- with .Values.service.annotations }} + {{- toYaml . | nindent 4 }} + {{- end }} +spec: + type: {{ .Values.service.type }} + ports: + {{- range .Values.service.ports }} + - port: {{ .port }} + targetPort: {{ .containerPort }} + protocol: {{ .protocol }} + name: {{ .portName }} + {{- end }} + selector: + {{- include "pragma-node.selectorLabels" . | nindent 4 }} +{{- end }} diff --git a/helm/templates/serviceaccount.yaml b/helm/templates/serviceaccount.yaml new file mode 100644 index 00000000..487fdecc --- /dev/null +++ b/helm/templates/serviceaccount.yaml @@ -0,0 +1,14 @@ +{{- if .Values.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "pragma-node.serviceAccountName" . }} + labels: + {{- include "pragma-node.labels" . | nindent 4 }} + annotations: + argocd.argoproj.io/sync-wave: "2" + {{- with .Values.serviceAccount.annotations }} + {{- toYaml . 
| nindent 4 }} + {{- end }} +automountServiceAccountToken: {{ .Values.serviceAccount.automount }} +{{- end }} \ No newline at end of file diff --git a/helm/values.yaml b/helm/values.yaml new file mode 100644 index 00000000..9ed7b63c --- /dev/null +++ b/helm/values.yaml @@ -0,0 +1,124 @@ +replicaCount: 1 + +image: + repository: ${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPO_NAME}/pragma-node + pullPolicy: Always + tag: "latest" + +imagePullSecrets: [] +nameOverride: "" +fullnameOverride: "" + +serviceAccount: + # Specifies whether a service account should be created + create: false + # Automatically mount a ServiceAccount's API credentials? + automount: true + # Annotations to add to the service account + annotations: {} + # The name of the service account to use. + # If not set and create is true, a name is generated using the fullname template + name: "" + +deploymentStrategy: RollingUpdate + +podAnnotations: {} + +podSecurityContext: + runAsNonRoot: true + runAsUser: 10001 + +securityContext: + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsNonRoot: true + runAsUser: 10001 + +autoscaling: + enabled: false + minReplicas: 1 + maxReplicas: 100 + targetCPUUtilizationPercentage: 80 + # targetMemoryUtilizationPercentage: 80 + + +service: + enabled: true + type: ClusterIP + annotations: {} + labels: {} + ports: + - port: 80 + containerPort: 8080 + protocol: TCP + portName: http + + +podDisruptionBudget: {} + # minAvailable: 1 + # maxUnavailable: "" + +resources: {} + # limits: + # cpu: 100m + # memory: 128Mi + # requests: + # cpu: 100m + # memory: 128Mi +nodeSelector: {} + # Nodelabel: "node-devnet" + +tolerations: [] + # - key: "devnet" + # operator: "Equal" + # value: "reserved" + # effect: "NoSchedule" + +affinity: {} + +ingress: {} + # - name: "dummy" + # className: "nginx" + # annotations: {} + # # kubernetes.io/ingress.class: nginx + # # Add other annotations as needed + # hosts: + # - host: chart-example.local + # paths: + # - path: / + # pathType: Prefix + # servicePort: 80 + # - host: chart-example-secure.local + # paths: + # - path: / + # pathType: Prefix + # servicePort: 443 + # tls: [] + # # - secretName: chart-example-tls + # # hosts: + # # - chart-example-secure.local + +# GCP Secrets Configuration +gcpSecrets: + enabled: false + projectId: ${PROJECT_ID} + externalSecret: + name: "node-secrets" + secretPath: "node-secrets" + refreshInterval: "1h" + +# Environment variables from secret +envFromSecret: {} + # secretName: "monitoring-secrets" + +terminationGracePeriodSeconds: 60 + +env: [] + # - name: RUST_BACKTRACE + # value: "1" + # - name: COINGECKO_RETRY_DELAY + # value: "5000" + # - name: COINGECKO_MAX_RETRIES + # value: "3" \ No newline at end of file diff --git a/infra/pragma-ingestor/Dockerfile b/infra/pragma-ingestor/Dockerfile deleted file mode 100644 index 96ac6bc7..00000000 --- a/infra/pragma-ingestor/Dockerfile +++ /dev/null @@ -1,34 +0,0 @@ -FROM rust:1.80 as builder - -# Install additional dependencies -RUN apt-get update && apt-get install -y \ - build-essential \ - libssl-dev \ - pkg-config \ - libclang-dev \ - cmake - -WORKDIR /home/pragma - -# Copy the entire project -COPY . . 
- -# Build the project -RUN cargo build --release --bin pragma-ingestor - -FROM debian:bookworm-slim - -RUN apt-get clean && apt-get update -RUN apt-get install -y libpq-dev bash - -RUN groupadd pragma -RUN useradd -G pragma ingestor - -USER ingestor:pragma - -WORKDIR /home/pragma-ingestor - -# Copy the built binary from the builder stage -COPY --from=builder --chown=ingestor:pragma /home/pragma/target/release/pragma-ingestor /usr/local/bin/pragma-ingestor - -CMD ["pragma-ingestor"] \ No newline at end of file diff --git a/infra/pragma-ingestor/buildspec.yml b/infra/pragma-ingestor/buildspec.yml deleted file mode 100644 index dfc81369..00000000 --- a/infra/pragma-ingestor/buildspec.yml +++ /dev/null @@ -1,30 +0,0 @@ -version: 0.2 -phases: - pre_build: - commands: - - echo Logging in to Amazon ECR... - - aws --version - - aws ecr get-login-password --region $ECR_REGION | docker login --username AWS --password-stdin $AWS_ACCOUNT_ID.dkr.ecr.$ECR_REGION.amazonaws.com - - REPOSITORY_URI=$AWS_ACCOUNT_ID.dkr.ecr.$ECR_REGION.amazonaws.com/$ECR_REPOSITORY_NAME - - COMMIT_HASH=$(echo $CODEBUILD_RESOLVED_SOURCE_VERSION | cut -c 1-7) - - IMAGE_TAG=${COMMIT_HASH:=latest} - build: - commands: - - echo Build started on `date` - - echo Building the Docker image... - - ls -ltr - - docker build -f infra/pragma-ingestor/Dockerfile -t $REPOSITORY_URI:latest . - - docker tag $REPOSITORY_URI:latest $REPOSITORY_URI:$IMAGE_TAG - post_build: - commands: - - echo Build completed on `date` - - echo Pushing the Docker images... - - docker push $REPOSITORY_URI:latest - - docker push $REPOSITORY_URI:$IMAGE_TAG - - echo Writing image definitions file... - - printf '[{"name":"%s","imageUri":"%s"}]' $ECS_CONTAINER_NAME $REPOSITORY_URI:$IMAGE_TAG > imagedefinitions.json -artifacts: - files: - - imagedefinitions.json - - infra/pragma-ingestor/config.yml - discard-paths: yes diff --git a/infra/pragma-ingestor/config.yml b/infra/pragma-ingestor/config.yml deleted file mode 100644 index 5a648cc6..00000000 --- a/infra/pragma-ingestor/config.yml +++ /dev/null @@ -1,12 +0,0 @@ -path: "/" -container_port: 8080 -health_check_path: "/" -container_environment: - - region: "eu-west-3" - - prefix: "/conf/{{ SERVICE_NAME }}/{{ RUN_ENV }}" - - keys: - - OFFCHAIN_DATABASE_URL - - DATABASE_MAX_CONN - - BROKERS - - TOPIC - - GROUP_ID diff --git a/infra/pragma-ingestor/config/.env.example b/infra/pragma-ingestor/config/.env.example deleted file mode 100644 index 86c3a176..00000000 --- a/infra/pragma-ingestor/config/.env.example +++ /dev/null @@ -1,6 +0,0 @@ -OFFCHAIN_DATABASE_URL="postgres://postgres:test-password@timescale-db:5432/pragma" -ONCHAIN_DATABASE_URL="postgres://postgres:test-password@postgre-db:5432/pragma" -BROKERS="pragma-kafka:29092" -TOPIC="pragma-data" -GROUP_ID="pragma-data" -OTEL_EXPORTER_OTLP_ENDPOINT=http://signoz.dev.pragma.build:4317 diff --git a/infra/pragma-node/Dockerfile b/infra/pragma-node/Dockerfile deleted file mode 100644 index 7175a02f..00000000 --- a/infra/pragma-node/Dockerfile +++ /dev/null @@ -1,31 +0,0 @@ -FROM rust:1.80 as builder - -WORKDIR /home/pragma - -# Copy the entire project -COPY . . 
- -# Build the project -RUN cargo build --release --bin pragma-node - -FROM debian:bookworm-slim - -# Install necessary packages including CA certificates -RUN apt-get clean && apt-get update && \ - apt-get install -y libpq-dev libssl-dev ca-certificates bash && \ - rm -rf /var/lib/apt/lists/* && \ - update-ca-certificates - -RUN groupadd pragma -RUN useradd -G pragma node - -USER node:pragma - -WORKDIR /home/pragma-node - -# Copy the built binary from the builder stage -COPY --from=builder --chown=node:pragma /home/pragma/target/release/pragma-node /usr/local/bin/pragma-node - -EXPOSE 3000 - -CMD ["pragma-node"] \ No newline at end of file diff --git a/infra/pragma-node/Dockerfile.bk b/infra/pragma-node/Dockerfile.bk deleted file mode 100644 index bad0fb8c..00000000 --- a/infra/pragma-node/Dockerfile.bk +++ /dev/null @@ -1,82 +0,0 @@ -# syntax=docker/dockerfile:1 - -# Comments are provided throughout this file to help you get started. -# If you need more help, visit the Dockerfile reference guide at -# https://docs.docker.com/engine/reference/builder/ - -################################################################################ -# Create a stage for building the application. - -ARG RUST_VERSION=1.75.0 -ARG APP_NAME=pragma-node -FROM rust:${RUST_VERSION}-slim-bullseye AS build -ARG APP_NAME -WORKDIR /app - - - -RUN apt update -RUN apt install -y libpq-dev - - - -# Build the application. -# Leverage a cache mount to /usr/local/cargo/registry/ -# for downloaded dependencies and a cache mount to /app/target/ for -# compiled dependencies which will speed up subsequent builds. -# Leverage a bind mount to the src directory to avoid having to copy the -# source code into the container. Once built, copy the executable to an -# output directory before the cache mounted /app/target is unmounted. 
-RUN --mount=type=bind,source=src,target=src \ - --mount=type=bind,source=Cargo.toml,target=Cargo.toml \ - --mount=type=bind,source=Cargo.lock,target=Cargo.lock \ - --mount=type=bind,source=migrations,target=migrations \ - --mount=type=cache,target=/app/target/ \ - --mount=type=cache,target=/usr/local/cargo/registry/ \ - < imagedefinitions.json -artifacts: - files: - - imagedefinitions.json - - infra/pragma-node/config.yml - discard-paths: yes diff --git a/infra/pragma-node/config.yml b/infra/pragma-node/config.yml deleted file mode 100644 index 98b710d4..00000000 --- a/infra/pragma-node/config.yml +++ /dev/null @@ -1,17 +0,0 @@ -path: "/" -container_port: 8080 -health_check_path: "/node" -container_environment: - - region: "eu-west-3" - - prefix: "/conf/{{ SERVICE_NAME }}/{{ RUN_ENV }}" - - keys: - - OFFCHAIN_DATABASE_URL - - ONCHAIN_DATABASE_URL - - DATABASE_MAX_CONN - - TOPIC - - HOST - - PORT - - METRICS_PORT - - KAFKA_BROKERS - - REDIS_HOST - - REDIS_PORT diff --git a/infra/pragma-node/config/.env.example b/infra/pragma-node/config/.env.example deleted file mode 100644 index 559badbd..00000000 --- a/infra/pragma-node/config/.env.example +++ /dev/null @@ -1,11 +0,0 @@ -OFFCHAIN_DATABASE_URL="postgres://postgres:test-password@timescale-db:5432/pragma" -ONCHAIN_DATABASE_URL="postgres://postgres:test-password@postgre-db:5432/pragma" -DATABASE_MAX_CONN=5 -TOPIC="pragma-data" -HOST="0.0.0.0" -PORT=3000 -METRICS_PORT=8080 -KAFKA_BROKERS="pragma-kafka:9092" -REDIS_HOST="0.0.0.0" -REDIS_PORT=6379 -OTEL_EXPORTER_OTLP_ENDPOINT=http://signoz.dev.pragma.build:4317 diff --git a/infra/pragma-node/postgres_migrations/03-create-publishers-index.sql b/infra/pragma-node/postgres_migrations/03-create-publishers-index.sql deleted file mode 100644 index 82012a52..00000000 --- a/infra/pragma-node/postgres_migrations/03-create-publishers-index.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE INDEX idx_publisher_name ON publishers (name); \ No newline at end of file diff --git a/infra/pragma-node/postgres_migrations/04-create-timescale-hypertables.sql b/infra/pragma-node/postgres_migrations/04-create-timescale-hypertables.sql deleted file mode 100644 index 858a9505..00000000 --- a/infra/pragma-node/postgres_migrations/04-create-timescale-hypertables.sql +++ /dev/null @@ -1,8 +0,0 @@ -SELECT create_hypertable('mainnet_spot_entry', 'timestamp'); -SELECT create_hypertable('spot_entry', 'timestamp'); -SELECT create_hypertable('mainnet_future_entry', 'timestamp'); -SELECT create_hypertable('future_entry', 'timestamp'); -SELECT create_hypertable('mainnet_spot_checkpoints', 'timestamp'); -SELECT create_hypertable('spot_checkpoints', 'timestamp'); -SELECT create_hypertable('vrf_requests', 'updated_at'); -SELECT create_hypertable('oo_requests', 'updated_at'); diff --git a/infra/pragma-node/postgres_migrations/05-create-timescale-median-aggregates-spot.sql b/infra/pragma-node/postgres_migrations/05-create-timescale-median-aggregates-spot.sql deleted file mode 100644 index e7866207..00000000 --- a/infra/pragma-node/postgres_migrations/05-create-timescale-median-aggregates-spot.sql +++ /dev/null @@ -1,84 +0,0 @@ --- 10 seconds aggregation -CREATE MATERIALIZED VIEW spot_price_10_s_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('10 seconds'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM spot_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT 
add_continuous_aggregate_policy('spot_price_10_s_agg', - start_offset => INTERVAL '1 day', - end_offset => INTERVAL '10 seconds', - schedule_interval => INTERVAL '10 seconds'); - --- 1 minute aggregation -CREATE MATERIALIZED VIEW spot_price_1_min_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 min'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM spot_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('spot_price_1_min_agg', - start_offset => NULL, - end_offset => INTERVAL '1 min', - schedule_interval => INTERVAL '1 min'); - --- 15 minutes aggregation -CREATE MATERIALIZED VIEW spot_price_15_min_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('15 min'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM spot_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('spot_price_15_min_agg', - start_offset => NULL, - end_offset => INTERVAL '15 min', - schedule_interval => INTERVAL '15 min'); - --- 1 hour aggregation -CREATE MATERIALIZED VIEW spot_price_1_hour_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 hour'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM spot_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('spot_price_1_hour_agg', - start_offset => NULL, - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - --- 2 hours aggregation -CREATE MATERIALIZED VIEW spot_price_2_hour_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('2 hour'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM spot_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('spot_price_2_hour_agg', - start_offset => NULL, - end_offset => INTERVAL '2 hour', - schedule_interval => INTERVAL '2 hour'); diff --git a/infra/pragma-node/postgres_migrations/06-create-timescale-median-aggregates-future.sql b/infra/pragma-node/postgres_migrations/06-create-timescale-median-aggregates-future.sql deleted file mode 100644 index 074f817c..00000000 --- a/infra/pragma-node/postgres_migrations/06-create-timescale-median-aggregates-future.sql +++ /dev/null @@ -1,84 +0,0 @@ --- 10 seconds aggregation -CREATE MATERIALIZED VIEW future_price_10_s_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('10 seconds'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('future_price_10_s_agg', - start_offset => INTERVAL '1 day', - end_offset => INTERVAL '10 seconds', - schedule_interval => INTERVAL '10 seconds'); - --- 1 minute aggregation -CREATE MATERIALIZED VIEW future_price_1_min_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS 
SELECT - pair_id, - time_bucket('1 min'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('future_price_1_min_agg', - start_offset => NULL, - end_offset => INTERVAL '1 min', - schedule_interval => INTERVAL '1 min'); - --- 15 minutes aggregation -CREATE MATERIALIZED VIEW future_price_15_min_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('15 min'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('future_price_15_min_agg', - start_offset => NULL, - end_offset => INTERVAL '15 min', - schedule_interval => INTERVAL '15 min'); - --- 1 hour aggregation -CREATE MATERIALIZED VIEW future_price_1_hour_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 hour'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('future_price_1_hour_agg', - start_offset => NULL, - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - --- 2 hours aggregation -CREATE MATERIALIZED VIEW future_price_2_hour_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('2 hour'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('future_price_2_hour_agg', - start_offset => NULL, - end_offset => INTERVAL '2 hour', - schedule_interval => INTERVAL '2 hour'); diff --git a/infra/pragma-node/postgres_migrations/07-create-timescale-median-aggregates-mainnet-spot.sql b/infra/pragma-node/postgres_migrations/07-create-timescale-median-aggregates-mainnet-spot.sql deleted file mode 100644 index 2a071561..00000000 --- a/infra/pragma-node/postgres_migrations/07-create-timescale-median-aggregates-mainnet-spot.sql +++ /dev/null @@ -1,84 +0,0 @@ --- 10 seconds aggregation -CREATE MATERIALIZED VIEW mainnet_spot_price_10_s_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('10 seconds'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM mainnet_spot_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_spot_price_10_s_agg', - start_offset => INTERVAL '1 day', - end_offset => INTERVAL '10 seconds', - schedule_interval => INTERVAL '10 seconds'); - --- 1 minute aggregation -CREATE MATERIALIZED VIEW mainnet_spot_price_1_min_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 min'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM mainnet_spot_entry -GROUP BY bucket, pair_id -WITH NO DATA; - 
-SELECT add_continuous_aggregate_policy('mainnet_spot_price_1_min_agg', - start_offset => NULL, - end_offset => INTERVAL '1 min', - schedule_interval => INTERVAL '1 min'); - --- 15 minutes aggregation -CREATE MATERIALIZED VIEW mainnet_spot_price_15_min_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('15 min'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM mainnet_spot_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_spot_price_15_min_agg', - start_offset => NULL, - end_offset => INTERVAL '15 min', - schedule_interval => INTERVAL '15 min'); - --- 1 hour aggregation -CREATE MATERIALIZED VIEW mainnet_spot_price_1_hour_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 hour'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM mainnet_spot_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_spot_price_1_hour_agg', - start_offset => NULL, - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - --- 2 hours aggregation -CREATE MATERIALIZED VIEW mainnet_spot_price_2_hour_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('2 hour'::interval, timestamp) as bucket, - percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM mainnet_spot_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_spot_price_2_hour_agg', - start_offset => NULL, - end_offset => INTERVAL '2 hour', - schedule_interval => INTERVAL '2 hour'); diff --git a/infra/pragma-node/postgres_migrations/08-create-timescale-median-aggregates-mainnet-future.sql b/infra/pragma-node/postgres_migrations/08-create-timescale-median-aggregates-mainnet-future.sql deleted file mode 100644 index 41d651ee..00000000 --- a/infra/pragma-node/postgres_migrations/08-create-timescale-median-aggregates-mainnet-future.sql +++ /dev/null @@ -1,82 +0,0 @@ -CREATE MATERIALIZED VIEW mainnet_future_price_10_s_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('10 seconds'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM mainnet_future_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_future_price_10_s_agg', - start_offset => INTERVAL '1 day', - end_offset => INTERVAL '10 seconds', - schedule_interval => INTERVAL '10 seconds'); - -CREATE MATERIALIZED VIEW mainnet_future_price_1_min_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 min'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM mainnet_future_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_future_price_1_min_agg', - start_offset => INTERVAL '1 day', - end_offset => INTERVAL '1 min', - schedule_interval => INTERVAL '1 min'); - - -CREATE MATERIALIZED VIEW 
mainnet_future_price_15_min_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('15 min'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM mainnet_future_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_future_price_15_min_agg', - start_offset => NULL, - end_offset => INTERVAL '15 min', - schedule_interval => INTERVAL '15 min'); - - -CREATE MATERIALIZED VIEW mainnet_future_price_1_hour_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 hour'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM mainnet_future_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_future_price_1_hour_agg', - start_offset => NULL, - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - - -CREATE MATERIALIZED VIEW mainnet_future_price_2_hour_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('2 hour'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM mainnet_future_entry -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_future_price_2_hour_agg', - start_offset => NULL, - end_offset => INTERVAL '2 hour', - schedule_interval => INTERVAL '2 hour'); diff --git a/infra/pragma-node/postgres_migrations/09-create-timescale-ohlc-aggregates-spot.sql b/infra/pragma-node/postgres_migrations/09-create-timescale-ohlc-aggregates-spot.sql deleted file mode 100644 index def88b40..00000000 --- a/infra/pragma-node/postgres_migrations/09-create-timescale-ohlc-aggregates-spot.sql +++ /dev/null @@ -1,98 +0,0 @@ --- 2 hours candle -CREATE MATERIALIZED VIEW spot_2_hours_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('2 hours', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM spot_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('spot_2_hours_candle', - start_offset => INTERVAL '6 hours', - end_offset => INTERVAL '2 hours', - schedule_interval => INTERVAL '2 hours'); - - --- 1 hour candle -CREATE MATERIALIZED VIEW spot_1_hour_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 hour', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM spot_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('spot_1_hour_candle', - start_offset => INTERVAL '3 hours', - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - - --- 15 minute candle -CREATE MATERIALIZED VIEW spot_15_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('15 minutes', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket)::numeric AS "open", - MAX(median_price)::numeric AS high, - MIN(median_price)::numeric AS low, - LAST(median_price, bucket)::numeric AS "close" - FROM spot_price_10_s_agg - GROUP BY 
ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('spot_15_min_candle', - start_offset => INTERVAL '45 minutes', - end_offset => INTERVAL '15 minutes', - schedule_interval => INTERVAL '15 minutes'); - - --- 5 minute candle -CREATE MATERIALIZED VIEW spot_5_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('5 minutes', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM spot_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('spot_5_min_candle', - start_offset => INTERVAL '15 minutes', - end_offset => INTERVAL '5 minutes', - schedule_interval => INTERVAL '5 minutes'); - - --- 1 minute candle -CREATE MATERIALIZED VIEW spot_1_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 minute', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM spot_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('spot_1_min_candle', - start_offset => INTERVAL '3 minutes', - end_offset => INTERVAL '1 minute', - schedule_interval => INTERVAL '1 minute'); diff --git a/infra/pragma-node/postgres_migrations/10-create-timescale-ohlc-aggregates-future.sql b/infra/pragma-node/postgres_migrations/10-create-timescale-ohlc-aggregates-future.sql deleted file mode 100644 index 9f779b50..00000000 --- a/infra/pragma-node/postgres_migrations/10-create-timescale-ohlc-aggregates-future.sql +++ /dev/null @@ -1,94 +0,0 @@ --- 2 hours candle -CREATE MATERIALIZED VIEW future_2_hours_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('2 hours', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM future_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('future_2_hours_candle', - start_offset => INTERVAL '6 hours', - end_offset => INTERVAL '2 hours', - schedule_interval => INTERVAL '2 hours'); - --- 1 hour candle -CREATE MATERIALIZED VIEW future_1_hour_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 hour', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM future_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('future_1_hour_candle', - start_offset => INTERVAL '3 hours', - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - --- 15 minute candle -CREATE MATERIALIZED VIEW future_15_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('15 minutes', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket)::numeric AS "open", - MAX(median_price)::numeric AS high, - MIN(median_price)::numeric AS low, - LAST(median_price, bucket)::numeric AS "close" - FROM future_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('future_15_min_candle', - start_offset => INTERVAL '45 minutes', - end_offset => INTERVAL '15 minutes', - schedule_interval => INTERVAL '15 minutes'); - --- 5 minute candle -CREATE MATERIALIZED VIEW future_5_min_candle 
-WITH (timescaledb.continuous) AS - SELECT - time_bucket('5 minutes', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM future_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('future_5_min_candle', - start_offset => INTERVAL '15 minutes', - end_offset => INTERVAL '5 minutes', - schedule_interval => INTERVAL '5 minutes'); - --- 1 minute candle -CREATE MATERIALIZED VIEW future_1_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 minute', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM future_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('future_1_min_candle', - start_offset => INTERVAL '3 minutes', - end_offset => INTERVAL '1 minute', - schedule_interval => INTERVAL '1 minute'); \ No newline at end of file diff --git a/infra/pragma-node/postgres_migrations/11-create-timescale-ohlc-aggregates-mainnet-spot.sql b/infra/pragma-node/postgres_migrations/11-create-timescale-ohlc-aggregates-mainnet-spot.sql deleted file mode 100644 index fd2775bb..00000000 --- a/infra/pragma-node/postgres_migrations/11-create-timescale-ohlc-aggregates-mainnet-spot.sql +++ /dev/null @@ -1,94 +0,0 @@ --- 2 hours candle -CREATE MATERIALIZED VIEW mainnet_spot_2_hours_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('2 hours', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM mainnet_spot_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_spot_2_hours_candle', - start_offset => INTERVAL '6 hours', - end_offset => INTERVAL '2 hours', - schedule_interval => INTERVAL '2 hours'); - --- 1 hour candle -CREATE MATERIALIZED VIEW mainnet_spot_1_hour_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 hour', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM mainnet_spot_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_spot_1_hour_candle', - start_offset => INTERVAL '3 hours', - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - --- 15 minute candle -CREATE MATERIALIZED VIEW mainnet_spot_15_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('15 minutes', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket)::numeric AS "open", - MAX(median_price)::numeric AS high, - MIN(median_price)::numeric AS low, - LAST(median_price, bucket)::numeric AS "close" - FROM mainnet_spot_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_spot_15_min_candle', - start_offset => INTERVAL '45 minutes', - end_offset => INTERVAL '15 minutes', - schedule_interval => INTERVAL '15 minutes'); - --- 5 minute candle -CREATE MATERIALIZED VIEW mainnet_spot_5_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('5 minutes', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - 
MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM mainnet_spot_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_spot_5_min_candle', - start_offset => INTERVAL '15 minutes', - end_offset => INTERVAL '5 minutes', - schedule_interval => INTERVAL '5 minutes'); - --- 1 minute candle -CREATE MATERIALIZED VIEW mainnet_spot_1_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 minute', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM mainnet_spot_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_spot_1_min_candle', - start_offset => INTERVAL '3 minutes', - end_offset => INTERVAL '1 minute', - schedule_interval => INTERVAL '1 minute'); \ No newline at end of file diff --git a/infra/pragma-node/postgres_migrations/12-create-timescale-ohlc-aggregates-mainnet-future.sql b/infra/pragma-node/postgres_migrations/12-create-timescale-ohlc-aggregates-mainnet-future.sql deleted file mode 100644 index 6c971461..00000000 --- a/infra/pragma-node/postgres_migrations/12-create-timescale-ohlc-aggregates-mainnet-future.sql +++ /dev/null @@ -1,94 +0,0 @@ --- 2 hours candle -CREATE MATERIALIZED VIEW mainnet_future_2_hours_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('2 hours', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM mainnet_future_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_future_2_hours_candle', - start_offset => INTERVAL '6 hours', - end_offset => INTERVAL '2 hours', - schedule_interval => INTERVAL '2 hours'); - --- 1 hour candle -CREATE MATERIALIZED VIEW mainnet_future_1_hour_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 hour',bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM mainnet_future_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_future_1_hour_candle', - start_offset => INTERVAL '3 hours', - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - --- 15 minute candle -CREATE MATERIALIZED VIEW mainnet_future_15_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('15 minutes',bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket)::numeric AS "open", - MAX(median_price)::numeric AS high, - MIN(median_price)::numeric AS low, - LAST(median_price, bucket)::numeric AS "close" - FROM mainnet_future_price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('mainnet_future_15_min_candle', - start_offset => INTERVAL '45 minutes', - end_offset => INTERVAL '15 minutes', - schedule_interval => INTERVAL '15 minutes'); - --- 5 minute candle -CREATE MATERIALIZED VIEW mainnet_future_5_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('5 minutes',bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM mainnet_future_price_10_s_agg - GROUP BY ohlc_bucket, 
diff --git a/infra/pragma-node/postgres_migrations/12-create-timescale-ohlc-aggregates-mainnet-future.sql b/infra/pragma-node/postgres_migrations/12-create-timescale-ohlc-aggregates-mainnet-future.sql
deleted file mode 100644
index 6c971461..00000000
--- a/infra/pragma-node/postgres_migrations/12-create-timescale-ohlc-aggregates-mainnet-future.sql
+++ /dev/null
@@ -1,94 +0,0 @@
--- 2 hours candle
-CREATE MATERIALIZED VIEW mainnet_future_2_hours_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('2 hours', bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM mainnet_future_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_future_2_hours_candle',
-    start_offset => INTERVAL '6 hours',
-    end_offset => INTERVAL '2 hours',
-    schedule_interval => INTERVAL '2 hours');
-
--- 1 hour candle
-CREATE MATERIALIZED VIEW mainnet_future_1_hour_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('1 hour',bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM mainnet_future_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_future_1_hour_candle',
-    start_offset => INTERVAL '3 hours',
-    end_offset => INTERVAL '1 hour',
-    schedule_interval => INTERVAL '1 hour');
-
--- 15 minute candle
-CREATE MATERIALIZED VIEW mainnet_future_15_min_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('15 minutes',bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket)::numeric AS "open",
-    MAX(median_price)::numeric AS high,
-    MIN(median_price)::numeric AS low,
-    LAST(median_price, bucket)::numeric AS "close"
-  FROM mainnet_future_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_future_15_min_candle',
-    start_offset => INTERVAL '45 minutes',
-    end_offset => INTERVAL '15 minutes',
-    schedule_interval => INTERVAL '15 minutes');
-
--- 5 minute candle
-CREATE MATERIALIZED VIEW mainnet_future_5_min_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('5 minutes',bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM mainnet_future_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_future_5_min_candle',
-    start_offset => INTERVAL '15 minutes',
-    end_offset => INTERVAL '5 minutes',
-    schedule_interval => INTERVAL '5 minutes');
-
--- 1 minute candle
-CREATE MATERIALIZED VIEW mainnet_future_1_min_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('1 minute',bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM mainnet_future_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_future_1_min_candle',
-    start_offset => INTERVAL '3 minutes',
-    end_offset => INTERVAL '1 minute',
-    schedule_interval => INTERVAL '1 minute');
\ No newline at end of file
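These candle views only pre-compute what could also be derived ad hoc from the underlying 10-second aggregate; the continuous aggregate simply keeps the result incrementally materialized. A sketch of the equivalent one-off query, using names from the file above (not part of the migrations, and it scans the whole aggregate unless a time predicate is added):

-- Sketch: daily OHLC computed directly from the 10-second mainnet future aggregate.
SELECT time_bucket('1 day', bucket) AS ohlc_bucket,
       pair_id,
       FIRST(median_price, bucket) AS "open",
       MAX(median_price) AS high,
       MIN(median_price) AS low,
       LAST(median_price, bucket) AS "close"
FROM mainnet_future_price_10_s_agg
GROUP BY ohlc_bucket, pair_id;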
diff --git a/infra/pragma-node/postgres_migrations/13-add-weekly-and-daily-median-aggregates.sql b/infra/pragma-node/postgres_migrations/13-add-weekly-and-daily-median-aggregates.sql
deleted file mode 100644
index 62061990..00000000
--- a/infra/pragma-node/postgres_migrations/13-add-weekly-and-daily-median-aggregates.sql
+++ /dev/null
@@ -1,131 +0,0 @@
---testnet spot
-CREATE MATERIALIZED VIEW spot_price_1_day_agg
-WITH (timescaledb.continuous, timescaledb.materialized_only = false)
-AS SELECT
-    pair_id,
-    time_bucket('1 day'::interval, timestamp) as bucket,
-    percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price,
-    COUNT(DISTINCT source) as num_sources
-FROM spot_entry
-GROUP BY bucket, pair_id
-WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('spot_price_1_day_agg',
-    start_offset => NULL,
-    end_offset => INTERVAL '1 day',
-    schedule_interval => INTERVAL '1 day');
-
-CREATE MATERIALIZED VIEW spot_price_1_week_agg
-WITH (timescaledb.continuous, timescaledb.materialized_only = false)
-AS SELECT
-    pair_id,
-    time_bucket('1 week'::interval, timestamp) as bucket,
-    percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price,
-    COUNT(DISTINCT source) as num_sources
-FROM spot_entry
-GROUP BY bucket, pair_id
-WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('spot_price_1_week_agg',
-    start_offset => NULL,
-    end_offset => INTERVAL '1 week',
-    schedule_interval => INTERVAL '1 week');
-
---testnet future
-CREATE MATERIALIZED VIEW future_price_1_day_agg
-WITH (timescaledb.continuous, timescaledb.materialized_only = false)
-AS SELECT
-    pair_id,
-    time_bucket('1 day'::interval, timestamp) as bucket,
-    percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price,
-    COUNT(DISTINCT source) as num_sources
-FROM future_entry
-GROUP BY bucket, pair_id
-WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('future_price_1_day_agg',
-    start_offset => NULL,
-    end_offset => INTERVAL '1 day',
-    schedule_interval => INTERVAL '1 day');
-
-CREATE MATERIALIZED VIEW future_price_1_week_agg
-WITH (timescaledb.continuous, timescaledb.materialized_only = false)
-AS SELECT
-    pair_id,
-    time_bucket('1 week'::interval, timestamp) as bucket,
-    percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price,
-    COUNT(DISTINCT source) as num_sources
-FROM future_entry
-GROUP BY bucket, pair_id
-WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('future_price_1_week_agg',
-    start_offset => NULL,
-    end_offset => INTERVAL '1 week',
-    schedule_interval => INTERVAL '1 week');
-
---mainnet spot
-CREATE MATERIALIZED VIEW mainnet_spot_price_1_day_agg
-WITH (timescaledb.continuous, timescaledb.materialized_only = false)
-AS SELECT
-    pair_id,
-    time_bucket('1 day'::interval, timestamp) as bucket,
-    percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price,
-    COUNT(DISTINCT source) as num_sources
-FROM mainnet_spot_entry
-GROUP BY bucket, pair_id
-WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_spot_price_1_day_agg',
-    start_offset => NULL,
-    end_offset => INTERVAL '1 day',
-    schedule_interval => INTERVAL '1 day');
-
-CREATE MATERIALIZED VIEW mainnet_spot_price_1_week_agg
-WITH (timescaledb.continuous, timescaledb.materialized_only = false)
-AS SELECT
-    pair_id,
-    time_bucket('1 week'::interval, timestamp) as bucket,
-    percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price,
-    COUNT(DISTINCT source) as num_sources
-FROM mainnet_spot_entry
-GROUP BY bucket, pair_id
-WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_spot_price_1_week_agg',
-    start_offset => NULL,
-    end_offset => INTERVAL '1 week',
-    schedule_interval => INTERVAL '1 week');
-
---mainnet future
-CREATE MATERIALIZED VIEW mainnet_future_price_1_day_agg
-WITH (timescaledb.continuous, timescaledb.materialized_only = false)
-AS SELECT
-    pair_id,
-    time_bucket('1 day'::interval, timestamp) as bucket,
-    percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price,
-    COUNT(DISTINCT source) as num_sources
-FROM mainnet_future_entry
-GROUP BY bucket, pair_id
-WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_future_price_1_day_agg',
-    start_offset => NULL,
-    end_offset => INTERVAL '1 day',
-    schedule_interval => INTERVAL '1 day');
-
-CREATE MATERIALIZED VIEW mainnet_future_price_1_week_agg
-WITH (timescaledb.continuous, timescaledb.materialized_only = false)
-AS SELECT
-    pair_id,
-    time_bucket('1 week'::interval, timestamp) as bucket,
-    percentile_disc(0.5) WITHIN GROUP (ORDER BY price)::numeric AS median_price,
-    COUNT(DISTINCT source) as num_sources
-FROM mainnet_future_entry
-GROUP BY bucket, pair_id
-WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_future_price_1_week_agg',
-    start_offset => NULL,
-    end_offset => INTERVAL '1 week',
-    schedule_interval => INTERVAL '1 week');
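Unlike the candle views, the daily and weekly median aggregates above are created with timescaledb.materialized_only = false, so reads transparently combine the materialized buckets with raw rows that have not been refreshed yet, and start_offset => NULL makes each scheduled refresh cover the aggregate's entire history. An illustrative read (names from the deleted file; the pair id and the three-source threshold are assumptions):

-- Sketch: last week of daily medians backed by at least three distinct sources.
SELECT bucket, median_price, num_sources
FROM spot_price_1_day_agg
WHERE pair_id = 'BTC/USD'
  AND num_sources >= 3
ORDER BY bucket DESC
LIMIT 7;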
diff --git a/infra/pragma-node/postgres_migrations/14-add-weekly-and-daily-ohlc-aggregates.sql b/infra/pragma-node/postgres_migrations/14-add-weekly-and-daily-ohlc-aggregates.sql
deleted file mode 100644
index e635f15d..00000000
--- a/infra/pragma-node/postgres_migrations/14-add-weekly-and-daily-ohlc-aggregates.sql
+++ /dev/null
@@ -1,154 +0,0 @@
--- testnet spot
-CREATE MATERIALIZED VIEW spot_1_week_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('1 week', bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM spot_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('spot_1_week_candle',
-    start_offset => INTERVAL '3 week',
-    end_offset => INTERVAL '1 week',
-    schedule_interval => INTERVAL '1 week');
-
-CREATE MATERIALIZED VIEW spot_1_day_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('1 day', bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM spot_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('spot_1_day_candle',
-    start_offset => INTERVAL '3 day',
-    end_offset => INTERVAL '1 day',
-    schedule_interval => INTERVAL '1 day');
-
---testnet future
-CREATE MATERIALIZED VIEW future_1_week_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('1 week', bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM future_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('future_1_week_candle',
-    start_offset => INTERVAL '3 week',
-    end_offset => INTERVAL '1 week',
-    schedule_interval => INTERVAL '1 week');
-
-
-CREATE MATERIALIZED VIEW future_1_day_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('1 day', bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM future_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('future_1_day_candle',
-    start_offset => INTERVAL '3 day',
-    end_offset => INTERVAL '1 day',
-    schedule_interval => INTERVAL '1 day');
-
-
--- mainnet spot
-CREATE MATERIALIZED VIEW mainnet_spot_1_week_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('1 week', bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM mainnet_spot_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_spot_1_week_candle',
-    start_offset => INTERVAL '3 week',
-    end_offset => INTERVAL '1 week',
-    schedule_interval => INTERVAL '1 week');
-
-
-CREATE MATERIALIZED VIEW mainnet_spot_1_day_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('1 day', bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM mainnet_spot_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_spot_1_day_candle',
-    start_offset => INTERVAL '3 day',
-    end_offset => INTERVAL '1 day',
-    schedule_interval => INTERVAL '1 day');
-
-
--- mainnet future
--- 1 week candle
-CREATE MATERIALIZED VIEW mainnet_future_1_week_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('1 week', bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM mainnet_future_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_future_1_week_candle',
-    start_offset => INTERVAL '3 week',
-    end_offset => INTERVAL '1 week',
-    schedule_interval => INTERVAL '1 week');
-
--- 1 day candle
-CREATE MATERIALIZED VIEW mainnet_future_1_day_candle
-WITH (timescaledb.continuous) AS
-  SELECT
-    time_bucket('1 day', bucket) AS ohlc_bucket,
-    pair_id,
-    FIRST(median_price, bucket) AS "open",
-    MAX(median_price) AS high,
-    MIN(median_price) AS low,
-    LAST(median_price, bucket) AS "close"
-  FROM mainnet_future_price_10_s_agg
-  GROUP BY ohlc_bucket, pair_id
-  WITH NO DATA;
-
-SELECT add_continuous_aggregate_policy('mainnet_future_1_day_candle',
-    start_offset => INTERVAL '3 day',
-    end_offset => INTERVAL '1 day',
-    schedule_interval => INTERVAL '1 day');
-
diff --git a/openapi.json b/openapi.json index
585dc3fa..2d50182a 100644 --- a/openapi.json +++ b/openapi.json @@ -1,2962 +1 @@ -{ - "openapi": "3.0.3", - "info": { - "title": "pragma-node", - "description": "", - "license": { - "name": "" - }, - "version": "0.1.0" - }, - "servers": [ - { - "url": "https://{environment}.pragma.build", - "variables": { - "environment": { - "default": "api.dev", - "enum": ["api.dev", "api.prod"] - } - } - } - ], - "paths": { - "/node/v1/aggregation/candlestick/{base}/{quote}": { - "get": { - "tags": ["crate::handlers::get_ohlc"], - "operationId": "get_ohlc", - "parameters": [ - { - "name": "base", - "in": "path", - "description": "Base Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "quote", - "in": "path", - "description": "Quote Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "timestamp", - "in": "query", - "description": "The unix timestamp in seconds. This endpoint will return the first update whose\ntimestamp is <= the provided value.", - "required": false, - "schema": { - "allOf": [ - { - "type": "integer" - } - ], - "nullable": true - } - }, - { - "name": "interval", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/Interval" - } - ], - "nullable": true - } - }, - { - "name": "routing", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "nullable": true - } - }, - { - "name": "aggregation", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/AggregationMode" - } - ], - "nullable": true - } - }, - { - "name": "entry_type", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/EntryType" - } - ], - "nullable": true - } - }, - { - "name": "expiry", - "in": "query", - "required": false, - "schema": { - "type": "string", - "nullable": true - } - } - ], - "responses": { - "200": { - "description": "Get OHLC data successfuly", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/GetOHLCResponse" - } - } - } - } - } - } - } - }, - "/node/v1/data/publish": { - "post": { - "tags": ["crate::handlers::create_entry"], - "operationId": "create_entries", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateEntryRequest" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Entries published successfuly", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateEntryResponse" - } - } - } - }, - "401": { - "description": "Unauthorized Publisher", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/EntryError" - } - } - } - } - } - } - }, - "/node/v1/data/publish_future": { - "post": { - "tags": ["crate::handlers::create_future_entry"], - "operationId": "create_future_entries", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateFutureEntryRequest" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Entries published successfuly", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateFutureEntryResponse" - } - } - } - }, - "401": { - "description": "Unauthorized Publisher", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/EntryError" - } - } - } - } - } - } - }, - "/node/v1/data/subscribe": { - "get": { - 
"tags": ["crate::handlers::subscribe_to_entry"], - "operationId": "subscribe_to_entry", - "responses": { - "200": { - "description": "Subscribe to a list of entries", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SubscribeToEntryResponse" - } - } - } - } - } - } - } - }, - "/node/v1/data/{base}/{quote}": { - "get": { - "tags": ["crate::handlers::get_entry"], - "operationId": "get_entry", - "parameters": [ - { - "name": "base", - "in": "path", - "description": "Base Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "quote", - "in": "path", - "description": "Quote Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "timestamp", - "in": "query", - "description": "The unix timestamp in seconds. This endpoint will return the first update whose\ntimestamp is <= the provided value.", - "required": false, - "schema": { - "allOf": [ - { - "type": "integer" - } - ], - "nullable": true - } - }, - { - "name": "interval", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/Interval" - } - ], - "nullable": true - } - }, - { - "name": "routing", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "nullable": true - } - }, - { - "name": "aggregation", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/AggregationMode" - } - ], - "nullable": true - } - }, - { - "name": "entry_type", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/EntryType" - } - ], - "nullable": true - } - }, - { - "name": "expiry", - "in": "query", - "required": false, - "schema": { - "type": "string", - "nullable": true - } - } - ], - "responses": { - "200": { - "description": "Get median entry successfuly", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/GetEntryResponse" - } - } - } - } - } - } - } - }, - "/node/v1/data/{base}/{quote}/future_expiries": { - "get": { - "tags": ["crate::handlers::get_expiries"], - "operationId": "get_expiries", - "parameters": [ - { - "name": "base", - "in": "path", - "description": "Base Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "quote", - "in": "path", - "description": "Quote Asset", - "required": true, - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Get available future expiries for a pair", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/GetEntryResponse" - } - } - } - } - } - } - } - }, - "/node/v1/merkle_feeds/options/{instrument}": { - "get": { - "tags": ["crate::handlers::merkle_feeds::get_option"], - "operationId": "get_merkle_feeds_option", - "parameters": [ - { - "name": "instrument", - "in": "path", - "description": "Name of the instrument", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "network", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/Network" - } - ], - "nullable": true - } - }, - { - "name": "block_id", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/BlockId" - } - ], - "nullable": true - } - } - ], - "responses": { - "200": { - "description": "Get the option", - "content": { - "application/json": { - "schema": { - 
"type": "array", - "items": { - "$ref": "#/components/schemas/GetOptionResponse" - } - } - } - } - } - } - } - }, - "/node/v1/merkle_feeds/proof/{option_hash}": { - "get": { - "tags": ["crate::handlers::merkle_feeds::get_merkle_proof"], - "operationId": "get_merkle_feeds_proof", - "parameters": [ - { - "name": "option_hash", - "in": "path", - "description": "Hexadecimal hash of the option", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "network", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/Network" - } - ], - "nullable": true - } - }, - { - "name": "block_id", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/BlockId" - } - ], - "nullable": true - } - } - ], - "responses": { - "200": { - "description": "Get the merkle proof", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/GetMerkleProofResponse" - } - } - } - } - } - } - } - }, - "/node/v1/onchain/checkpoints/{base}/{quote}": { - "get": { - "tags": ["crate::handlers::onchain::get_checkpoints"], - "operationId": "get_onchain_checkpoints", - "parameters": [ - { - "name": "base", - "in": "path", - "description": "Base Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "quote", - "in": "path", - "description": "Quote Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "network", - "in": "query", - "required": true, - "schema": { - "$ref": "#/components/schemas/Network" - } - }, - { - "name": "limit", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "nullable": true, - "minimum": 0 - } - } - ], - "responses": { - "200": { - "description": "Get the onchain checkpoints for a pair", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GetOnchainCheckpointsResponse" - } - } - } - } - } - } - }, - "/node/v1/onchain/history/{base}/{quote}": { - "get": { - "tags": ["crate::handlers::onchain::get_history"], - "operationId": "get_onchain_history", - "parameters": [ - { - "name": "base", - "in": "path", - "description": "Base Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "quote", - "in": "path", - "description": "Quote Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "network", - "in": "query", - "required": true, - "schema": { - "$ref": "#/components/schemas/Network" - } - }, - { - "name": "timestamp", - "in": "query", - "required": true, - "schema": { - "$ref": "#/components/schemas/TimestampRange" - } - }, - { - "name": "chunk_interval", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/Interval" - } - ], - "nullable": true - } - }, - { - "name": "routing", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "nullable": true - } - } - ], - "responses": { - "200": { - "description": "Get the historical onchain median price", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GetOnchainHistoryResponse" - } - } - } - } - } - } - }, - "/node/v1/onchain/ohlc/subscribe": { - "get": { - "tags": ["crate::handlers::onchain::subscribe_to_ohlc"], - "operationId": "subscribe_to_onchain_ohlc", - "responses": { - "200": { - "description": "Subscribe to a list of OHLC entries", - "content": { - "application/json": { - "schema": { - "type": 
"array", - "items": { - "$ref": "#/components/schemas/SubscribeToEntryResponse" - } - } - } - } - } - } - } - }, - "/node/v1/onchain/publishers": { - "get": { - "tags": ["crate::handlers::onchain::get_publishers"], - "operationId": "get_onchain_publishers", - "parameters": [ - { - "name": "network", - "in": "query", - "required": true, - "schema": { - "$ref": "#/components/schemas/Network" - } - }, - { - "name": "data_type", - "in": "query", - "required": true, - "schema": { - "$ref": "#/components/schemas/DataType" - } - } - ], - "responses": { - "200": { - "description": "Get the onchain publishers", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GetOnchainPublishersResponse" - } - } - } - } - } - } - }, - "/node/v1/onchain/{base}/{quote}": { - "get": { - "tags": ["crate::handlers::onchain::get_entry"], - "operationId": "get_onchain_entry", - "parameters": [ - { - "name": "base", - "in": "path", - "description": "Base Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "quote", - "in": "path", - "description": "Quote Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "network", - "in": "query", - "required": true, - "schema": { - "$ref": "#/components/schemas/Network" - } - }, - { - "name": "aggregation", - "in": "query", - "required": false, - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/AggregationMode" - } - ], - "nullable": true - } - }, - { - "name": "routing", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "nullable": true - } - }, - { - "name": "timestamp", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "nullable": true - } - }, - { - "name": "components", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "nullable": true - } - }, - { - "name": "variations", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "nullable": true - } - } - ], - "responses": { - "200": { - "description": "Get the onchain entry", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GetOnchainEntryResponse" - } - } - } - } - } - } - }, - "/node/v1/volatility/{quote}/{base}": { - "get": { - "tags": ["crate::handlers::get_volatility"], - "operationId": "get_volatility", - "parameters": [ - { - "name": "quote", - "in": "path", - "description": "Quote Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "base", - "in": "path", - "description": "Base Asset", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "start", - "in": "query", - "description": "Initial timestamp, combined with final_timestamp, it helps define the period over which the mean is computed", - "required": true, - "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - }, - { - "name": "end", - "in": "query", - "description": "Final timestamp", - "required": true, - "schema": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - ], - "responses": { - "200": { - "description": "Get realized volatility successfuly", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/GetVolatilityResponse" - } - } - } - } - } - } - } - }, - "node/v1/optimistic/assertions": { - "get": { - "tags": ["crate::handlers::optimistic_oracle::get_assertions"], - "operationId": "get_assertions", - "parameters": [ - { - "name": "status", - "in": 
"query", - "description": "Filter by assertion status", - "required": false, - "schema": { - "type": "string", - "nullable": true - } - }, - { - "name": "page", - "in": "query", - "description": "Page number for pagination", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - } - }, - { - "name": "limit", - "in": "query", - "description": "Number of items per page", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - } - } - ], - "responses": { - "200": { - "description": "Get assertions successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GetAssertionsResponse" - } - } - } - } - } - } - }, - "node/v1/optimistic/assertions/{assertion_id}": { - "get": { - "tags": ["crate::handlers::optimistic_oracle::get_assertion_details"], - "operationId": "get_assertion_details", - "parameters": [ - { - "name": "assertion_id", - "in": "path", - "description": "Unique identifier of the assertion", - "required": true, - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Get assertion details successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AssertionDetails" - } - } - } - } - } - } - }, - "node/v1/optimistic/disputed-assertions": { - "get": { - "tags": ["crate::handlers::optimistic_oracle::get_disputed_assertions"], - "operationId": "get_disputed_assertions", - "parameters": [ - { - "name": "page", - "in": "query", - "description": "Page number for pagination", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - } - }, - { - "name": "limit", - "in": "query", - "description": "Number of items per page", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - } - } - ], - "responses": { - "200": { - "description": "Get disputed assertions successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GetDisputedAssertionsResponse" - } - } - } - } - } - } - }, - "node/v1/optimistic/resolved-assertions": { - "get": { - "tags": ["crate::handlers::optimistic_oracle::get_resolved_assertions"], - "operationId": "get_resolved_assertions", - "parameters": [ - { - "name": "page", - "in": "query", - "description": "Page number for pagination", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - } - }, - { - "name": "limit", - "in": "query", - "description": "Number of items per page", - "required": false, - "schema": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - } - } - ], - "responses": { - "200": { - "description": "Get resolved assertions successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GetResolvedAssertionsResponse" - } - } - } - } - } - } - } - }, - "components": { - "schemas": { - "AggregationMode": { - "type": "string", - "enum": ["median", "mean", "twap"] - }, - "Assertion": { - "type": "object", - "required": [ - "assertion_id", - "claim", - "bond", - "expiration_time", - "identifier", - "status", - "timestamp" - ], - "properties": { - "assertion_id": { - "type": "string" - }, - "bond": { - "type": "integer" - }, - "claim": { - "type": "string" - }, - "expiration_time": { - "type": "string", - "format": "date-time" - }, - "identifier": { - "type": 
"string" - }, - "status": { - "$ref": "#/components/schemas/Status" - }, - "timestamp": { - "type": "string", - "format": "date-time" - } - } - }, - "AssertionDetails": { - "type": "object", - "required": [ - "assertion", - "domain_id", - "asserter", - "disputer", - "disputed", - "callback_recipient", - "caller", - "settled", - "settlement_resolution" - ], - "properties": { - "asserter": { - "type": "string" - }, - "assertion": { - "$ref": "#/components/schemas/Assertion" - }, - "callback_recipient": { - "type": "string" - }, - "caller": { - "type": "string" - }, - "disputed": { - "type": "boolean" - }, - "disputer": { - "type": "string" - }, - "domain_id": { - "type": "string" - }, - "settled": { - "type": "boolean" - }, - "settlement_resolution": { - "$ref": "#/components/schemas/SettlementResolution" - } - } - }, - "AssetOraclePrice": { - "type": "object", - "required": [ - "global_asset_id", - "median_price", - "signature", - "signed_prices" - ], - "properties": { - "global_asset_id": { - "type": "string" - }, - "median_price": { - "type": "string" - }, - "signature": { - "type": "string" - }, - "signed_prices": { - "type": "array", - "items": { - "$ref": "#/components/schemas/SignedPublisherPrice" - } - } - } - }, - "BaseEntry": { - "type": "object", - "required": ["timestamp", "source", "publisher"], - "properties": { - "publisher": { - "type": "string" - }, - "source": { - "type": "string" - }, - "timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - }, - "BlockId": { - "oneOf": [ - { - "type": "object", - "required": ["Tag"], - "properties": { - "Tag": { - "$ref": "#/components/schemas/BlockTag" - } - } - }, - { - "type": "object", - "required": ["Number"], - "properties": { - "Number": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - } - ], - "description": "Block identifier in the form of hash, number or tag." 
- }, - "BlockTag": { - "type": "string", - "description": "Block tag.\n\nA tag specifying a dynamic reference to a block.", - "enum": ["Latest", "Pending"] - }, - "Checkpoint": { - "type": "object", - "required": ["tx_hash", "price", "timestamp", "sender_address"], - "properties": { - "price": { - "type": "string" - }, - "sender_address": { - "type": "string" - }, - "timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "tx_hash": { - "type": "string" - } - } - }, - "CreateEntryRequest": { - "type": "object", - "required": ["signature", "entries"], - "properties": { - "entries": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Entry" - } - }, - "signature": { - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "CreateEntryResponse": { - "type": "object", - "required": ["number_entries_created"], - "properties": { - "number_entries_created": { - "type": "integer", - "minimum": 0 - } - } - }, - "CreateFutureEntryRequest": { - "type": "object", - "required": ["signature", "entries"], - "properties": { - "entries": { - "type": "array", - "items": { - "$ref": "#/components/schemas/FutureEntry" - } - }, - "signature": { - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "CreateFutureEntryResponse": { - "type": "object", - "required": ["number_entries_created"], - "properties": { - "number_entries_created": { - "type": "integer", - "minimum": 0 - } - } - }, - "Currency": { - "type": "object", - "required": ["id", "name", "decimals", "is_abstract"], - "properties": { - "decimals": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "ethereum_address": { - "type": "string", - "nullable": true - }, - "id": { - "type": "string", - "format": "uuid" - }, - "is_abstract": { - "type": "boolean" - }, - "name": { - "type": "string" - } - } - }, - "CurrencyError": { - "oneOf": [ - { - "type": "string", - "enum": ["InternalServerError"] - }, - { - "type": "object", - "required": ["NotFound"], - "properties": { - "NotFound": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["InfraError"], - "properties": { - "InfraError": { - "$ref": "#/components/schemas/InfraError" - } - } - } - ] - }, - "DataType": { - "type": "string", - "enum": ["spot_entry", "perp_entry", "future_entry"] - }, - "DisputedAssertion": { - "type": "object", - "required": ["assertion", "disputer", "disputed_at", "disputed_tx"], - "properties": { - "assertion": { - "$ref": "#/components/schemas/Assertion" - }, - "disputed_at": { - "type": "string", - "format": "date-time" - }, - "disputed_tx": { - "type": "string" - }, - "disputer": { - "type": "string" - } - } - }, - "Entry": { - "type": "object", - "required": [ - "id", - "pair_id", - "publisher", - "source", - "timestamp", - "price" - ], - "properties": { - "id": { - "type": "string", - "format": "uuid" - }, - "pair_id": { - "type": "string" - }, - "price": { - "type": "integer", - "minimum": 0 - }, - "publisher": { - "type": "string" - }, - "publisher_signature": { - "type": "string", - "nullable": true - }, - "source": { - "type": "string" - }, - "timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - }, - "EntryError": { - "oneOf": [ - { - "type": "string", - "enum": ["InternalServerError"] - }, - { - "type": "string", - "enum": ["BadRequest"] - }, - { - "type": "object", - "required": ["NotFound"], - "properties": { - "NotFound": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["InfraError"], - "properties": { - 
"InfraError": { - "$ref": "#/components/schemas/InfraError" - } - } - }, - { - "type": "object", - "required": ["InvalidSignature"], - "properties": { - "InvalidSignature": { - "type": "string" - } - } - }, - { - "type": "string", - "enum": ["InvalidSigner"] - }, - { - "type": "object", - "required": ["Unauthorized"], - "properties": { - "Unauthorized": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["InvalidTimestamp"], - "properties": { - "InvalidTimestamp": { - "type": "string" - } - } - }, - { - "type": "string", - "enum": ["InvalidExpiry"] - }, - { - "type": "object", - "required": ["MissingData"], - "properties": { - "MissingData": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["PublisherError"], - "properties": { - "PublisherError": { - "$ref": "#/components/schemas/PublisherError" - } - } - }, - { - "type": "object", - "required": ["UnknownPairId"], - "properties": { - "UnknownPairId": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["VolatilityError"], - "properties": { - "VolatilityError": { - "$ref": "#/components/schemas/VolatilityError" - } - } - }, - { - "type": "object", - "required": ["PublishData"], - "properties": { - "PublishData": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["BuildPublish"], - "properties": { - "BuildPublish": { - "type": "string" - } - } - } - ] - }, - "EntryType": { - "type": "string", - "enum": ["spot", "perp", "future"] - }, - "FutureEntry": { - "type": "object", - "required": [ - "id", - "pair_id", - "publisher", - "source", - "timestamp", - "publisher_signature", - "price" - ], - "properties": { - "expiration_timestamp": { - "type": "integer", - "format": "int64", - "nullable": true, - "minimum": 0 - }, - "id": { - "type": "string", - "format": "uuid" - }, - "pair_id": { - "type": "string" - }, - "price": { - "type": "integer", - "minimum": 0 - }, - "publisher": { - "type": "string" - }, - "publisher_signature": { - "type": "string" - }, - "source": { - "type": "string" - }, - "timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - }, - "GetAssertionsParams": { - "type": "object", - "properties": { - "limit": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - }, - "page": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - }, - "status": { - "type": "string", - "nullable": true - } - } - }, - "GetAssertionsResponse": { - "type": "object", - "required": [ - "assertions", - "total_count", - "current_page", - "total_pages" - ], - "properties": { - "assertions": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Assertion" - } - }, - "current_page": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "total_count": { - "type": "integer", - "format": "int64" - }, - "total_pages": { - "type": "integer", - "format": "int32", - "minimum": 0 - } - } - }, - "GetDisputedAssertionsParams": { - "type": "object", - "properties": { - "limit": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - }, - "page": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - } - } - }, - "GetDisputedAssertionsResponse": { - "type": "object", - "required": [ - "disputed_assertions", - "total_count", - "current_page", - "total_pages" - ], - "properties": { - "current_page": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "disputed_assertions": { - "type": "array", - "items": { - "$ref": 
"#/components/schemas/DisputedAssertion" - } - }, - "total_count": { - "type": "integer", - "minimum": 0 - }, - "total_pages": { - "type": "integer", - "format": "int32", - "minimum": 0 - } - } - }, - "GetEntryParams": { - "type": "object", - "required": ["timestamp"], - "properties": { - "aggregation": { - "allOf": [ - { - "$ref": "#/components/schemas/AggregationMode" - } - ], - "nullable": true - }, - "entry_type": { - "allOf": [ - { - "$ref": "#/components/schemas/EntryType" - } - ], - "nullable": true - }, - "expiry": { - "type": "string", - "nullable": true - }, - "interval": { - "allOf": [ - { - "$ref": "#/components/schemas/Interval" - } - ], - "nullable": true - }, - "routing": { - "type": "boolean", - "nullable": true - }, - "timestamp": { - "type": "integer", - "format": "int64", - "description": "The unix timestamp in seconds. This endpoint will return the first update whose\ntimestamp is <= the provided value." - } - } - }, - "GetEntryResponse": { - "type": "object", - "required": [ - "num_sources_aggregated", - "pair_id", - "price", - "timestamp", - "decimals" - ], - "properties": { - "decimals": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "num_sources_aggregated": { - "type": "integer", - "minimum": 0 - }, - "pair_id": { - "type": "string" - }, - "price": { - "type": "string" - }, - "timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - }, - "GetMerkleProofQuery": { - "type": "object", - "properties": { - "block_id": { - "allOf": [ - { - "$ref": "#/components/schemas/BlockId" - } - ], - "nullable": true - }, - "network": { - "allOf": [ - { - "$ref": "#/components/schemas/Network" - } - ], - "nullable": true - } - } - }, - "GetMerkleProofResponse": { - "$ref": "#/components/schemas/MerkleProof" - }, - "GetOHLCResponse": { - "type": "object", - "required": ["pair_id", "data"], - "properties": { - "data": { - "type": "array", - "items": { - "$ref": "#/components/schemas/OHLCEntry" - } - }, - "pair_id": { - "type": "string" - } - } - }, - "GetOnchainCheckpointsParams": { - "type": "object", - "required": ["network"], - "properties": { - "limit": { - "type": "integer", - "format": "int64", - "nullable": true, - "minimum": 0 - }, - "network": { - "$ref": "#/components/schemas/Network" - } - } - }, - "GetOnchainCheckpointsResponse": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Checkpoint" - } - }, - "GetOnchainEntryParams": { - "type": "object", - "required": ["network"], - "properties": { - "aggregation": { - "allOf": [ - { - "$ref": "#/components/schemas/AggregationMode" - } - ], - "nullable": true - }, - "components": { - "type": "boolean", - "nullable": true - }, - "network": { - "$ref": "#/components/schemas/Network" - }, - "routing": { - "type": "boolean", - "nullable": true - }, - "timestamp": { - "type": "integer", - "format": "int64", - "nullable": true - }, - "variations": { - "type": "boolean", - "nullable": true - } - } - }, - "GetOnchainEntryResponse": { - "type": "object", - "required": [ - "pair_id", - "last_updated_timestamp", - "price", - "decimals", - "nb_sources_aggregated", - "asset_type" - ], - "properties": { - "asset_type": { - "type": "string" - }, - "components": { - "type": "array", - "items": { - "$ref": "#/components/schemas/OnchainEntry" - }, - "nullable": true - }, - "decimals": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "last_updated_timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "nb_sources_aggregated": { - "type": "integer", - 
"format": "int32", - "minimum": 0 - }, - "pair_id": { - "type": "string" - }, - "price": { - "type": "string" - }, - "variations": { - "type": "object", - "additionalProperties": { - "type": "number", - "format": "float" - }, - "nullable": true - } - } - }, - "GetOnchainHistoryEntry": { - "type": "object", - "required": [ - "pair_id", - "timestamp", - "median_price", - "decimals", - "nb_sources_aggregated" - ], - "properties": { - "decimals": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "median_price": { - "type": "string" - }, - "nb_sources_aggregated": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "pair_id": { - "type": "string" - }, - "timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - }, - "GetOnchainHistoryParams": { - "type": "object", - "required": ["network", "timestamp"], - "properties": { - "chunk_interval": { - "allOf": [ - { - "$ref": "#/components/schemas/Interval" - } - ], - "nullable": true - }, - "network": { - "$ref": "#/components/schemas/Network" - }, - "routing": { - "type": "boolean", - "nullable": true - }, - "timestamp": { - "$ref": "#/components/schemas/TimestampRange" - } - } - }, - "GetOnchainHistoryResponse": { - "type": "array", - "items": { - "$ref": "#/components/schemas/GetOnchainHistoryEntry" - } - }, - "GetOnchainOHLCResponse": { - "type": "object", - "required": ["pair_id", "data"], - "properties": { - "data": { - "type": "array", - "items": { - "$ref": "#/components/schemas/OHLCEntry" - } - }, - "pair_id": { - "type": "string" - } - } - }, - "GetOnchainPublishersParams": { - "type": "object", - "required": ["network", "data_type"], - "properties": { - "data_type": { - "$ref": "#/components/schemas/DataType" - }, - "network": { - "$ref": "#/components/schemas/Network" - } - } - }, - "GetOnchainPublishersResponse": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Publisher" - } - }, - "GetOptionQuery": { - "type": "object", - "properties": { - "block_id": { - "allOf": [ - { - "$ref": "#/components/schemas/BlockId" - } - ], - "nullable": true - }, - "network": { - "allOf": [ - { - "$ref": "#/components/schemas/Network" - } - ], - "nullable": true - } - } - }, - "GetOptionResponse": { - "allOf": [ - { - "$ref": "#/components/schemas/OptionData" - }, - { - "type": "object", - "required": ["hash"], - "properties": { - "hash": { - "type": "string" - } - } - } - ] - }, - "GetResolvedAssertionsParams": { - "type": "object", - "properties": { - "limit": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - }, - "page": { - "type": "integer", - "format": "int32", - "nullable": true, - "minimum": 0 - } - } - }, - "GetResolvedAssertionsResponse": { - "type": "object", - "required": [ - "resolved_assertions", - "total_count", - "current_page", - "total_pages" - ], - "properties": { - "current_page": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "resolved_assertions": { - "type": "array", - "items": { - "$ref": "#/components/schemas/ResolvedAssertion" - } - }, - "total_count": { - "type": "integer", - "minimum": 0 - }, - "total_pages": { - "type": "integer", - "format": "int32", - "minimum": 0 - } - } - }, - "GetVolatilityResponse": { - "type": "object", - "required": ["pair_id", "volatility", "decimals"], - "properties": { - "decimals": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "pair_id": { - "type": "string" - }, - "volatility": { - "type": "number", - "format": "double" - } - } - }, - "InfraError": { - "oneOf": 
[ - { - "type": "string", - "enum": ["InternalServerError"] - }, - { - "type": "string", - "enum": ["RoutingError"] - }, - { - "type": "string", - "enum": ["NotFound"] - }, - { - "type": "string", - "enum": ["DisputerNotSet"] - }, - { - "type": "string", - "enum": ["SettlerNotSet"] - }, - { - "type": "object", - "required": ["InvalidTimestamp"], - "properties": { - "InvalidTimestamp": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["NonZeroU32Conversion"], - "properties": { - "NonZeroU32Conversion": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["AxumError"], - "properties": { - "AxumError": { - "type": "string" - } - } - } - ] - }, - "Instrument": { - "type": "object", - "description": "An instrument.", - "required": [ - "base_currency", - "expiration_date", - "strike_price", - "option_type" - ], - "properties": { - "base_currency": { - "$ref": "#/components/schemas/OptionCurrency" - }, - "expiration_date": { - "type": "string", - "format": "date" - }, - "option_type": { - "$ref": "#/components/schemas/OptionType" - }, - "strike_price": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - }, - "Interval": { - "type": "string", - "enum": ["1min", "15min", "1h", "2h", "1d", "1w"] - }, - "MerkleFeedError": { - "oneOf": [ - { - "type": "string", - "enum": ["InternalServerError"] - }, - { - "type": "string", - "enum": ["RedisConnection"] - }, - { - "type": "object", - "required": ["OptionNotFound"], - "properties": { - "OptionNotFound": { - "type": "array", - "items": { - "type": "object" - }, - "description": "", - "maxItems": 2, - "minItems": 2 - } - } - }, - { - "type": "object", - "required": ["MerkleTreeNotFound"], - "properties": { - "MerkleTreeNotFound": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - }, - { - "type": "object", - "required": ["InvalidOptionHash"], - "properties": { - "InvalidOptionHash": { - "type": "string" - } - } - }, - { - "type": "string", - "enum": ["TreeDeserialization"] - }, - { - "type": "object", - "required": ["MerkleProof"], - "properties": { - "MerkleProof": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["NoBlocks"], - "properties": { - "NoBlocks": { - "type": "string" - } - } - } - ] - }, - "MerkleProof": { - "type": "array", - "items": { - "type": "string" - }, - "description": "The merkle proof but with hexadecimal hashes instead of Field elements." 
- }, - "Network": { - "type": "string", - "enum": ["sepolia", "mainnet"] - }, - "OHLCEntry": { - "type": "object", - "required": ["time", "open", "low", "high", "close"], - "properties": { - "close": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "high": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "low": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "open": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "time": { - "type": "string", - "format": "date-time" - } - } - }, - "OnchainEntry": { - "type": "object", - "required": ["publisher", "source", "price", "tx_hash", "timestamp"], - "properties": { - "price": { - "type": "string" - }, - "publisher": { - "type": "string" - }, - "source": { - "type": "string" - }, - "timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "tx_hash": { - "type": "string" - } - } - }, - "OptimisticOracleError": { - "oneOf": [ - { - "type": "string", - "enum": ["InternalServerError"] - }, - { - "type": "string", - "enum": ["DatabaseConnection"] - }, - { - "type": "object", - "required": ["AssertionDetailsIssue"], - "properties": { - "AssertionDetailsIssue": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["DisputerNotSet"], - "properties": { - "DisputerNotSet": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["SettlerNotSet"], - "properties": { - "SettlerNotSet": { - "type": "string" - } - } - }, - { - "type": "string", - "enum": ["NoAssertionsFound"] - } - ] - }, - "OptionCurrency": { - "type": "string", - "description": "The available currencies supported.", - "enum": ["BTC", "ETH"] - }, - "OptionData": { - "type": "object", - "description": "An instrument option with its mark price for a certain timestamp.", - "required": [ - "instrument_name", - "base_currency", - "current_timestamp", - "mark_price" - ], - "properties": { - "base_currency": { - "$ref": "#/components/schemas/OptionCurrency" - }, - "current_timestamp": { - "type": "integer", - "format": "int64" - }, - "instrument_name": { - "type": "string" - }, - "mark_price": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - }, - "OptionType": { - "type": "string", - "description": "The possible types for an option.", - "enum": ["Put", "Call"] - }, - "PerpEntry": { - "type": "object", - "required": ["base", "pair_id", "price", "volume"], - "properties": { - "base": { - "$ref": "#/components/schemas/BaseEntry" - }, - "pair_id": { - "type": "string" - }, - "price": { - "type": "integer", - "minimum": 0 - }, - "volume": { - "type": "integer", - "minimum": 0 - } - } - }, - "Publisher": { - "type": "object", - "required": [ - "id", - "name", - "master_key", - "active_key", - "account_address", - "active" - ], - "properties": { - "account_address": { - "type": "string" - }, - "active": { - "type": "boolean" - }, - "active_key": { - "type": "string" - }, - "id": { - "type": "string", - "format": "uuid" - }, - "master_key": { - "type": "string" - }, - "name": { - "type": "string" - } - } - }, - "PublisherEntry": { - "type": "object", - "required": [ - "pair_id", - "last_updated_timestamp", - "price", - "source", - "decimals", - "daily_updates" - ], - "properties": { - "daily_updates": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "decimals": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "last_updated_timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "pair_id": { - "type": "string" - 
}, - "price": { - "type": "string" - }, - "source": { - "type": "string" - } - } - }, - "PublisherError": { - "oneOf": [ - { - "type": "string", - "enum": ["InternalServerError"] - }, - { - "type": "object", - "required": ["InvalidKey"], - "properties": { - "InvalidKey": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["InvalidAddress"], - "properties": { - "InvalidAddress": { - "type": "string" - } - } - }, - { - "type": "object", - "required": ["InactivePublisher"], - "properties": { - "InactivePublisher": { - "type": "string" - } - } - }, - { - "type": "string", - "enum": ["NotFound"] - } - ] - }, - "RedisError": { - "oneOf": [ - { - "type": "string", - "enum": ["InternalServerError"] - }, - { - "type": "string", - "enum": ["Connection"] - }, - { - "type": "object", - "required": ["OptionNotFound"], - "properties": { - "OptionNotFound": { - "type": "array", - "items": { - "type": "object" - }, - "description": "", - "maxItems": 2, - "minItems": 2 - } - } - }, - { - "type": "object", - "required": ["MerkleTreeNotFound"], - "properties": { - "MerkleTreeNotFound": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - }, - { - "type": "object", - "required": ["InvalidOptionHash"], - "properties": { - "InvalidOptionHash": { - "type": "string" - } - } - }, - { - "type": "string", - "enum": ["TreeDeserialization"] - }, - { - "type": "object", - "required": ["NoBlocks"], - "properties": { - "NoBlocks": { - "type": "string" - } - } - } - ] - }, - "ResolvedAssertion": { - "type": "object", - "required": [ - "assertion", - "settled_address", - "settlement_resolution", - "settled_at", - "disputed", - "settlement_tx" - ], - "properties": { - "assertion": { - "$ref": "#/components/schemas/Assertion" - }, - "disputed": { - "type": "boolean" - }, - "settled_address": { - "type": "string" - }, - "settled_at": { - "type": "string", - "format": "date-time" - }, - "settlement_resolution": { - "$ref": "#/components/schemas/SettlementResolution" - }, - "settlement_tx": { - "type": "string" - } - } - }, - "SettlementResolution": { - "type": "string", - "enum": ["True", "False", "Undefined"] - }, - "SignedPublisherPrice": { - "type": "object", - "required": [ - "oracle_asset_id", - "oracle_price", - "signing_key", - "signature", - "timestamp" - ], - "properties": { - "oracle_asset_id": { - "type": "string" - }, - "oracle_price": { - "type": "string" - }, - "signature": { - "type": "string" - }, - "signing_key": { - "type": "string" - }, - "timestamp": { - "type": "string" - } - } - }, - "Status": { - "type": "string", - "enum": ["Active", "Disputed", "Settled"] - }, - "SubscribeToEntryResponse": { - "type": "object", - "required": ["oracle_prices", "timestamp"], - "properties": { - "oracle_prices": { - "type": "array", - "items": { - "$ref": "#/components/schemas/AssetOraclePrice" - } - }, - "timestamp": { - "type": "integer", - "format": "int64" - } - } - }, - "TimestampRange": { - "type": "string", - "description": "Represents a range of timestamps" - }, - "VolatilityError": { - "oneOf": [ - { - "type": "object", - "required": ["InvalidTimestampsRange"], - "properties": { - "InvalidTimestampsRange": { - "type": "array", - "items": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "description": "", - "maxItems": 2, - "minItems": 2 - } - } - } - ] - } - }, - "responses": { - "CreateEntryResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "object", - "required": ["number_entries_created"], - "properties": { - 
"number_entries_created": { - "type": "integer", - "minimum": 0 - } - } - } - } - } - }, - "CreateFutureEntryResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "object", - "required": ["number_entries_created"], - "properties": { - "number_entries_created": { - "type": "integer", - "minimum": 0 - } - } - } - } - } - }, - "GetEntryResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "object", - "required": [ - "num_sources_aggregated", - "pair_id", - "price", - "timestamp", - "decimals" - ], - "properties": { - "decimals": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "num_sources_aggregated": { - "type": "integer", - "minimum": 0 - }, - "pair_id": { - "type": "string" - }, - "price": { - "type": "string" - }, - "timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - } - } - } - } - } - }, - "GetMerkleProofResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/MerkleProof" - } - } - } - }, - "GetOHLCResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "object", - "required": ["pair_id", "data"], - "properties": { - "data": { - "type": "array", - "items": { - "$ref": "#/components/schemas/OHLCEntry" - } - }, - "pair_id": { - "type": "string" - } - } - } - } - } - }, - "GetOnchainCheckpointsResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Checkpoint" - } - } - } - } - }, - "GetOnchainEntryResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "object", - "required": [ - "pair_id", - "last_updated_timestamp", - "price", - "decimals", - "nb_sources_aggregated", - "asset_type" - ], - "properties": { - "asset_type": { - "type": "string" - }, - "components": { - "type": "array", - "items": { - "$ref": "#/components/schemas/OnchainEntry" - }, - "nullable": true - }, - "decimals": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "last_updated_timestamp": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "nb_sources_aggregated": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "pair_id": { - "type": "string" - }, - "price": { - "type": "string" - }, - "variations": { - "type": "object", - "additionalProperties": { - "type": "number", - "format": "float" - }, - "nullable": true - } - } - } - } - } - }, - "GetOnchainHistoryResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/GetOnchainHistoryEntry" - } - } - } - } - }, - "GetOnchainOHLCResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "object", - "required": ["pair_id", "data"], - "properties": { - "data": { - "type": "array", - "items": { - "$ref": "#/components/schemas/OHLCEntry" - } - }, - "pair_id": { - "type": "string" - } - } - } - } - } - }, - "GetOnchainPublishersResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Publisher" - } - } - } - } - }, - "GetOptionResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/OptionData" - }, - { - "type": "object", - "required": ["hash"], - "properties": { - "hash": { 
- "type": "string" - } - } - } - ] - } - } - } - }, - "GetVolatilityResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "object", - "required": ["pair_id", "volatility", "decimals"], - "properties": { - "decimals": { - "type": "integer", - "format": "int32", - "minimum": 0 - }, - "pair_id": { - "type": "string" - }, - "volatility": { - "type": "number", - "format": "double" - } - } - } - } - } - }, - "SubscribeToEntryResponse": { - "description": "", - "content": { - "application/json": { - "schema": { - "type": "object", - "required": ["oracle_prices", "timestamp"], - "properties": { - "oracle_prices": { - "type": "array", - "items": { - "$ref": "#/components/schemas/AssetOraclePrice" - } - }, - "timestamp": { - "type": "integer", - "format": "int64" - } - } - } - } - } - } - }, - "securitySchemes": { - "api_key": { - "type": "apiKey", - "in": "header", - "name": "x-api-key" - } - } - }, - "tags": [ - { - "name": "pragma-node", - "description": "Pragma Node API" - } - ] -} +{"openapi":"3.1.0","info":{"title":"pragma-node","description":"","license":{"name":""},"version":"0.1.0"},"servers":[{"url":"https://{environment}.pragma.build","variables":{"environment":{"default":"api.devnet"}}}],"paths":{"/node/v1/aggregation/candlestick/{base}/{quote}":{"get":{"tags":["Market Data"],"operationId":"get_ohlc","parameters":[{"name":"base","in":"path","description":"Base asset symbol","required":true,"schema":{"type":"string"}},{"name":"quote","in":"path","description":"Quote asset symbol","required":true,"schema":{"type":"string"}},{"name":"timestamp","in":"query","description":"The unix timestamp in seconds to retrieve historical price data.\nThis endpoint will return the first update whose timestamp is <= the provided value.\n\nIf not provided, returns the latest available price.\n\n# Examples\n- `1_647_820_800`: Returns price data from March 21, 2022 00:00:00 UTC\n- `null`: Returns the most recent price update\n\nNOTE: This only works for `median` aggregation","required":false,"schema":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/i64"}]}},{"name":"interval","in":"query","description":"Time interval for aggregated price data. 
Different intervals affect how price data is\naggregated and can be used to get OHLC (Open/High/Low/Close) data at various timeframes.\n\n# Available intervals\n- `100ms`: 100 milliseconds - High frequency trading\n- `1s`: 1 second - Real-time trading\n- `5s`: 5 seconds - Short-term price movements\n- `1min`: 1 minute - Intraday trading\n- `15min`: 15 minutes - Medium-term analysis\n- `1h`: 1 hour - Daily trading patterns\n- `2h`: 2 hours (default) - Extended market analysis\n- `1d`: 1 day - Long-term trends\n- `1w`: 1 week - Strategic market overview","required":false,"schema":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/Interval"}]}},{"name":"routing","in":"query","description":"Enable price routing through intermediate pairs.\nWhen true, if a direct price for the requested pair is not available,\nthe system will attempt to calculate it using intermediate pairs.\n\n# Example\nFor BTC/EUR when routing is enabled:\n- If direct BTC/EUR price is unavailable\n- System might route through BTC/USD and EUR/USD\n\nDefault: true","required":false,"schema":{"type":["boolean","null"]}},{"name":"aggregation","in":"query","description":"Method used to aggregate prices from multiple sources.\n\n# Available modes\n- `median`: Middle value (default, more manipulation resistant)\n- `mean`: Average of all values\n- `twap`: Time-Weighted Average Price","required":false,"schema":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/AggregationMode"}]}},{"name":"entry_type","in":"query","description":"Type of market entry to retrieve\n\n# Available types\n- `spot`: Spot market prices (default)\n- `perp`: Perpetual futures prices\n- `future`: Fixed-expiry futures prices","required":false,"schema":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/EntryType"}]}},{"name":"expiry","in":"query","description":"Expiry date for future contracts in ISO 8601 format.\nOnly applicable when `entry_type` is \"future\".\n\n# Example\n- `\"2024-12-31\"`: December 31, 2024 expiry\n- `null`: Not applicable for spot/perp markets","required":false,"schema":{"type":["string","null"]}},{"name":"with_components","in":"query","description":"Include source components in the response.\nWhen true, the response will include price data from individual sources.\n\n# Example\n- `true`: Include source breakdown in response\n- `false`: Return aggregated data only (default)","required":false,"schema":{"type":["boolean","null"]}}],"responses":{"200":{"description":"Successfully retrieved OHLC data","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetOHLCResponse"},"example":{"data":[{"close":"82208749021850000000000","high":"82289627995410000000000","low":"82023393045000000000000","open":"82069269773700000000000","time":"2025-03-10T07:30:00"}],"pair_id":"BTC/USD"}}}},"400":{"description":"Invalid parameters","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EntryError"},"example":{"happened_at":"2025-03-10T08:27:29.324879945Z","message":"Invalid timestamp: Timestamp range error: End timestamp is in the future","resource":"EntryModel"}}}},"404":{"description":"No data found","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EntryError"},"example":{"happened_at":"2025-03-10T08:27:29.324879945Z","message":"Entry not found","resource":"EntryModel"}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EntryError"},"example":{"happened_at":"2025-03-10T08:27:29.324879945Z","message":"Database error: 
connection failed","resource":"EntryModel"}}}}},"security":[{"api_key":[]}]}},"/node/v1/data/multi/stream":{"get":{"tags":["Stream"],"operationId":"stream_entry_multi_pair","parameters":[{"name":"get_entry_params","in":"query","description":"Base parameters for entry requests including interval, aggregation mode, and routing options","required":true,"schema":{"$ref":"#/components/schemas/GetEntryParams"}},{"name":"pairs[]","in":"query","description":"List of trading pairs to stream prices for (e.g. `[\"ETH/USD\", \"BTC/USD\"]`)","required":true,"schema":{"type":"array","items":{"type":"string"}},"example":["ETH/USD","BTC/USD"]},{"name":"historical_prices","in":"query","description":"Number of historical price entries to fetch on initial connection (default: 100)","required":false,"schema":{"type":["integer","null"],"minimum":0},"example":100}],"responses":{"200":{"description":"Server-sent events stream of price entries for multiple pairs","content":{"text/event-stream":{}}}}}},"/node/v1/data/subscribe":{"get":{"tags":["StarkEx Oracle"],"operationId":"subscribe_to_entry","responses":{"101":{"description":"WebSocket connection upgraded successfully"},"403":{"description":"Forbidden - Rate limit exceeded","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EntryError"}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EntryError"},"example":{"error":"Locked: Pragma signer not found"}}}}}}},"/node/v1/data/{base}/{quote}":{"get":{"tags":["Price Data"],"summary":"Get the latest price entry for a trading pair","operationId":"get_entry","parameters":[{"name":"base","in":"path","description":"Base asset symbol (e.g. BTC)","required":true,"schema":{"type":"string"}},{"name":"quote","in":"path","description":"Quote asset symbol (e.g. USD)","required":true,"schema":{"type":"string"}},{"name":"timestamp","in":"query","description":"The unix timestamp in seconds to retrieve historical price data.\nThis endpoint will return the first update whose timestamp is <= the provided value.\n\nIf not provided, returns the latest available price.\n\n# Examples\n- `1_647_820_800`: Returns price data from March 21, 2022 00:00:00 UTC\n- `null`: Returns the most recent price update\n\nNOTE: This only works for `median` aggregation","required":false,"schema":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/i64"}]}},{"name":"interval","in":"query","description":"Time interval for aggregated price data. 
Different intervals affect how price data is\naggregated and can be used to get OHLC (Open/High/Low/Close) data at various timeframes.\n\n# Available intervals\n- `100ms`: 100 milliseconds - High frequency trading\n- `1s`: 1 second - Real-time trading\n- `5s`: 5 seconds - Short-term price movements\n- `1min`: 1 minute - Intraday trading\n- `15min`: 15 minutes - Medium-term analysis\n- `1h`: 1 hour - Daily trading patterns\n- `2h`: 2 hours (default) - Extended market analysis\n- `1d`: 1 day - Long-term trends\n- `1w`: 1 week - Strategic market overview","required":false,"schema":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/Interval"}]}},{"name":"routing","in":"query","description":"Enable price routing through intermediate pairs.\nWhen true, if a direct price for the requested pair is not available,\nthe system will attempt to calculate it using intermediate pairs.\n\n# Example\nFor BTC/EUR when routing is enabled:\n- If direct BTC/EUR price is unavailable\n- System might route through BTC/USD and EUR/USD\n\nDefault: true","required":false,"schema":{"type":["boolean","null"]}},{"name":"aggregation","in":"query","description":"Method used to aggregate prices from multiple sources.\n\n# Available modes\n- `median`: Middle value (default, more manipulation resistant)\n- `mean`: Average of all values\n- `twap`: Time-Weighted Average Price","required":false,"schema":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/AggregationMode"}]}},{"name":"entry_type","in":"query","description":"Type of market entry to retrieve\n\n# Available types\n- `spot`: Spot market prices (default)\n- `perp`: Perpetual futures prices\n- `future`: Fixed-expiry futures prices","required":false,"schema":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/EntryType"}]}},{"name":"expiry","in":"query","description":"Expiry date for future contracts in ISO 8601 format.\nOnly applicable when `entry_type` is \"future\".\n\n# Example\n- `\"2024-12-31\"`: December 31, 2024 expiry\n- `null`: Not applicable for spot/perp markets","required":false,"schema":{"type":["string","null"]}},{"name":"with_components","in":"query","description":"Include source components in the response.\nWhen true, the response will include price data from individual sources.\n\n# Example\n- `true`: Include source breakdown in response\n- `false`: Return aggregated data only (default)","required":false,"schema":{"type":["boolean","null"]}}],"responses":{"200":{"description":"Successfully retrieved price entry","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetEntryResponse"},"example":{"components":[{"price":"0x6cc61113f5871b1000","source":"BINANCE","timestamp":1743082057}],"decimals":18,"num_sources_aggregated":5,"pair_id":"BTC/USD","price":"0x1234567890abcdef","timestamp":1647820800}}}},"400":{"description":"Invalid request parameters","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EntryError"}}}},"404":{"description":"Price entry not found","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EntryError"}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EntryError"}}}}},"security":[{"api_key":[]}]}},"/node/v1/onchain/checkpoints/{base}/{quote}":{"get":{"tags":["crate::handlers::onchain::get_checkpoints"],"operationId":"get_onchain_checkpoints","parameters":[{"name":"base","in":"path","description":"Base Asset","required":true,"schema":{"type":"string"}},{"name":"quote","in":"path","description":"Quote 
Asset","required":true,"schema":{"type":"string"}},{"name":"network","in":"query","required":true,"schema":{"$ref":"#/components/schemas/StarknetNetwork"}},{"name":"limit","in":"query","required":false,"schema":{"type":["integer","null"],"format":"int64","minimum":0}}],"responses":{"200":{"description":"Get the onchain checkpoints for a pair","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetOnchainCheckpointsResponse"}}}}}}},"/node/v1/onchain/history/{base}/{quote}":{"get":{"tags":["crate::handlers::onchain::get_history"],"operationId":"get_onchain_history","parameters":[{"name":"base","in":"path","description":"Base Asset","required":true,"schema":{"type":"string"}},{"name":"quote","in":"path","description":"Quote Asset","required":true,"schema":{"type":"string"}},{"name":"network","in":"query","required":true,"schema":{"$ref":"#/components/schemas/StarknetNetwork"}},{"name":"timestamp","in":"query","required":true,"schema":{"$ref":"#/components/schemas/TimestampRange"}},{"name":"chunk_interval","in":"query","required":false,"schema":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/Interval"}]}},{"name":"routing","in":"query","required":false,"schema":{"type":["boolean","null"]}}],"responses":{"200":{"description":"Get the historical onchain median price","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetOnchainHistoryResponse"}}}}}}},"/node/v1/onchain/publishers":{"get":{"tags":["crate::handlers::onchain::get_publishers"],"operationId":"get_onchain_publishers","parameters":[{"name":"network","in":"query","required":true,"schema":{"$ref":"#/components/schemas/StarknetNetwork"}},{"name":"data_type","in":"query","required":true,"schema":{"$ref":"#/components/schemas/InstrumentType"}}],"responses":{"200":{"description":"Get the onchain publishers","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetOnchainPublishersResponse"}}}}}}},"/node/v1/onchain/{base}/{quote}":{"get":{"tags":["crate::handlers::onchain::get_entry"],"operationId":"get_onchain_entry","parameters":[{"name":"base","in":"path","description":"Base Asset","required":true,"schema":{"type":"string"}},{"name":"quote","in":"path","description":"Quote Asset","required":true,"schema":{"type":"string"}},{"name":"network","in":"query","required":true,"schema":{"$ref":"#/components/schemas/StarknetNetwork"}},{"name":"aggregation","in":"query","required":false,"schema":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/AggregationMode"}]}},{"name":"routing","in":"query","required":false,"schema":{"type":["boolean","null"]}},{"name":"timestamp","in":"query","required":false,"schema":{"type":["integer","null"],"format":"int64"}},{"name":"components","in":"query","required":false,"schema":{"type":["boolean","null"]}},{"name":"variations","in":"query","required":false,"schema":{"type":["boolean","null"]}}],"responses":{"200":{"description":"Get the onchain entry","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetOnchainEntryResponse"}}}}}}}},"components":{"schemas":{"AggregationMode":{"type":"string","enum":["median","twap"]},"AssetOraclePrice":{"type":"object","description":"Price data structure for `StarkEx` oracle integration","required":["global_asset_id","median_price","signature","signed_prices"],"properties":{"global_asset_id":{"type":"string","description":"Global asset identifier in `StarkEx` hex format\nFormat: --00..00","example":"0x534f4c2d5553442d38000000000000"},"median_price":{"type":"string","description":"Median price in 
`StarkEx` 18 decimals format","example":"128065038090000007168"},"signature":{"type":"string","description":"Pragma's signature of the price data in `StarkEx` format","example":"0x02ba39e956bb5b29a0fab31d61c7678228f79dddee2998b4ff3de5c7a6ae1e770636712af81b0506749555e1439004b4ce905419d2ba946b9bd06eb87de7a167"},"signed_prices":{"type":"array","items":{"$ref":"#/components/schemas/SignedPublisherPrice"},"description":"Individual signed prices from publishers"}}},"Checkpoint":{"type":"object","required":["tx_hash","price","timestamp","sender_address"],"properties":{"price":{"type":"string"},"sender_address":{"type":"string"},"timestamp":{"type":"integer","format":"int64","minimum":0},"tx_hash":{"type":"string"}}},"ConversionError":{"oneOf":[{"type":"string","enum":["FailedSerialization"]},{"type":"string","enum":["InvalidDateTime"]},{"type":"string","enum":["BigDecimalConversion"]},{"type":"string","enum":["FeltConversion"]},{"type":"string","enum":["U128Conversion"]},{"type":"string","enum":["StringTimestampConversion"]},{"type":"string","enum":["StringPriceConversion"]},{"type":"object","required":["FailedSignature"],"properties":{"FailedSignature":{"type":"string"}}}]},"Entry":{"type":"object","required":["id","pair_id","publisher","source","timestamp","price"],"properties":{"id":{"type":"string","format":"uuid"},"pair_id":{"type":"string"},"price":{"type":"integer","minimum":0},"publisher":{"type":"string"},"publisher_signature":{"type":["string","null"]},"source":{"type":"string"},"timestamp":{"type":"integer","format":"int64","minimum":0}}},"EntryComponent":{"type":"object","required":["source","price","timestamp"],"properties":{"price":{"type":"string"},"source":{"type":"string"},"timestamp":{"type":"integer","format":"int64","minimum":0}}},"EntryError":{"oneOf":[{"type":"object","required":["InvalidSignature"],"properties":{"InvalidSignature":{"$ref":"#/components/schemas/SignerError"}},"example":"invalid signature"},{"type":"object","required":["InvalidTimestamp"],"properties":{"InvalidTimestamp":{"$ref":"#/components/schemas/TimestampError"}},"example":"invalid timestamp"},{"type":"string","enum":["InvalidExpiry"],"example":"invalid expiry"},{"type":"object","required":["InvalidInterval"],"properties":{"InvalidInterval":{"type":"array","items":{"type":"object"},"maxItems":2,"minItems":2}},"example":"unsupported interval 1s for aggregation median"},{"type":"object","required":["InvalidOnchainInterval"],"properties":{"InvalidOnchainInterval":{"$ref":"#/components/schemas/Interval"}},"example":"unsupported interval 1s for onchain 
data"},{"type":"object","required":["InvalidLoginMessage"],"properties":{"InvalidLoginMessage":{"type":"string"}}},{"type":"object","required":["InvalidDataTypeForNetwork"],"properties":{"InvalidDataTypeForNetwork":{"type":"array","items":{"type":"object"},"maxItems":2,"minItems":2}}},{"type":"object","required":["Unauthorized"],"properties":{"Unauthorized":{"type":"string"}}},{"type":"object","required":["PairNotFound"],"properties":{"PairNotFound":{"type":"string"}}},{"type":"object","required":["EntryNotFound"],"properties":{"EntryNotFound":{"type":"string"}}},{"type":"object","required":["PublisherNotFound"],"properties":{"PublisherNotFound":{"type":"string"}}},{"type":"object","required":["RouteNotFound"],"properties":{"RouteNotFound":{"type":"string"}}},{"type":"string","enum":["HistoryNotFound"]},{"type":"object","required":["PublisherError"],"properties":{"PublisherError":{"$ref":"#/components/schemas/PublisherError"}}},{"type":"object","required":["PublishData"],"properties":{"PublishData":{"type":"string"}}},{"type":"object","required":["BuildPublish"],"properties":{"BuildPublish":{"type":"string"}}},{"type":"string","enum":["InvalidSigner"]},{"type":"object","required":["DatabaseError"],"properties":{"DatabaseError":{"type":"string"}}},{"type":"object","required":["InternalServerError"],"properties":{"InternalServerError":{"type":"string"}}},{"type":"object","required":["WebSocketError"],"properties":{"WebSocketError":{"$ref":"#/components/schemas/WebSocketError"}}}],"example":{"code":"UNAUTHORIZED","message":"Unauthorized request: Invalid API key","timestamp":"2024-03-20T10:30:00Z"}},"EntryType":{"type":"string","enum":["spot","perp","future"]},"FutureEntry":{"type":"object","required":["id","pair_id","publisher","source","timestamp","publisher_signature","price"],"properties":{"expiration_timestamp":{"type":["integer","null"],"format":"int64","minimum":0},"id":{"type":"string","format":"uuid"},"pair_id":{"type":"string"},"price":{"type":"integer","minimum":0},"publisher":{"type":"string"},"publisher_signature":{"type":"string"},"source":{"type":"string"},"timestamp":{"type":"integer","format":"int64","minimum":0}}},"GetEntryParams":{"type":"object","description":"Parameters for retrieving price entries","required":["timestamp"],"properties":{"aggregation":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/AggregationMode","description":"Method used to aggregate prices from multiple sources.\n\n# Available modes\n- `median`: Middle value (default, more manipulation resistant)\n- `mean`: Average of all values\n- `twap`: Time-Weighted Average Price"}]},"entry_type":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/EntryType","description":"Type of market entry to retrieve\n\n# Available types\n- `spot`: Spot market prices (default)\n- `perp`: Perpetual futures prices\n- `future`: Fixed-expiry futures prices"}]},"expiry":{"type":["string","null"],"description":"Expiry date for future contracts in ISO 8601 format.\nOnly applicable when `entry_type` is \"future\".\n\n# Example\n- `\"2024-12-31\"`: December 31, 2024 expiry\n- `null`: Not applicable for spot/perp markets","example":"2024-12-31"},"interval":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/Interval","description":"Time interval for aggregated price data. 
Different intervals affect how price data is\naggregated and can be used to get OHLC (Open/High/Low/Close) data at various timeframes.\n\n# Available intervals\n- `100ms`: 100 milliseconds - High frequency trading\n- `1s`: 1 second - Real-time trading\n- `5s`: 5 seconds - Short-term price movements\n- `1min`: 1 minute - Intraday trading\n- `15min`: 15 minutes - Medium-term analysis\n- `1h`: 1 hour - Daily trading patterns\n- `2h`: 2 hours (default) - Extended market analysis\n- `1d`: 1 day - Long-term trends\n- `1w`: 1 week - Strategic market overview"}]},"routing":{"type":["boolean","null"],"description":"Enable price routing through intermediate pairs.\nWhen true, if a direct price for the requested pair is not available,\nthe system will attempt to calculate it using intermediate pairs.\n\n# Example\nFor BTC/EUR when routing is enabled:\n- If direct BTC/EUR price is unavailable\n- System might route through BTC/USD and EUR/USD\n\nDefault: true","example":true},"timestamp":{"type":"integer","format":"int64","description":"The unix timestamp in seconds to retrieve historical price data.\nThis endpoint will return the first update whose timestamp is <= the provided value.\n\nIf not provided, returns the latest available price.\n\n# Examples\n- `1_647_820_800`: Returns price data from March 21, 2022 00:00:00 UTC\n- `null`: Returns the most recent price update\n\nNOTE: This only works for `median` aggregation","example":1647820800},"with_components":{"type":["boolean","null"],"description":"Include source components in the response.\nWhen true, the response will include price data from individual sources.\n\n# Example\n- `true`: Include source breakdown in response\n- `false`: Return aggregated data only (default)","example":false}}},"GetEntryResponse":{"type":"object","required":["num_sources_aggregated","pair_id","price","timestamp","decimals"],"properties":{"components":{"type":["array","null"],"items":{"$ref":"#/components/schemas/EntryComponent"}},"decimals":{"type":"integer","format":"int32","minimum":0},"num_sources_aggregated":{"type":"integer","minimum":0},"pair_id":{"type":"string"},"price":{"type":"string"},"timestamp":{"type":"integer","format":"int64","minimum":0}}},"GetOHLCResponse":{"type":"object","description":"Response containing OHLC (candlestick) data for a trading pair","required":["pair_id","data"],"properties":{"data":{"type":"array","items":{"$ref":"#/components/schemas/OHLCEntry"},"description":"Array of OHLC entries ordered by timestamp"},"pair_id":{"type":"string","description":"Trading pair identifier (e.g., 
\"BTC/USD\")"}},"example":{"data":[{"close":"82208749021850000000000","high":"82289627995410000000000","low":"82023393045000000000000","open":"82069269773700000000000","time":"2025-03-10T07:30:00"}],"pair_id":"BTC/USD"}},"GetOnchainCheckpointsParams":{"type":"object","required":["network"],"properties":{"limit":{"type":["integer","null"],"format":"int64","minimum":0},"network":{"$ref":"#/components/schemas/StarknetNetwork"}}},"GetOnchainCheckpointsResponse":{"type":"array","items":{"$ref":"#/components/schemas/Checkpoint"}},"GetOnchainEntryParams":{"type":"object","required":["network"],"properties":{"aggregation":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/AggregationMode"}]},"components":{"type":["boolean","null"]},"network":{"$ref":"#/components/schemas/StarknetNetwork"},"routing":{"type":["boolean","null"]},"timestamp":{"type":["integer","null"],"format":"int64"},"variations":{"type":["boolean","null"]}}},"GetOnchainEntryResponse":{"type":"object","required":["pair_id","last_updated_timestamp","price","decimals","nb_sources_aggregated","asset_type"],"properties":{"asset_type":{"type":"string"},"components":{"type":["array","null"],"items":{"$ref":"#/components/schemas/OnchainEntry"}},"decimals":{"type":"integer","format":"int32","minimum":0},"last_updated_timestamp":{"type":"integer","format":"int64","minimum":0},"nb_sources_aggregated":{"type":"integer","format":"int32","minimum":0},"pair_id":{"type":"string"},"price":{"type":"string"},"variations":{"type":["object","null"],"additionalProperties":{"type":"number","format":"float"},"propertyNames":{"type":"string","enum":["100ms","1s","5s","10s","1min","5min","15min","1h","2h","1d","1w"]}}}},"GetOnchainHistoryEntry":{"type":"object","required":["pair_id","timestamp","median_price","decimals","nb_sources_aggregated"],"properties":{"decimals":{"type":"integer","format":"int32","minimum":0},"median_price":{"type":"string"},"nb_sources_aggregated":{"type":"integer","format":"int32","minimum":0},"pair_id":{"type":"string"},"timestamp":{"type":"integer","format":"int64","minimum":0}}},"GetOnchainHistoryParams":{"type":"object","required":["network","timestamp"],"properties":{"chunk_interval":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/Interval"}]},"network":{"$ref":"#/components/schemas/StarknetNetwork"},"routing":{"type":["boolean","null"]},"timestamp":{"$ref":"#/components/schemas/TimestampRange"}}},"GetOnchainHistoryResponse":{"type":"array","items":{"$ref":"#/components/schemas/GetOnchainHistoryEntry"}},"GetOnchainOHLCResponse":{"type":"object","required":["pair_id","data"],"properties":{"data":{"type":"array","items":{"$ref":"#/components/schemas/OHLCEntry"}},"pair_id":{"type":"string"}}},"GetOnchainPublishersParams":{"type":"object","required":["network","data_type"],"properties":{"data_type":{"$ref":"#/components/schemas/InstrumentType"},"network":{"$ref":"#/components/schemas/StarknetNetwork"}}},"GetOnchainPublishersResponse":{"type":"array","items":{"$ref":"#/components/schemas/Publisher"}},"InstrumentType":{"type":"string","enum":["Spot","Perp"]},"Interval":{"type":"string","enum":["100ms","1s","5s","10s","1min","5min","15min","1h","2h","1d","1w"]},"OHLCEntry":{"type":"object","required":["time","open","low","high","close"],"properties":{"close":{"type":"integer","format":"int64","minimum":0},"high":{"type":"integer","format":"int64","minimum":0},"low":{"type":"integer","format":"int64","minimum":0},"open":{"type":"integer","format":"int64","minimum":0},"time":{"type":"string","format":"date-time"}}},"Onchain
Entry":{"type":"object","required":["publisher","source","price","tx_hash","timestamp"],"properties":{"price":{"type":"string"},"publisher":{"type":"string"},"source":{"type":"string"},"timestamp":{"type":"integer","format":"int64","minimum":0},"tx_hash":{"type":"string"}}},"Publisher":{"type":"object","required":["publisher","website_url","type","nb_feeds","daily_updates","total_updates","components"],"properties":{"components":{"type":"array","items":{"$ref":"#/components/schemas/PublisherEntry"}},"daily_updates":{"type":"integer","format":"int32","minimum":0},"last_updated_timestamp":{"type":["integer","null"],"format":"int64","minimum":0},"nb_feeds":{"type":"integer","format":"int32","minimum":0},"publisher":{"type":"string"},"total_updates":{"type":"integer","format":"int32","minimum":0},"type":{"type":"integer","format":"int32","minimum":0},"website_url":{"type":"string"}}},"PublisherEntry":{"type":"object","required":["pair_id","last_updated_timestamp","price","source","decimals","daily_updates"],"properties":{"daily_updates":{"type":"integer","format":"int32","minimum":0},"decimals":{"type":"integer","format":"int32","minimum":0},"last_updated_timestamp":{"type":"integer","format":"int64","minimum":0},"pair_id":{"type":"string"},"price":{"type":"string"},"source":{"type":"string"}}},"PublisherError":{"oneOf":[{"type":"object","required":["InvalidKey"],"properties":{"InvalidKey":{"type":"string"}}},{"type":"object","required":["InvalidAddress"],"properties":{"InvalidAddress":{"type":"string"}}},{"type":"object","required":["InactivePublisher"],"properties":{"InactivePublisher":{"type":"string"}}},{"type":"string","enum":["NotFound"]},{"type":"string","enum":["InternalServerError"]}]},"SignedPublisherPrice":{"type":"object","description":"Response format for `StarkEx` price subscriptions","required":["oracle_asset_id","oracle_price","signing_key","timestamp"],"properties":{"oracle_asset_id":{"type":"string","description":"StarkEx-specific asset identifier in hex format\nFormat: 00..00PRAGMA00","example":"0x534f4c55534400000000000000000000505241474d4100"},"oracle_price":{"type":"string","description":"Price in `StarkEx` 18 decimals","example":"128065038090000000000"},"signing_key":{"type":"string","description":"Public key of the price signer (Pragma's `StarkEx` key)","example":"0x624EBFB99865079BD58CFCFB925B6F5CE940D6F6E41E118B8A72B7163FB435C"},"timestamp":{"type":"string","description":"Unix timestamp as string","example":"1741594457"}}},"SignerError":{"oneOf":[{"type":"object","required":["ConversionError"],"properties":{"ConversionError":{"$ref":"#/components/schemas/ConversionError"}}},{"type":"object","required":["SigningError"],"properties":{"SigningError":{"type":"string"}}},{"type":"object","required":["InvalidSignature"],"properties":{"InvalidSignature":{"type":"string"}}},{"type":"object","required":["Unauthorized"],"properties":{"Unauthorized":{"type":"string"}}},{"type":"object","required":["InvalidMessage"],"properties":{"InvalidMessage":{"type":"string"}}}]},"StarknetNetwork":{"type":"string","enum":["starknet-mainnet","starknet-sepolia"]},"StreamEntryMultipairParams":{"allOf":[{"$ref":"#/components/schemas/GetEntryParams","description":"Base parameters for entry requests including interval, aggregation mode, and routing options"},{"type":"object","required":["pairs[]"],"properties":{"historical_prices":{"type":["integer","null"],"description":"Number of historical price entries to fetch on initial connection (default: 
100)","minimum":0},"pairs[]":{"type":"array","items":{"type":"string"},"description":"List of trading pairs to stream prices for (e.g. `[\"ETH/USD\", \"BTC/USD\"]`)"}}}]},"SubscribeToEntryResponse":{"type":"object","description":"WebSocket response message for `StarkEx` price updates","required":["oracle_prices","timestamp"],"properties":{"oracle_prices":{"type":"array","items":{"$ref":"#/components/schemas/AssetOraclePrice"},"description":"Array of price data for subscribed assets"},"timestamp":{"type":"integer","format":"int64","description":"Unix timestamp of the update","example":1741594458}},"example":{"oracle_prices":[{"global_asset_id":"0x534f4c2d5553442d38000000000000","median_price":"128065038090000007168","signature":"0x02ba39e956bb5b29a0fab31d61c7678228f79dddee2998b4ff3de5c7a6ae1e770636712af81b0506749555e1439004b4ce905419d2ba946b9bd06eb87de7a167","signed_prices":[{"oracle_asset_id":"0x534f4c55534400000000000000000000505241474d4100","oracle_price":"128065038090000000000","signing_key":"0x624EBFB99865079BD58CFCFB925B6F5CE940D6F6E41E118B8A72B7163FB435C","timestamp":"1741594457"}]}],"timestamp":1741594458}},"SubscribeToPriceResponse":{"type":"object","required":["oracle_prices","timestamp"],"properties":{"oracle_prices":{"type":"array","items":{"$ref":"#/components/schemas/AssetOraclePrice"}},"timestamp":{"type":"integer","format":"int64"}}},"TimestampError":{"oneOf":[{"type":"object","required":["RangeError"],"properties":{"RangeError":{"$ref":"#/components/schemas/TimestampRangeError"}}},{"type":"object","required":["ToDatetimeErrorU64"],"properties":{"ToDatetimeErrorU64":{"type":"integer","format":"int64","minimum":0}}},{"type":"object","required":["ToDatetimeErrorI64"],"properties":{"ToDatetimeErrorI64":{"type":"integer","format":"int64"}}},{"type":"object","required":["Other"],"properties":{"Other":{"type":"string"}}}]},"TimestampRange":{"type":"string","description":"Represents a range of 
timestamps"},"TimestampRangeError":{"oneOf":[{"type":"string","enum":["StartAfterEnd"]},{"type":"string","enum":["EndInFuture"]},{"type":"string","enum":["StartEqualsEnd"]},{"type":"string","enum":["ConversionError"]},{"type":"object","required":["Other"],"properties":{"Other":{"type":"string"}}}]},"WebSocketError":{"oneOf":[{"type":"string","enum":["ChannelInit"]},{"type":"object","required":["MessageDecode"],"properties":{"MessageDecode":{"type":"string"}}},{"type":"string","enum":["ChannelClose"]}]}},"responses":{"AssetOraclePrice":{"description":"","content":{"application/json":{"schema":{"type":"object","required":["num_sources_aggregated","pair_id","price"],"properties":{"num_sources_aggregated":{"type":"integer","minimum":0},"pair_id":{"type":"string"},"price":{"type":"string"}}}}}},"EntryComponent":{"description":"","content":{"application/json":{"schema":{"type":"object","required":["source","price","timestamp"],"properties":{"price":{"type":"string"},"source":{"type":"string"},"timestamp":{"type":"integer","format":"int64","minimum":0}}}}}},"GetEntryResponse":{"description":"","content":{"application/json":{"schema":{"type":"object","required":["num_sources_aggregated","pair_id","price","timestamp","decimals"],"properties":{"components":{"type":["array","null"],"items":{"$ref":"#/components/schemas/EntryComponent"}},"decimals":{"type":"integer","format":"int32","minimum":0},"num_sources_aggregated":{"type":"integer","minimum":0},"pair_id":{"type":"string"},"price":{"type":"string"},"timestamp":{"type":"integer","format":"int64","minimum":0}}}}}},"GetOHLCResponse":{"description":"Response containing OHLC (candlestick) data for a trading pair","content":{"application/json":{"schema":{"type":"object","description":"Response containing OHLC (candlestick) data for a trading pair","required":["pair_id","data"],"properties":{"data":{"type":"array","items":{"$ref":"#/components/schemas/OHLCEntry"},"description":"Array of OHLC entries ordered by timestamp"},"pair_id":{"type":"string","description":"Trading pair identifier (e.g., 
\"BTC/USD\")"}}}}}},"GetOnchainCheckpointsResponse":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/Checkpoint"}}}}},"GetOnchainEntryResponse":{"description":"","content":{"application/json":{"schema":{"type":"object","required":["pair_id","last_updated_timestamp","price","decimals","nb_sources_aggregated","asset_type"],"properties":{"asset_type":{"type":"string"},"components":{"type":["array","null"],"items":{"$ref":"#/components/schemas/OnchainEntry"}},"decimals":{"type":"integer","format":"int32","minimum":0},"last_updated_timestamp":{"type":"integer","format":"int64","minimum":0},"nb_sources_aggregated":{"type":"integer","format":"int32","minimum":0},"pair_id":{"type":"string"},"price":{"type":"string"},"variations":{"type":["object","null"],"additionalProperties":{"type":"number","format":"float"},"propertyNames":{"type":"string","enum":["100ms","1s","5s","10s","1min","5min","15min","1h","2h","1d","1w"]}}}}}}},"GetOnchainHistoryResponse":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/GetOnchainHistoryEntry"}}}}},"GetOnchainOHLCResponse":{"description":"","content":{"application/json":{"schema":{"type":"object","required":["pair_id","data"],"properties":{"data":{"type":"array","items":{"$ref":"#/components/schemas/OHLCEntry"}},"pair_id":{"type":"string"}}}}}},"GetOnchainPublishersResponse":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/Publisher"}}}}},"SubscribeToEntryResponse":{"description":"WebSocket response message for `StarkEx` price updates","content":{"application/json":{"schema":{"type":"object","description":"WebSocket response message for `StarkEx` price updates","required":["oracle_prices","timestamp"],"properties":{"oracle_prices":{"type":"array","items":{"$ref":"#/components/schemas/AssetOraclePrice"},"description":"Array of price data for subscribed assets"},"timestamp":{"type":"integer","format":"int64","description":"Unix timestamp of the update","example":1741594458}}}}}},"SubscribeToPriceResponse":{"description":"","content":{"application/json":{"schema":{"type":"object","required":["oracle_prices","timestamp"],"properties":{"oracle_prices":{"type":"array","items":{"$ref":"#/components/schemas/AssetOraclePrice"}},"timestamp":{"type":"integer","format":"int64"}}}}}}},"securitySchemes":{"api_key":{"type":"apiKey","in":"header","name":"x-api-key"}}},"tags":[{"name":"pragma-node","description":"Pragma Node API"}]} \ No newline at end of file diff --git a/pragma-common/Cargo.toml b/pragma-common/Cargo.toml deleted file mode 100644 index c5146ff6..00000000 --- a/pragma-common/Cargo.toml +++ /dev/null @@ -1,43 +0,0 @@ -[package] -name = "pragma-common" -version = "0.1.0" -edition = "2021" -license = "MIT" -homepage = "https://pragma.build/" -repository = "https://github.com/astraly-labs/pragma-node/" -description = "Common utilities and types for Pragma SDKs" -readme = "README.md" -keywords = ["pragma", "sdk", "consumer", "data", "feeds"] - -[lints] -workspace = true - -[dependencies] -axum = { workspace = true } -bigdecimal = { workspace = true, features = ["serde"] } -cainome = { workspace = true } -chrono = { workspace = true } -color-eyre = { workspace = true } -deadpool-diesel = { workspace = true } -futures-util = { workspace = true } -indexmap = { workspace = true } -opentelemetry = { workspace = true } -opentelemetry-appender-tracing = { workspace = true } -opentelemetry-otlp 
= { workspace = true }
-opentelemetry-semantic-conventions = { workspace = true }
-opentelemetry_sdk = { workspace = true }
-serde = { workspace = true, features = ["derive"] }
-serde_json = { workspace = true }
-starknet = { workspace = true }
-starknet-crypto = { workspace = true }
-strum = { workspace = true, features = ["derive"] }
-thiserror = { workspace = true }
-tokio = { workspace = true }
-tracing = { workspace = true }
-tracing-opentelemetry = { workspace = true }
-tracing-subscriber = { workspace = true, features = ["env-filter"] }
-utoipa = { workspace = true }
-uuid = { workspace = true }
-
-[dev-dependencies]
-rstest = { workspace = true }
diff --git a/pragma-common/README.md b/pragma-common/README.md
deleted file mode 100644
index d41186f1..00000000
--- a/pragma-common/README.md
+++ /dev/null
@@ -1,6 +0,0 @@
-# Pragma Common
-
-Common utilities and types used in Pragma rust libraries.
-
-This includes a simple [Merkle Tree implementation](./src/types/merkle_tree.rs) used
-in Merkle Feeds.
diff --git a/pragma-common/src/errors.rs b/pragma-common/src/errors.rs
deleted file mode 100644
index 68e0be20..00000000
--- a/pragma-common/src/errors.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-use utoipa::ToSchema;
-
-#[derive(Debug, thiserror::Error, ToSchema)]
-pub enum ConversionError {
-    #[error("failed to serialize")]
-    FailedSerialization,
-    #[error("invalid date time")]
-    InvalidDateTime,
-    #[error("failed to convert big decimal")]
-    BigDecimalConversion,
-    #[error("failed to convert felt")]
-    FeltConversion,
-    #[error("failed to convert u128")]
-    U128Conversion,
-}
diff --git a/pragma-common/src/hash.rs b/pragma-common/src/hash.rs
deleted file mode 100644
index daf8fa59..00000000
--- a/pragma-common/src/hash.rs
+++ /dev/null
@@ -1,13 +0,0 @@
-use std::cmp::Ordering;
-
-use starknet::core::types::Felt;
-
-/// The first element A of a pedersen hash (A,B) follows the rule:
-/// A <= B
-pub fn pedersen_hash(a: &Felt, b: &Felt) -> Felt {
-    let (a_sorted, b_sorted) = match a.cmp(b) {
-        Ordering::Less | Ordering::Equal => (a, b),
-        Ordering::Greater => (b, a),
-    };
-    starknet::core::crypto::pedersen_hash(a_sorted, b_sorted)
-}
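Context for the removed hash.rs above: sorting the operands makes the Pedersen hash commutative, which is what lets merkle proofs omit left/right direction bits. A minimal self-contained sketch of the same rule, assuming only the starknet crate the deleted code already used; the sample felts and main are illustrative:

use std::cmp::Ordering;
use starknet::core::types::Felt;

// Same rule as the deleted pragma-common helper: always hash (min, max).
fn pedersen_hash(a: &Felt, b: &Felt) -> Felt {
    let (lo, hi) = match a.cmp(b) {
        Ordering::Less | Ordering::Equal => (a, b),
        Ordering::Greater => (b, a),
    };
    starknet::core::crypto::pedersen_hash(lo, hi)
}

fn main() {
    let (a, b) = (Felt::from(1u8), Felt::from(2u8));
    // Commutative by construction: hash(a, b) == hash(b, a).
    assert_eq!(pedersen_hash(&a, &b), pedersen_hash(&b, &a));
    println!("{:#x}", pedersen_hash(&a, &b));
}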
diff --git a/pragma-common/src/lib.rs b/pragma-common/src/lib.rs
deleted file mode 100644
index 2868a904..00000000
--- a/pragma-common/src/lib.rs
+++ /dev/null
@@ -1,14 +0,0 @@
-pub mod errors;
-pub mod hash;
-pub mod signing;
-pub mod telemetry;
-pub mod types;
-pub mod utils;
-
-// Re-export types from the types module for backward compatibility
-pub use types::auth;
-pub use types::entries;
-pub use types::hex_hash;
-pub use types::timestamp;
-pub use types::typed_data;
-pub use types::utils as types_utils;
diff --git a/pragma-common/src/signing/mod.rs b/pragma-common/src/signing/mod.rs
deleted file mode 100644
index a995413b..00000000
--- a/pragma-common/src/signing/mod.rs
+++ /dev/null
@@ -1,123 +0,0 @@
-pub mod starkex;
-
-use crate::errors::ConversionError;
-use serde::{Deserialize, Serialize};
-use starknet::{
-    core::{
-        crypto::{ecdsa_verify, EcdsaSignError, Signature},
-        types::Felt,
-    },
-    signers::SigningKey,
-};
-use thiserror::Error;
-use utoipa::ToSchema;
-
-use crate::types::entries::{build_publish_message, EntryTrait};
-use crate::types::typed_data::TypedData;
-
-#[derive(Debug, Error, ToSchema)]
-pub enum SignerError {
-    #[error(transparent)]
-    ConversionError(#[from] ConversionError),
-    #[error("cannot sign: {0}")]
-    #[schema(value_type = String)]
-    SigningError(#[from] EcdsaSignError),
-    #[error("invalid signature for message hash {0:?}")]
-    #[schema(value_type = String)]
-    InvalidSignature(Felt),
-    #[error("unauthorized: {0}")]
-    Unauthorized(String),
-    #[error("invalid message: {0}")]
-    InvalidMessage(String),
-}
-
-pub trait Signable {
-    fn try_get_hash(&self) -> Result<Felt, ConversionError>;
-}
-
-/// Sign the passed data with the signer & return the signature 0x prefixed.
-pub fn sign_data(signer: &SigningKey, data: &impl Signable) -> Result<String, SignerError> {
-    let hash_to_sign = data.try_get_hash()?;
-    let signature = signer.sign(&hash_to_sign)?;
-    Ok(format!("0x{signature:}"))
-}
-
-/// Assert that a new entries request is correctly signed
-/// by the publisher.
-/// If it is, we return the signature.
-#[allow(clippy::trait_duplication_in_bounds)]
-pub fn assert_request_signature_is_valid<R, E>(
-    new_entries_request: &R,
-    publisher_account: &Felt,
-    publisher_public_key: &Felt,
-) -> Result<Signature, SignerError>
-where
-    R: AsRef<[Felt]> + AsRef<[E]>,
-    E: EntryTrait + Serialize + for<'de> Deserialize<'de>,
-{
-    let signature = assert_signature_is_valid::<R, E>(
-        new_entries_request,
-        publisher_account,
-        publisher_public_key,
-    )?;
-    Ok(signature)
-}
-
-/// Assert that a request (passed with the request for creating new
-/// entries) is correctly signed by the publisher and in a valid format.
-/// Returns the signature if it is correct.
-#[allow(clippy::trait_duplication_in_bounds)]
-fn assert_signature_is_valid<R, E>(
-    new_entries_request: &R,
-    account_address: &Felt,
-    public_key: &Felt,
-) -> Result<Signature, SignerError>
-where
-    R: AsRef<[Felt]> + AsRef<[E]>,
-    E: EntryTrait + Serialize + for<'de> Deserialize<'de>,
-{
-    let entries: &[E] = new_entries_request.as_ref();
-    let published_message = build_publish_message(entries);
-    let message_hash = published_message
-        .encode(*account_address)
-        .map_err(|e| SignerError::InvalidMessage(e.to_string()))?
-        .hash;
-
-    let signature_slice: &[Felt] = new_entries_request.as_ref();
-    let signature = Signature {
-        r: signature_slice[0],
-        s: signature_slice[1],
-    };
-
-    if !ecdsa_verify(public_key, &message_hash, &signature)
-        .map_err(|_| SignerError::InvalidSignature(message_hash))?
-    {
-        return Err(SignerError::Unauthorized(format!(
-            "Invalid signature for message hash {:?}",
-            &message_hash
-        )));
-    }
-    Ok(signature)
-}
-
-pub fn assert_login_is_valid(
-    login_message: TypedData,
-    signature: &Signature,
-    account_address: &Felt,
-    public_key: &Felt,
-) -> Result<(), SignerError> {
-    let message_hash = login_message
-        .encode(*account_address)
-        .map_err(|e| SignerError::InvalidMessage(e.to_string()))?
-        .hash;
-
-    if !ecdsa_verify(public_key, &message_hash, signature)
-        .map_err(|_| SignerError::InvalidSignature(message_hash))?
-    {
-        return Err(SignerError::Unauthorized(format!(
-            "Invalid signature for message hash {:?}",
-            &message_hash
-        )));
-    }
-    Ok(())
-}
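Context for the removed signing/mod.rs above: both assertion helpers boil down to a single ecdsa_verify call over a message hash. A minimal sketch of that round trip, assuming the starknet crate; the fixed message hash here is an illustrative stand-in for the SNIP-12 typed-data hash that build_publish_message produced:

use starknet::{
    core::{
        crypto::{ecdsa_verify, Signature},
        types::Felt,
    },
    signers::SigningKey,
};

fn main() {
    // Publisher side: sign a message hash with a Stark key.
    let signer = SigningKey::from_random();
    let message_hash = Felt::from(0xdead_beef_u32);
    let sig = signer.sign(&message_hash).expect("signing failed");

    // Node side: verify r/s against the publisher's public key.
    let valid = ecdsa_verify(
        &signer.verifying_key().scalar(),
        &message_hash,
        &Signature { r: sig.r, s: sig.s },
    )
    .expect("malformed signature");
    assert!(valid);
}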
diff --git a/pragma-common/src/telemetry.rs b/pragma-common/src/telemetry.rs
deleted file mode 100644
index 5fab8ad4..00000000
--- a/pragma-common/src/telemetry.rs
+++ /dev/null
@@ -1,112 +0,0 @@
-use color_eyre::eyre::Result;
-use opentelemetry::trace::TracerProvider;
-use opentelemetry::{global, KeyValue};
-use opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge;
-use opentelemetry_otlp::WithExportConfig;
-use opentelemetry_sdk::logs::{BatchConfig, LoggerProvider};
-use opentelemetry_sdk::metrics::reader::DefaultTemporalitySelector;
-use opentelemetry_sdk::metrics::{MeterProviderBuilder, PeriodicReader};
-use opentelemetry_sdk::{runtime, trace::BatchConfigBuilder};
-use opentelemetry_sdk::{
-    trace::{Config, Tracer},
-    Resource,
-};
-use opentelemetry_semantic_conventions::resource::SERVICE_NAME;
-use tracing::level_filters::LevelFilter;
-use tracing::Level;
-use tracing_opentelemetry::OpenTelemetryLayer;
-use tracing_subscriber::layer::SubscriberExt;
-use tracing_subscriber::util::SubscriberInitExt;
-
-pub fn init_telemetry(
-    app_name: String,
-    collection_endpoint: String,
-    log_level: Option<Level>,
-) -> Result<()> {
-    let tracing_subscriber = tracing_subscriber::registry()
-        .with(LevelFilter::from_level(log_level.unwrap_or(Level::INFO)))
-        .with(
-            tracing_subscriber::fmt::layer()
-                .with_target(false)
-                .with_file(false)
-                .with_line_number(false)
-                .pretty(),
-        );
-
-    let tracer_provider = init_tracer_provider(&app_name, &collection_endpoint);
-
-    let logger_provider = init_logs_provider(&app_name, &collection_endpoint)?;
-    init_meter_provider(&app_name, &collection_endpoint)?;
-
-    tracing_subscriber
-        .with(OpenTelemetryLayer::new(tracer_provider))
-        .with(OpenTelemetryTracingBridge::new(&logger_provider))
-        .try_init()?;
-
-    Ok(())
-}
-
-#[allow(dead_code)]
-fn init_tracer_provider(app_name: &str, collection_endpoint: &str) -> Tracer {
-    let provider = opentelemetry_otlp::new_pipeline()
-        .tracing()
-        .with_batch_config(BatchConfigBuilder::default().build())
-        .with_trace_config(
-            Config::default().with_resource(Resource::new(vec![KeyValue::new(
-                SERVICE_NAME,
-                format!("{app_name}-trace-service"),
-            )])),
-        )
-        .with_exporter(
-            opentelemetry_otlp::new_exporter()
-                .tonic()
-                .with_endpoint(collection_endpoint),
-        )
-        .install_batch(runtime::Tokio)
-        .expect("Failed to install tracer provider");
-
-    global::set_tracer_provider(provider.clone());
-    provider.tracer(format!("{app_name}-subscriber"))
-}
-
-fn init_logs_provider(app_name: &str, collection_endpoint: &str) -> Result<LoggerProvider> {
-    let logger = opentelemetry_otlp::new_pipeline()
-        .logging()
-        .with_batch_config(BatchConfig::default())
-        .with_resource(Resource::new(vec![KeyValue::new(
-            SERVICE_NAME,
-            format!("{app_name}-logs-service"),
-        )]))
-        .with_exporter(
-            opentelemetry_otlp::new_exporter()
-                .tonic()
-                .with_endpoint(collection_endpoint),
-        )
-        .install_batch(runtime::Tokio)?;
-
-    Ok(logger)
-}
-
-pub fn init_meter_provider(app_name: &str, collection_endpoint: &str) -> Result<()> {
-    let exporter = opentelemetry_otlp::new_exporter()
-        .tonic()
-        .with_endpoint(collection_endpoint)
-        .build_metrics_exporter(Box::new(DefaultTemporalitySelector::new()))?;
-
-    let reader = PeriodicReader::builder(exporter, runtime::Tokio)
-        .with_interval(std::time::Duration::from_secs(5))
-        .build();
-
-    let metrics_provider = MeterProviderBuilder::default()
-        .with_reader(reader)
-        .with_resource(Resource::new(vec![KeyValue::new(
-            SERVICE_NAME,
-            format!("{app_name}-meter-service"),
-        )]))
-        .build();
-
-    // Set the global meter provider
-    global::set_meter_provider(metrics_provider);
-
-    Ok(())
-}
diff --git a/pragma-common/src/types/auth.rs b/pragma-common/src/types/auth.rs
deleted file mode 100644
index 5656f3db..00000000
--- a/pragma-common/src/types/auth.rs
+++ /dev/null
@@ -1,76 +0,0 @@
-use indexmap::IndexMap;
-use serde::{Deserialize, Serialize};
-use starknet::core::types::Felt;
-use utoipa::ToSchema;
-
-use crate::typed_data::{Domain, Field, PrimitiveType, SimpleField, TypedData};
-use crate::types_utils::felt_from_decimal;
-
-#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
-pub struct LoginMessage {
-    #[schema(value_type = Vec<String>)]
-    #[serde(deserialize_with = "felt_from_decimal")]
-    pub signature: Vec<Felt>,
-    pub publisher_name: String,
-    pub expiration_timestamp: u64,
-}
-
-pub fn build_login_message(publisher_name: &str, expiration_timestamp: u64) -> TypedData {
-    // Define the domain
-    let domain = Domain::new("Pragma", "1", "1", Some("1"));
-
-    // Define the types
-    let mut types = IndexMap::new();
-
-    // Add "StarknetDomain" type
-    types.insert(
-        "StarknetDomain".to_string(),
-        vec![
-            Field::SimpleType(SimpleField {
-                name: "name".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "version".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "chainId".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "revision".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-        ],
-    );
-
-    // Add "Request" type
-    types.insert(
-        "Request".to_string(),
-        vec![
-            Field::SimpleType(SimpleField {
-                name: "publisher_name".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "expiration_timestamp".to_string(),
-                r#type: "timestamp".to_string(),
-            }),
-        ],
-    );
-
-    // Create the message
-    let mut message = IndexMap::new();
-    message.insert(
-        "publisher_name".to_string(),
-        PrimitiveType::String(publisher_name.to_string()),
-    );
-    message.insert(
-        "expiration_timestamp".to_string(),
-        PrimitiveType::Number(expiration_timestamp.into()),
-    );
-
-    // Create TypedData
-    TypedData::new(types, "Request", domain, message)
-}
diff --git a/pragma-common/src/types/block_id.rs b/pragma-common/src/types/block_id.rs
deleted file mode 100644
index e58fa2e5..00000000
--- a/pragma-common/src/types/block_id.rs
+++ /dev/null
@@ -1,69 +0,0 @@
-// Reference:
-// https://github.com/xJonathanLEI/starknet-rs/blob/master/starknet-core/src/types/codegen.rs#L71
-use serde::{Deserialize, Deserializer, Serialize};
-use std::str::FromStr;
-use strum::{Display, EnumString};
-use utoipa::ToSchema;
-
-/// Block tag.
-///
-/// A tag specifying a dynamic reference to a block.
-#[derive(
-    Debug,
-    Default,
-    Clone,
-    Copy,
-    PartialEq,
-    Eq,
-    Serialize,
-    Deserialize,
-    Display,
-    EnumString,
-    ToSchema,
-)]
-#[strum(serialize_all = "lowercase")]
-pub enum BlockTag {
-    Latest,
-    #[default]
-    Pending,
-}
-
-/// Block identifier in the form of hash, number or tag.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Display, ToSchema)]
-pub enum BlockId {
-    #[strum(serialize = "{0}")]
-    Tag(BlockTag),
-    #[strum(serialize = "{0}")]
-    Number(u64),
-}
-
-impl<'de> Deserialize<'de> for BlockId {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let value = String::deserialize(deserializer)?;
-        u64::from_str(&value).map_or_else(
-            |_| {
-                BlockTag::from_str(&value.to_lowercase()).map_or_else(
-                    |_| {
-                        Err(serde::de::Error::custom(format!(
-                            "Invalid BlockId: {value}"
-                        )))
-                    },
-                    |tag| Ok(Self::Tag(tag)),
-                )
-            },
-            |num| Ok(Self::Number(num)),
-        )
-    }
-}
-
-impl Serialize for BlockId {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
-        serializer.serialize_str(&self.to_string())
-    }
-}
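Context for the removed block_id.rs above: the deserializer first tries to parse a block number, then falls back to a case-insensitive tag. A standalone sketch of the same parse rule without the serde and strum plumbing; the helper name and main are illustrative:

use std::str::FromStr;

#[derive(Debug, PartialEq)]
enum BlockId {
    Number(u64),
    Latest,
    Pending,
}

// Numbers win over tags; tags are matched case-insensitively.
fn parse_block_id(value: &str) -> Result<BlockId, String> {
    if let Ok(num) = u64::from_str(value) {
        return Ok(BlockId::Number(num));
    }
    match value.to_lowercase().as_str() {
        "latest" => Ok(BlockId::Latest),
        "pending" => Ok(BlockId::Pending),
        _ => Err(format!("Invalid BlockId: {value}")),
    }
}

fn main() {
    assert_eq!(parse_block_id("42"), Ok(BlockId::Number(42)));
    assert_eq!(parse_block_id("Latest"), Ok(BlockId::Latest));
    assert!(parse_block_id("oops").is_err());
}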
diff --git a/pragma-common/src/types/entries.rs b/pragma-common/src/types/entries.rs
deleted file mode 100644
index c04db90b..00000000
--- a/pragma-common/src/types/entries.rs
+++ /dev/null
@@ -1,326 +0,0 @@
-use indexmap::IndexMap;
-use serde::{Deserialize, Serialize};
-use serde_json::Number;
-use std::fmt;
-use utoipa::ToSchema;
-
-use crate::typed_data::{Domain, Field, PrimitiveType, SimpleField, TypedData};
-use crate::types::utils::flexible_u128;
-
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, ToSchema)]
-pub struct BaseEntry {
-    pub timestamp: u64,
-    pub source: String,
-    pub publisher: String,
-}
-
-pub trait EntryTrait {
-    fn base(&self) -> &BaseEntry;
-    fn pair_id(&self) -> &String;
-    fn price(&self) -> u128;
-    fn volume(&self) -> u128;
-    fn expiration_timestamp(&self) -> Option<u64> {
-        None
-    }
-}
-
-// Entry = SpotEntry
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, ToSchema)]
-pub struct Entry {
-    pub base: BaseEntry,
-    pub pair_id: String,
-    #[serde(deserialize_with = "flexible_u128")]
-    pub price: u128,
-    #[serde(deserialize_with = "flexible_u128")]
-    pub volume: u128,
-}
-
-impl EntryTrait for Entry {
-    fn base(&self) -> &BaseEntry {
-        &self.base
-    }
-
-    fn pair_id(&self) -> &String {
-        &self.pair_id
-    }
-
-    fn price(&self) -> u128 {
-        self.price
-    }
-
-    fn volume(&self) -> u128 {
-        self.volume
-    }
-}
-
-impl fmt::Display for Entry {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(
-            f,
-            "SPOT[{}] {} @ {} (vol: {}) from {}/{}",
-            self.pair_id,
-            self.price,
-            self.base.timestamp,
-            self.volume,
-            self.base.source,
-            self.base.publisher
-        )
-    }
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, ToSchema)]
-pub struct PerpEntry {
-    pub base: BaseEntry,
-    pub pair_id: String,
-    #[serde(deserialize_with = "flexible_u128")]
-    pub price: u128,
-    #[serde(deserialize_with = "flexible_u128")]
-    pub volume: u128,
-}
-
-impl EntryTrait for PerpEntry {
-    fn base(&self) -> &BaseEntry {
-        &self.base
-    }
-
-    fn pair_id(&self) -> &String {
-        &self.pair_id
-    }
-
-    fn price(&self) -> u128 {
-        self.price
-    }
-
-    fn volume(&self) -> u128 {
-        self.volume
-    }
-
-    fn expiration_timestamp(&self) -> Option<u64> {
-        Some(0)
-    }
-}
-
-impl fmt::Display for PerpEntry {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(
-            f,
-            "PERP[{}] {} @ {} (vol: {}) from {}/{}",
-            self.pair_id,
-            self.price,
-            self.base.timestamp,
-            self.volume,
-            self.base.source,
-            self.base.publisher
-        )
-    }
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, ToSchema)]
-pub struct FutureEntry {
-    pub base: BaseEntry,
-    pub pair_id: String,
-    #[serde(deserialize_with = "flexible_u128")]
-    pub price: u128,
-    #[serde(deserialize_with = "flexible_u128")]
-    pub volume: u128,
-    // in milliseconds
-    pub expiration_timestamp: u64,
-}
-
-impl EntryTrait for FutureEntry {
-    fn base(&self) -> &BaseEntry {
-        &self.base
-    }
-
-    fn pair_id(&self) -> &String {
-        &self.pair_id
-    }
-
-    fn price(&self) -> u128 {
-        self.price
-    }
-
-    fn volume(&self) -> u128 {
-        self.volume
-    }
-
-    fn expiration_timestamp(&self) -> Option<u64> {
-        Some(self.expiration_timestamp)
-    }
-}
-
-impl fmt::Display for FutureEntry {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(
-            f,
-            "FUTURE[{}] {} @ {} (vol: {}, exp: {}) from {}/{}",
-            self.pair_id,
-            self.price,
-            self.base.timestamp,
-            self.volume,
-            self.expiration_timestamp,
-            self.base.source,
-            self.base.publisher
-        )
-    }
-}
-
-#[allow(clippy::too_many_lines)]
-pub fn build_publish_message<E>(entries: &[E]) -> TypedData
-where
-    E: EntryTrait + Serialize + for<'a> Deserialize<'a>,
-{
-    let mut is_future = false;
-
-    // Construct the raw entries
-    let raw_entries: Vec<PrimitiveType> = entries
-        .iter()
-        .map(|entry| {
-            let mut entry_map = IndexMap::new();
-            let base = entry.base();
-
-            // Add base fields
-            let mut base_map = IndexMap::new();
-            base_map.insert(
-                "publisher".to_string(),
-                PrimitiveType::String(base.publisher.clone()),
-            );
-            base_map.insert(
-                "source".to_string(),
-                PrimitiveType::String(base.source.clone()),
-            );
-            base_map.insert(
-                "timestamp".to_string(),
-                PrimitiveType::String(base.timestamp.to_string()),
-            );
-
-            entry_map.insert("base".to_string(), PrimitiveType::Object(base_map));
-            entry_map.insert(
-                "pair_id".to_string(),
-                PrimitiveType::String(entry.pair_id().to_string()),
-            );
-            entry_map.insert(
-                "price".to_string(),
-                PrimitiveType::Number(Number::from(entry.price())),
-            );
-            entry_map.insert(
-                "volume".to_string(),
-                PrimitiveType::Number(Number::from(entry.volume())),
-            );
-
-            // Handle optional expiration timestamp
-            if let Some(expiration) = entry.expiration_timestamp() {
-                is_future = true;
-                entry_map.insert(
-                    "expiration_timestamp".to_string(),
-                    PrimitiveType::String(expiration.to_string()),
-                );
-            }
-
-            PrimitiveType::Object(entry_map)
-        })
-        .collect();
-
-    // Define the domain
-    let domain = Domain::new("Pragma", "1", "1", Some("1"));
-
-    // Define the types
-    let mut types = IndexMap::new();
-
-    // Add "StarknetDomain" type
-    types.insert(
-        "StarknetDomain".to_string(),
-        vec![
-            Field::SimpleType(SimpleField {
-                name: "name".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "version".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "chainId".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "revision".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-        ],
-    );
-
-    // Define "Entry" type
-    let mut entry_fields = vec![
-        Field::SimpleType(SimpleField {
-            name: "base".to_string(),
-            r#type: "Base".to_string(),
-        }),
-        Field::SimpleType(SimpleField {
-            name: "pair_id".to_string(),
-            r#type: "shortstring".to_string(),
-        }),
-        Field::SimpleType(SimpleField {
-            name: "price".to_string(),
-            r#type: "u128".to_string(),
-        }),
-        Field::SimpleType(SimpleField {
-            name: "volume".to_string(),
-            r#type: "u128".to_string(),
-        }),
-    ];
-
-    // Include "expiration_timestamp" if necessary
-    if is_future {
-        entry_fields.push(Field::SimpleType(SimpleField {
-            name: "expiration_timestamp".to_string(),
-            r#type: "timestamp".to_string(),
-        }));
-    }
-
-    types.insert("Entry".to_string(), entry_fields);
-
-    // Define "Base" type
-    types.insert(
-        "Base".to_string(),
-        vec![
-            Field::SimpleType(SimpleField {
-                name: "publisher".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "source".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "timestamp".to_string(),
-                r#type: "timestamp".to_string(),
-            }),
-        ],
-    );
-
-    // **Add the missing "Request" type**
-    types.insert(
-        "Request".to_string(),
-        vec![
-            Field::SimpleType(SimpleField {
-                name: "action".to_string(),
-                r#type: "shortstring".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "entries".to_string(),
-                r#type: "Entry*".to_string(),
-            }),
-        ],
-    );
-
-    // Create the message
-    let mut message = IndexMap::new();
-    message.insert(
-        "action".to_string(),
-        PrimitiveType::String("Publish".to_string()),
-    );
-    message.insert("entries".to_string(), PrimitiveType::Array(raw_entries));
-
-    TypedData::new(types, "Request", domain, message)
-}
diff --git a/pragma-common/src/types/hex_hash.rs b/pragma-common/src/types/hex_hash.rs
deleted file mode 100644
index e8681c74..00000000
--- a/pragma-common/src/types/hex_hash.rs
+++ /dev/null
@@ -1,22 +0,0 @@
-use serde::{Deserialize, Deserializer};
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct HexHash(pub String);
-
-impl<'de> Deserialize<'de> for HexHash {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let s = String::deserialize(deserializer)?;
-        if !is_0x_prefixed_hex_string(&s) {
-            return Err(serde::de::Error::custom("Invalid hex hash format"));
-        }
-        Ok(Self(s))
-    }
-}
-
-// Helper function to check if a string is a valid 0x-prefixed hexadecimal string
-fn is_0x_prefixed_hex_string(s: &str) -> bool {
-    s.starts_with("0x") && s[2..].chars().all(|c| c.is_ascii_hexdigit())
-}
-#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, ToSchema)]
-pub struct MerkleProof(pub Vec<String>);
-
-impl From<FeltMerkleProof> for MerkleProof {
-    fn from(proof: FeltMerkleProof) -> Self {
-        Self(
-            proof
-                .0
-                .into_iter()
-                .map(|felt| format!("{felt:#x}"))
-                .collect(),
-        )
-    }
-}
-
-impl TryInto<FeltMerkleProof> for MerkleProof {
-    type Error = MerkleTreeError;
-
-    fn try_into(self) -> Result<FeltMerkleProof, Self::Error> {
-        self.0
-            .into_iter()
-            .map(|hash| Felt::from_hex(&hash).map_err(|_| MerkleTreeError::FeltConversion(hash)))
-            .collect::<Result<Vec<Felt>, _>>()
-            .map(FeltMerkleProof)
-    }
-}
-
-impl MerkleTree {
-    pub fn new(leaves: Vec<Felt>) -> Result<Self, MerkleTreeError> {
-        if leaves.is_empty() {
-            return Err(MerkleTreeError::EmptyLeaves);
-        }
-
-        let mut tree = Self {
-            leaves,
-            root_hash: Felt::default(),
-            levels: Vec::new(),
-        };
-
-        let (root_hash, levels) = tree.build();
-        tree.root_hash = root_hash;
-        tree.levels = levels;
-
-        Ok(tree)
-    }
-
-    fn build(&self) -> (Felt, Vec<Vec<Felt>>) {
-        if self.leaves.len() == 1 {
-            return (self.leaves[0], vec![self.leaves.clone()]);
-        }
-
-        let mut curr_level_nodes = self.leaves.clone();
-        let mut levels = Vec::new();
-
-        while curr_level_nodes.len() > 1 {
-            if curr_level_nodes.len() != self.leaves.len() {
-                levels.push(curr_level_nodes.clone());
-            }
-
-            let mut new_nodes = Vec::new();
-            for chunk in curr_level_nodes.chunks(2) {
-                let a = chunk[0];
-                let b = if chunk.len() > 1 {
-                    chunk[1]
-                } else {
-                    Felt::ZERO
-                };
-                // sorting of A & B happens in the [hash] method
-                new_nodes.push(pedersen_hash(&a, &b));
-            }
-
-            curr_level_nodes = new_nodes;
-        }
-
-        levels.insert(0, self.leaves.clone());
-        levels.push(curr_level_nodes.clone());
-
-        (curr_level_nodes[0], levels)
-    }
-
-    /// Returns the merkle proof if the passed leaf is found in the tree.
-    pub fn get_proof(&self, leaf: &Felt) -> Option<FeltMerkleProof> {
-        let mut path = Vec::new();
-        let mut current_hash = *leaf;
-
-        for level in &self.levels {
-            let index = level.iter().position(|&x| x == current_hash)?;
-            if level.len() == 1 {
-                break;
-            }
-
-            let sibling_index = if index % 2 == 0 { index + 1 } else { index - 1 };
-            let sibling = level.get(sibling_index).unwrap_or(&Felt::ZERO);
-
-            path.push(*sibling);
-            current_hash = pedersen_hash(&current_hash, sibling);
-        }
-        Some(FeltMerkleProof(path))
-    }
-
-    /// Verify that the passed merkle proof is valid for the leaf.
-    pub fn verify_proof(&self, leaf: &Felt, proof: &FeltMerkleProof) -> bool {
-        let mut current_hash = *leaf;
-        for &sibling in &proof.0 {
-            current_hash = pedersen_hash(&current_hash, &sibling);
-        }
-        current_hash == self.root_hash
-    }
-}
-
-/// All the expected values for the hash come from the python implementation of starknet.py.
-/// Reference:
-///
-#[cfg(test)]
-mod tests {
-    use rstest::rstest;
-
-    use super::*;
-
-    #[rstest]
-    fn test_merkle_tree_new() {
-        let leaves = vec![
-            Felt::from(1_u32),
-            Felt::from(2_u32),
-            Felt::from(3_u32),
-            Felt::from(4_u32),
-        ];
-
-        let merkle_tree = MerkleTree::new(leaves.clone()).unwrap();
-
-        assert_eq!(merkle_tree.leaves, leaves);
-        assert_eq!(merkle_tree.levels.len(), 3);
-        assert_eq!(
-            merkle_tree.root_hash,
-            Felt::from_hex("0x38118a340bbba28e678413cd3b07a9436a5e60fd6a7cbda7db958a6d501e274")
-                .unwrap()
-        );
-    }
-
-    #[rstest]
-    fn test_merkle_tree_proof() {
-        let leaves = vec![
-            Felt::from(1_u32),
-            Felt::from(2_u32),
-            Felt::from(3_u32),
-            Felt::from(4_u32),
-        ];
-        let merkle_tree = MerkleTree::new(leaves).unwrap();
-
-        let leaf = Felt::from(1_u32);
-        let proof = merkle_tree.get_proof(&leaf).unwrap();
-
-        let expected_proof = FeltMerkleProof(vec![
-            Felt::from_hex("0x2").unwrap(),
-            Felt::from_hex("0x262697b88544f733e5c6907c3e1763131e9f14c51ee7951258abbfb29415fbf")
-                .unwrap(),
-        ]);
-
-        assert_eq!(proof, expected_proof);
-        assert!(merkle_tree.verify_proof(&leaf, &proof));
-    }
-
-    #[rstest]
-    fn test_merkle_tree_single_leaf() {
-        let leaves = vec![Felt::from(1_u32)];
-        let merkle_tree = MerkleTree::new(leaves.clone()).unwrap();
-
-        assert_eq!(merkle_tree.leaves, leaves);
-        assert_eq!(merkle_tree.levels.len(), 1);
-        assert_eq!(merkle_tree.root_hash, Felt::from(1_u32));
-    }
-
-    #[rstest]
-    fn test_merkle_tree_odd_number_of_leaves() {
-        let leaves = vec![Felt::from(1_u32), Felt::from(2_u32), Felt::from(3_u32)];
-        let merkle_tree = MerkleTree::new(leaves.clone()).unwrap();
-
-        assert_eq!(merkle_tree.leaves, leaves);
-        assert_eq!(merkle_tree.levels.len(), 3);
-        assert_eq!(
-            merkle_tree.root_hash,
-            Felt::from_hex("0x015ac9e457789ef0c56e5d559809e7336a909c14ee2511503fa7af69be1ba639")
-                .unwrap()
-        );
-    }
-
-    #[rstest]
-    fn test_merkle_tree_empty_leaves() {
-        let leaves: Vec<Felt> = vec![];
-        let result = MerkleTree::new(leaves);
-
-        assert!(matches!(result, Err(MerkleTreeError::EmptyLeaves)));
-    }
-
-    #[rstest]
-    fn test_merkle_tree_proof_verification_failure() {
-        let leaves = vec![
-            Felt::from(1_u32),
-            Felt::from(2_u32),
-            Felt::from(3_u32),
-            Felt::from(4_u32),
-        ];
-        let merkle_tree = MerkleTree::new(leaves).unwrap();
-
-        let leaf = Felt::from(1_u32);
-        let mut proof = merkle_tree.get_proof(&leaf).unwrap();
-
-        if let Some(first) = proof.0.first_mut() {
-            *first = Felt::from(99_u32);
-        }
-
-        assert!(!merkle_tree.verify_proof(&leaf, &proof));
-    }
-
-    #[rstest]
-    fn test_merkle_tree_proof_for_nonexistent_leaf() {
-        let leaves = vec![
-            Felt::from(1_u32),
-            Felt::from(2_u32),
-            Felt::from(3_u32),
-            Felt::from(4_u32),
-        ];
-        let merkle_tree = MerkleTree::new(leaves).unwrap();
-
-        let nonexistent_leaf = Felt::from(5_u32);
-        let proof = merkle_tree.get_proof(&nonexistent_leaf);
-
-        assert!(proof.is_none());
-    }
-}
diff --git a/pragma-common/src/types/mod.rs b/pragma-common/src/types/mod.rs
deleted file mode 100644
index d6d1a1b4..00000000
--- a/pragma-common/src/types/mod.rs
+++ /dev/null
@@ -1,93 +0,0 @@
-pub mod auth;
-pub mod block_id;
-pub mod entries;
-pub mod hex_hash;
-pub mod merkle_tree;
-pub mod options;
-pub mod pair;
-pub mod timestamp;
-pub mod typed_data;
-pub mod utils;
-
-use serde::{Deserialize, Serialize};
-use strum::{Display, EnumString};
-use utoipa::ToSchema;
-
-#[derive(Default, Debug, Serialize, Deserialize, ToSchema, Clone, Copy)]
-pub enum AggregationMode {
-    #[serde(rename = "median")]
-    #[default]
-    Median,
-    #[serde(rename = "mean")]
-    Mean,
#[serde(rename = "twap")] - Twap, -} - -#[derive(Default, Debug, Serialize, Deserialize, ToSchema, Clone, Copy, Display, EnumString)] -#[strum(serialize_all = "lowercase")] -pub enum Network { - #[default] - #[serde(rename = "sepolia")] - Sepolia, - #[serde(rename = "mainnet")] - Mainnet, -} - -#[derive(Default, Debug, Deserialize, ToSchema, Clone, Copy, PartialEq, Eq)] -pub enum DataType { - #[serde(rename = "spot_entry")] - #[default] - SpotEntry, - #[serde(rename = "perp_entry")] - PerpEntry, - #[serde(rename = "future_entry")] - FutureEntry, -} - -// Supported Aggregation Intervals -#[derive(Default, Debug, Serialize, Deserialize, ToSchema, Clone, Copy, Eq, PartialEq, Hash)] -pub enum Interval { - #[serde(rename = "1s")] - OneSecond, - #[serde(rename = "5s")] - FiveSeconds, - #[serde(rename = "1min")] - #[default] - OneMinute, - #[serde(rename = "15min")] - FifteenMinutes, - #[serde(rename = "1h")] - OneHour, - #[serde(rename = "2h")] - TwoHours, - #[serde(rename = "1d")] - OneDay, - #[serde(rename = "1w")] - OneWeek, -} - -impl Interval { - pub const fn to_minutes(&self) -> i64 { - match self { - Self::OneSecond => 0, - Self::FiveSeconds => 5, - Self::OneMinute => 1, - Self::FifteenMinutes => 15, - Self::OneHour => 60, - Self::TwoHours => 120, - Self::OneDay => 1400, - Self::OneWeek => 10080, - } - } - - pub const fn to_seconds(&self) -> i64 { - if matches!(self, Self::OneSecond) { - return 1; - } - if matches!(self, Self::FiveSeconds) { - return 5; - } - self.to_minutes() * 60 - } -} diff --git a/pragma-common/src/types/options.rs b/pragma-common/src/types/options.rs deleted file mode 100644 index 2b775f28..00000000 --- a/pragma-common/src/types/options.rs +++ /dev/null @@ -1,292 +0,0 @@ -use std::str::FromStr; - -use bigdecimal::BigDecimal; -use chrono::NaiveDate; -use serde::{Deserialize, Serialize}; -use starknet::core::{ - crypto::compute_hash_on_elements, types::Felt, utils::cairo_short_string_to_felt, -}; -use strum::{Display, EnumString}; -use thiserror::Error; -use utoipa::ToSchema; - -use crate::utils::field_element_as_hex_string; - -/// The available currencies supported. -#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Display, EnumString, ToSchema)] -#[strum(serialize_all = "UPPERCASE")] -pub enum OptionCurrency { - BTC, - ETH, -} - -impl OptionCurrency { - pub fn from_ticker(ticker: &str) -> Result { - ticker - .parse() - .map_err(|_| InstrumentError::UnsupportedCurrency(ticker.to_owned())) - } -} - -/// The possible types for an option. -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Display, EnumString, ToSchema)] -#[strum(serialize_all = "UPPERCASE")] -pub enum OptionType { - #[strum(serialize = "P")] - Put, - #[strum(serialize = "C")] - Call, -} - -#[derive(Debug, Error)] -pub enum InstrumentError { - #[error("invalid name format: {0}")] - NameFormat(String), - #[error("invalid date format: {0}")] - DateFormat(String), - #[error("invalid option type: {0}")] - OptionType(String), - #[error("invalid mark price: {0}")] - MarkPrice(String), - #[error("currency must be BTC or ETH, found: {0}")] - UnsupportedCurrency(String), - #[error("could not convert {0} to a field element")] - Felt(String), -} - -#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] -/// An instrument. 
-pub struct Instrument {
-    pub base_currency: OptionCurrency,
-    pub expiration_date: NaiveDate,
-    #[schema(value_type = u64)]
-    pub strike_price: BigDecimal,
-    pub option_type: OptionType,
-}
-
-impl Instrument {
-    pub fn from_name(instrument_name: &str) -> Result<Self, InstrumentError> {
-        let parts: Vec<&str> = instrument_name.split('-').collect();
-
-        if parts.len() != 4 {
-            return Err(InstrumentError::NameFormat(instrument_name.to_owned()));
-        }
-
-        let base_currency = OptionCurrency::from_ticker(parts[0])?;
-        let expiration_date = NaiveDate::parse_from_str(parts[1], "%d%b%y")
-            .map_err(|_| InstrumentError::DateFormat(parts[1].to_owned()))?;
-        let strike_price = BigDecimal::from_str(parts[2])
-            .map_err(|_| InstrumentError::MarkPrice(parts[2].to_owned()))?;
-        let option_type = match parts.get(3) {
-            Some(&"P") => OptionType::Put,
-            Some(&"C") => OptionType::Call,
-            _ => return Err(InstrumentError::OptionType(parts[3].to_owned())),
-        };
-
-        Ok(Self {
-            base_currency,
-            expiration_date,
-            strike_price,
-            option_type,
-        })
-    }
-
-    pub fn name(&self) -> String {
-        format!(
-            "{}-{}-{}-{}",
-            self.base_currency,
-            self.expiration_date
-                .format("%d%b%y")
-                .to_string()
-                .to_uppercase(),
-            self.strike_price,
-            self.option_type
-        )
-    }
-}
-
-#[macro_export]
-macro_rules! instrument {
-    ($($name:expr),* $(,)?) => {$(
-        {
-            const _: () = {
-                let s = $name;
-                assert!(s.len() >= 11, "Instrument name too short");
-                assert!(s.as_bytes()[3] == b'-' && s.as_bytes()[11] == b'-' && s.as_bytes()[s.len() - 2] == b'-', "Invalid format");
-            };
-            Instrument::from_name($name).expect(&format!("Could not use macro instrument! from: {}", $name))
-        }
-    )*};
-}
-
-/// An instrument option with its mark price for a certain timestamp.
-#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
-pub struct OptionData {
-    pub instrument_name: String,
-    pub base_currency: OptionCurrency,
-    pub current_timestamp: i64,
-    #[schema(value_type = u64)]
-    pub mark_price: BigDecimal,
-}
-
-impl OptionData {
-    /// Converts an option as a Vec of Felt - i.e a calldata.
-    pub fn as_calldata(&self) -> Result<Vec<Felt>, InstrumentError> {
-        Ok(vec![
-            cairo_short_string_to_felt(&self.instrument_name)
-                .map_err(|_| InstrumentError::Felt("instrument name".to_string()))?,
-            cairo_short_string_to_felt(&self.base_currency.to_string())
-                .map_err(|_| InstrumentError::Felt("base currency".to_string()))?,
-            Felt::from(self.current_timestamp as u64),
-            Felt::from_str(&self.mark_price.to_string())
-                .map_err(|_| InstrumentError::Felt("mark price".to_string()))?,
-        ])
-    }
-
-    /// Computes the pedersen hash of the Option.
-    pub fn pedersen_hash(&self) -> Result<Felt, InstrumentError> {
-        let elements = self.as_calldata()?;
-        Ok(compute_hash_on_elements(&elements))
-    }
-
-    pub fn pedersen_hash_as_hex_string(&self) -> Result<String, InstrumentError> {
-        let hash = self.pedersen_hash()?;
-        Ok(field_element_as_hex_string(&hash))
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use bigdecimal::BigDecimal;
-    use chrono::NaiveDate;
-    use rstest::rstest;
-    use std::str::FromStr;
-
-    use super::*;
-
-    #[rstest]
-    #[case(
-        "BTC-27JUN25-80000-P",
-        OptionCurrency::BTC,
-        2025,
-        6,
-        27,
-        80_000,
-        OptionType::Put
-    )]
-    #[case(
-        "BTC-16AUG24-59000-P",
-        OptionCurrency::BTC,
-        2024,
-        8,
-        16,
-        59_000,
-        OptionType::Put
-    )]
-    #[case(
-        "BTC-16AUG24-54000-C",
-        OptionCurrency::BTC,
-        2024,
-        8,
-        16,
-        54_000,
-        OptionType::Call
-    )]
-    #[case(
-        "BTC-27DEC24-20000-P",
-        OptionCurrency::BTC,
-        2024,
-        12,
-        27,
-        20_000,
-        OptionType::Put
-    )]
-    #[case(
-        "BTC-3AUG24-61500-C",
-        OptionCurrency::BTC,
-        2024,
-        8,
-        3,
-        61_500,
-        OptionType::Call
-    )]
-    #[case(
-        "BTC-27DEC24-28000-P",
-        OptionCurrency::BTC,
-        2024,
-        12,
-        27,
-        28_000,
-        OptionType::Put
-    )]
-    #[case(
-        "BTC-3AUG24-61000-P",
-        OptionCurrency::BTC,
-        2024,
-        8,
-        3,
-        61_000,
-        OptionType::Put
-    )]
-    #[case(
-        "BTC-30AUG24-78000-P",
-        OptionCurrency::BTC,
-        2024,
-        8,
-        30,
-        78_000,
-        OptionType::Put
-    )]
-    #[case(
-        "BTC-27DEC24-105000-C",
-        OptionCurrency::BTC,
-        2024,
-        12,
-        27,
-        105_000,
-        OptionType::Call
-    )]
-    #[case(
-        "BTC-4AUG24-56000-P",
-        OptionCurrency::BTC,
-        2024,
-        8,
-        4,
-        56_000,
-        OptionType::Put
-    )]
-    fn test_instrument_from_name(
-        #[case] name: &str,
-        #[case] expected_currency: OptionCurrency,
-        #[case] expected_year: i32,
-        #[case] expected_month: u32,
-        #[case] expected_day: u32,
-        #[case] expected_strike: i32,
-        #[case] expected_option_type: OptionType,
-    ) {
-        let instrument = Instrument::from_name(name).unwrap();
-        assert_eq!(instrument.base_currency, expected_currency);
-        assert_eq!(
-            instrument.expiration_date,
-            NaiveDate::from_ymd_opt(expected_year, expected_month, expected_day).unwrap()
-        );
-        assert_eq!(
-            instrument.strike_price,
-            BigDecimal::from_str(&expected_strike.to_string()).unwrap()
-        );
-        assert_eq!(instrument.option_type, expected_option_type);
-    }
-
-    #[rstest]
-    #[case("BTC-27JUN25-80000-X")]
-    #[case("BTC-16AUG24-59000")]
-    #[case("BTC-16AUG24-ABCDE-C")]
-    #[case("-27DEC24")]
-    #[case("BTC-3AUG24-61500-C-EXTRA")]
-    #[case("INVALID-27DEC24-28000-P")]
-    #[case("SOL-4AUG24-56000-P")]
-    #[case("ETH-2424AUG24-56000-P")]
-    fn test_invalid_instrument_names(#[case] name: &str) {
-        assert!(Instrument::from_name(name).is_err());
-    }
-}
diff --git a/pragma-common/src/types/pair.rs b/pragma-common/src/types/pair.rs
deleted file mode 100644
index 50be9a97..00000000
--- a/pragma-common/src/types/pair.rs
+++ /dev/null
@@ -1,261 +0,0 @@
-use std::str::FromStr;
-
-use serde::{Deserialize, Serialize};
-use utoipa::ToSchema;
-
-const STABLE_SUFFIXES: [&str; 4] = ["USDT", "USDC", "USD", "DAI"];
-
-/// A pair of assets, e.g. BTC/USD
-///
-/// This is a simple struct that holds the base and quote assets.
-/// It is used to represent a pair of assets in the system.
-/// Base and quote are always in UPPERCASE.
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, ToSchema)]
-pub struct Pair {
-    pub base: String,
-    pub quote: String,
-}
-
-impl Pair {
-    /// Creates a routed pair from two pairs that share a common quote currency.
-    ///
-    /// e.g. "BTC/USD" and "ETH/USD" -> "BTC/ETH"
"BTC/USD" and "ETH/USD" -> "BTC/ETH" - pub fn create_routed_pair(base_pair: &Self, quote_pair: &Self) -> Self { - Self { - base: base_pair.base.clone(), - quote: quote_pair.base.clone(), - } - } - - /// Creates a new pair from base and quote currencies. - pub fn from_currencies(base: &str, quote: &str) -> Self { - Self { - base: base.to_uppercase(), - quote: quote.to_uppercase(), - } - } - - /// Creates a pair from a stable pair string with or without delimiters - /// e.g. "BTCUSDT" -> BTC/USD, "ETH-USDC" -> ETH/USD, "`SOL_USDT`" -> SOL/USD - pub fn from_stable_pair(pair: &str) -> Option { - let pair = pair.to_uppercase(); - let normalized = pair.replace(['-', '_', '/'], ""); - - for stable in STABLE_SUFFIXES { - if let Some(base) = normalized.strip_suffix(stable) { - return Some(Self { - base: base.to_string(), - quote: "USD".to_string(), - }); - } - } - None - } - - /// Get the base and quote as a tuple - pub fn as_tuple(&self) -> (String, String) { - (self.base.clone(), self.quote.clone()) - } - - /// Format pair with a custom separator - pub fn format_with_separator(&self, separator: &str) -> String { - format!("{}{}{}", self.base, separator, self.quote) - } - - /// Get the pair ID in standard format without consuming self - pub fn to_pair_id(&self) -> String { - self.format_with_separator("/") - } -} - -impl std::fmt::Display for Pair { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}/{}", self.base, self.quote) - } -} - -impl From for String { - fn from(pair: Pair) -> Self { - format!("{0}/{1}", pair.base, pair.quote) - } -} - -impl From<&str> for Pair { - fn from(pair_id: &str) -> Self { - let normalized = pair_id.replace(['-', '_'], "/"); - let parts: Vec<&str> = normalized.split('/').collect(); - Self { - base: parts[0].trim().to_uppercase(), - quote: parts[1].trim().to_uppercase(), - } - } -} - -impl From for Pair { - fn from(pair_id: String) -> Self { - Self::from(pair_id.as_str()) - } -} - -impl FromStr for Pair { - type Err = (); - - fn from_str(s: &str) -> Result { - Ok(Self::from(s)) - } -} - -impl From<(String, String)> for Pair { - fn from(pair: (String, String)) -> Self { - Self { - base: pair.0.to_uppercase(), - quote: pair.1.to_uppercase(), - } - } -} - -#[macro_export] -macro_rules! pair { - ($pair_str:expr) => {{ - #[allow(dead_code)] - const fn validate_pair(s: &str) -> bool { - let mut count = 0; - let chars = s.as_bytes(); - let mut i = 0; - while i < chars.len() { - if chars[i] == b'/' || chars[i] == b'-' || chars[i] == b'_' { - count += 1; - } - i += 1; - } - count == 1 - } - const _: () = { - assert!( - validate_pair($pair_str), - "Invalid pair format. 
-            );
-        };
-        let normalized = $pair_str.replace('-', "/").replace('_', "/");
-        let parts: Vec<&str> = normalized.split('/').collect();
-        Pair {
-            base: parts[0].trim().to_uppercase(),
-            quote: parts[1].trim().to_uppercase(),
-        }
-    }};
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_pair_macro() {
-        let pair1 = pair!("BTC/USD");
-        assert_eq!(pair1.base, "BTC");
-        assert_eq!(pair1.quote, "USD");
-
-        let pair2 = pair!("btc-usd");
-        assert_eq!(pair2.base, "BTC");
-        assert_eq!(pair2.quote, "USD");
-
-        let pair3 = pair!("eth_usdt");
-        assert_eq!(pair3.base, "ETH");
-        assert_eq!(pair3.quote, "USDT");
-
-        let pair4 = pair!("SOL/usdc");
-        assert_eq!(pair4.base, "SOL");
-        assert_eq!(pair4.quote, "USDC");
-
-        let pair5 = pair!("bTc/uSDt");
-        assert_eq!(pair5.base, "BTC");
-        assert_eq!(pair5.quote, "USDT");
-    }
-
-    #[test]
-    fn test_pair_conversions() {
-        // Test from_currencies
-        let pair = Pair::from_currencies("btc", "usd");
-        assert_eq!(pair.base, "BTC");
-        assert_eq!(pair.quote, "USD");
-
-        // Test create_routed_pair
-        let btc_usd = Pair::from_currencies("btc", "usd");
-        let eth_usd = Pair::from_currencies("eth", "usd");
-        let btc_eth = Pair::create_routed_pair(&btc_usd, &eth_usd);
-        assert_eq!(btc_eth.base, "BTC");
-        assert_eq!(btc_eth.quote, "ETH");
-
-        // Test From<&str>
-        let pair_from_str = Pair::from("btc-usd");
-        assert_eq!(pair_from_str.base, "BTC");
-        assert_eq!(pair_from_str.quote, "USD");
-
-        let pair_from_str = Pair::from("ETH_USDT");
-        assert_eq!(pair_from_str.base, "ETH");
-        assert_eq!(pair_from_str.quote, "USDT");
-
-        let pair_from_str = Pair::from("BTC/USD");
-        assert_eq!(pair_from_str.base, "BTC");
-        assert_eq!(pair_from_str.quote, "USD");
-
-        // Test From<(String, String)>
-        let pair_from_tuple = Pair::from((String::from("btc"), String::from("usdt")));
-        assert_eq!(pair_from_tuple.base, "BTC");
-        assert_eq!(pair_from_tuple.quote, "USDT");
-
-        // Using into()
-        let pair_from_tuple: Pair = (String::from("eth"), String::from("usdc")).into();
-        assert_eq!(pair_from_tuple.base, "ETH");
-        assert_eq!(pair_from_tuple.quote, "USDC");
-
-        // Test as_tuple()
-        let pair = Pair::from_currencies("btc", "usd");
-        let (base, quote) = pair.as_tuple();
-        assert_eq!(base, "BTC");
-        assert_eq!(quote, "USD");
-
-        // Test format_with_separator
-        let pair = Pair::from_currencies("btc", "usd");
-        assert_eq!(pair.format_with_separator("/"), "BTC/USD");
-        assert_eq!(pair.format_with_separator("-"), "BTC-USD");
-        assert_eq!(pair.format_with_separator("_"), "BTC_USD");
-        assert_eq!(pair.to_string(), "BTC/USD");
-    }
-
-    #[test]
-    fn test_from_stable_pair() {
-        // Without delimiter
-        let pair = Pair::from_stable_pair("BTCUSDT").unwrap();
-        assert_eq!(pair.base, "BTC");
-        assert_eq!(pair.quote, "USD");
-
-        // With different delimiters
-        let pair = Pair::from_stable_pair("ETH-USDC").unwrap();
-        assert_eq!(pair.base, "ETH");
-        assert_eq!(pair.quote, "USD");
-
-        let pair = Pair::from_stable_pair("SOL_USDT").unwrap();
-        assert_eq!(pair.base, "SOL");
-        assert_eq!(pair.quote, "USD");
-
-        let pair = Pair::from_stable_pair("MATIC/USD").unwrap();
-        assert_eq!(pair.base, "MATIC");
-        assert_eq!(pair.quote, "USD");
-
-        // Case insensitive
-        let pair = Pair::from_stable_pair("dot-usdt").unwrap();
-        assert_eq!(pair.base, "DOT");
-        assert_eq!(pair.quote, "USD");
-
-        // Invalid cases
-        assert!(Pair::from_stable_pair("INVALID").is_none());
-        assert!(Pair::from_stable_pair("BTC-EUR").is_none());
-    }
-
-    // This test is commented out because it would fail at compile time
-    // #[test]
-    // fn test_invalid_pair() {
-    //     let _pair = generate_pair!("BTC/USD/EUR"); // This will fail at compile time
-    // }
-}
diff --git a/pragma-common/src/types/typed_data.rs b/pragma-common/src/types/typed_data.rs
deleted file mode 100644
index 1108abdf..00000000
--- a/pragma-common/src/types/typed_data.rs
+++ /dev/null
@@ -1,784 +0,0 @@
-// Taken from:
-// https://github.com/dojoengine/dojo/blob/34b13caa785c1149558d28f1a9d9fbd700c4aa2d/crates/torii/libp2p/src/typed_data.rs
-
-use std::str::FromStr;
-
-use cainome::cairo_serde::ByteArray;
-use indexmap::IndexMap;
-use serde::{Deserialize, Serialize};
-use serde_json::Number;
-use starknet::core::types::Felt;
-use starknet::core::utils::{cairo_short_string_to_felt, get_selector_from_name};
-use starknet_crypto::poseidon_hash_many;
-
-use crate::signing::SignerError;
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct SimpleField {
-    pub name: String,
-    pub r#type: String,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct ParentField {
-    pub name: String,
-    pub r#type: String,
-    pub contains: String,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum Field {
-    ParentType(ParentField),
-    SimpleType(SimpleField),
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum PrimitiveType {
-    // All of object types. Including preset types
-    Object(IndexMap<String, PrimitiveType>),
-    Array(Vec<PrimitiveType>),
-    Bool(bool),
-    // comprehensive representation of
-    // String, ShortString, Selector and Felt
-    String(String),
-    // For JSON numbers. Formed into a Felt
-    Number(Number),
-}
-
-fn get_preset_types() -> IndexMap<String, Vec<Field>> {
-    let mut types = IndexMap::new();
-
-    types.insert(
-        "TokenAmount".to_string(),
-        vec![
-            Field::SimpleType(SimpleField {
-                name: "token_address".to_string(),
-                r#type: "ContractAddress".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "amount".to_string(),
-                r#type: "u256".to_string(),
-            }),
-        ],
-    );
-
-    types.insert(
-        "NftId".to_string(),
-        vec![
-            Field::SimpleType(SimpleField {
-                name: "collection_address".to_string(),
-                r#type: "ContractAddress".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "token_id".to_string(),
-                r#type: "u256".to_string(),
-            }),
-        ],
-    );
-
-    types.insert(
-        "u256".to_string(),
-        vec![
-            Field::SimpleType(SimpleField {
-                name: "low".to_string(),
-                r#type: "u128".to_string(),
-            }),
-            Field::SimpleType(SimpleField {
-                name: "high".to_string(),
-                r#type: "u128".to_string(),
-            }),
-        ],
-    );
-
-    types
-}
-
-// Get the fields of a specific type
-// Looks up both the types hashmap as well as the preset types
-// Returns the fields and the hashmap of types
-fn get_fields(name: &str, types: &IndexMap<String, Vec<Field>>) -> Result<Vec<Field>, SignerError> {
-    if let Some(fields) = types.get(name) {
-        return Ok(fields.clone());
-    }
-
-    Err(SignerError::InvalidMessage(format!(
-        "Type {name} not found",
-    )))
-}
-
-fn get_dependencies(
-    name: &str,
-    types: &IndexMap<String, Vec<Field>>,
-    dependencies: &mut Vec<String>,
-) -> Result<(), SignerError> {
-    if dependencies.contains(&name.to_string()) {
-        return Ok(());
-    }
-
-    dependencies.push(name.to_string());
-
-    for field in get_fields(name, types)? {
-        let mut field_type = match field {
-            Field::SimpleType(simple_field) => simple_field.r#type.clone(),
-            Field::ParentType(parent_field) => parent_field.contains.clone(),
-        };
-
-        field_type = field_type.trim_end_matches('*').to_string();
-
-        if types.contains_key(&field_type) && !dependencies.contains(&field_type) {
-            get_dependencies(&field_type, types, dependencies)?;
-        }
-    }
-
-    Ok(())
-}
-
-pub fn encode_type(
-    name: &str,
-    types: &IndexMap<String, Vec<Field>>,
-) -> Result<String, SignerError> {
-    let mut type_hash = String::new();
-
-    // get dependencies
-    let mut dependencies: Vec<String> = Vec::new();
-    get_dependencies(name, types, &mut dependencies)?;
-
-    let (_, rest) = dependencies
-        .split_first_mut()
-        .ok_or_else(|| SignerError::InvalidMessage("No dependencies found".to_string()))?;
-    rest.sort_by_key(|dep| dep.to_lowercase());
-
-    for dep in dependencies {
-        type_hash += &format!("\"{dep}\"");
-
-        type_hash += "(";
-
-        let fields = get_fields(&dep, types)?;
-        for (idx, field) in fields.iter().enumerate() {
-            match field {
-                Field::SimpleType(simple_field) => {
-                    // if ( at start and ) at end
-                    if simple_field.r#type.starts_with('(') && simple_field.r#type.ends_with(')') {
-                        let inner_types = &simple_field.r#type[1..simple_field.r#type.len() - 1]
-                            .split(',')
-                            .map(|t| {
-                                if t.is_empty() {
-                                    t.to_string()
-                                } else {
-                                    format!("\"{t}\"")
-                                }
-                            })
-                            .collect::<Vec<String>>()
-                            .join(",");
-                        type_hash += &format!("\"{}\":({})", simple_field.name, inner_types);
-                    } else {
-                        type_hash +=
-                            &format!("\"{}\":\"{}\"", simple_field.name, simple_field.r#type);
-                    }
-                }
-                Field::ParentType(parent_field) => {
-                    type_hash +=
-                        &format!("\"{}\":\"{}\"", parent_field.name, parent_field.contains);
-                }
-            }
-
-            if idx < fields.len() - 1 {
-                type_hash += ",";
-            }
-        }
-
-        type_hash += ")";
-    }
-
-    Ok(type_hash)
-}
-
-#[derive(Debug, Default)]
-pub struct Ctx {
-    pub base_type: String,
-    pub parent_type: String,
-    pub is_preset: bool,
-}
-
-pub(crate) struct FieldInfo {
-    _name: String,
-    r#type: String,
-    base_type: String,
-    index: usize,
-}
-
-pub(crate) fn get_value_type(
-    name: &str,
-    types: &IndexMap<String, Vec<Field>>,
-) -> Result<FieldInfo, SignerError> {
-    // iter both "types" and "preset_types" to find the field
-    for (idx, (key, value)) in types.iter().enumerate() {
-        if key == name {
-            return Ok(FieldInfo {
-                _name: name.to_string(),
-                r#type: key.clone(),
-                base_type: String::new(),
-                index: idx,
-            });
-        }
-
-        for (idx, field) in value.iter().enumerate() {
-            match field {
-                Field::SimpleType(simple_field) => {
-                    if simple_field.name == name {
-                        return Ok(FieldInfo {
-                            _name: name.to_string(),
-                            r#type: simple_field.r#type.clone(),
-                            base_type: String::new(),
-                            index: idx,
-                        });
-                    }
-                }
-                Field::ParentType(parent_field) => {
-                    if parent_field.name == name {
-                        return Ok(FieldInfo {
-                            _name: name.to_string(),
-                            r#type: parent_field.contains.clone(),
-                            base_type: parent_field.r#type.clone(),
-                            index: idx,
-                        });
-                    }
-                }
-            }
-        }
-    }
-
-    Err(SignerError::InvalidMessage(format!(
-        "Field {name} not found in types",
-    )))
-}
-
-fn get_hex(value: &str) -> Result<Felt, SignerError> {
-    Felt::from_str(value).map_or_else(
-        |_| {
-            cairo_short_string_to_felt(value).map_err(|e| {
-                SignerError::InvalidMessage(format!("Invalid shortstring for felt: {e}"))
-            })
-        },
-        Ok,
-    )
-}
-
-impl PrimitiveType {
-    pub fn encode(
-        &self,
-        r#type: &str,
-        types: &IndexMap<String, Vec<Field>>,
-        ctx: &mut Ctx,
-    ) -> Result<Felt, SignerError> {
-        match self {
-            Self::Object(obj) => {
-                ctx.is_preset = types.contains_key(r#type);
-
-                let mut hashes = Vec::new();
-
-                if ctx.base_type == "enum" {
-                    let (variant_name, value) = obj.first().ok_or_else(|| {
-                        SignerError::InvalidMessage("Enum value must be populated".to_string())
-                    })?;
-                    let variant_type = get_value_type(variant_name, types)?;
-
-                    let arr: &Vec<PrimitiveType> = match value {
-                        Self::Array(arr) => arr,
-                        _ => {
-                            return Err(SignerError::InvalidMessage(
-                                "Enum value must be an array".to_string(),
-                            ));
-                        }
-                    };
-
-                    // variant index
-                    hashes.push(Felt::from(variant_type.index as u32));
-
-                    // variant parameters
-                    for (idx, param) in arr.iter().enumerate() {
-                        let field_type = &variant_type
-                            .r#type
-                            .trim_start_matches('(')
-                            .trim_end_matches(')')
-                            .split(',')
-                            .nth(idx)
-                            .ok_or_else(|| {
-                                SignerError::InvalidMessage("Invalid enum variant type".to_string())
-                            })?;
-
-                        let field_hash = param.encode(field_type, types, ctx)?;
-                        hashes.push(field_hash);
-                    }
-
-                    return Ok(poseidon_hash_many(hashes.as_slice()));
-                }
-
-                let type_hash = encode_type(r#type, types)?;
-                hashes.push(get_selector_from_name(&type_hash).map_err(|e| {
-                    SignerError::InvalidMessage(format!("Invalid type {type} for selector: {e}",))
-                })?);
-
-                for (field_name, value) in obj {
-                    // recheck if we're currently in a preset type
-                    ctx.is_preset = types.contains_key(r#type);
-
-                    // pass correct types - preset or types
-                    let field_type = get_value_type(field_name, types)?;
-                    ctx.base_type = field_type.base_type;
-                    ctx.parent_type = r#type.to_string();
-                    let field_hash = value.encode(field_type.r#type.as_str(), types, ctx)?;
-                    hashes.push(field_hash);
-                }
-
-                Ok(poseidon_hash_many(hashes.as_slice()))
-            }
-            Self::Array(array) => Ok(poseidon_hash_many(
-                array
-                    .iter()
-                    .map(|x| x.encode(r#type.trim_end_matches('*'), types, ctx))
-                    .collect::<Result<Vec<Felt>, _>>()?
-                    .as_slice(),
-            )),
-            Self::Bool(boolean) => {
-                let v = if *boolean {
-                    Felt::from(1_u32)
-                } else {
-                    Felt::from(0_u32)
-                };
-                Ok(v)
-            }
-            Self::String(string) => match r#type {
-                "felt" | "shortstring" | "ContractAddress" | "ClassHash" | "timestamp" | "u128"
-                | "i128" => get_hex(string),
-                "string" => {
-                    // split the string into short strings and encode
-                    let byte_array = ByteArray::from_string(string).map_err(|e| {
-                        SignerError::InvalidMessage(format!("Invalid string for bytearray: {e}"))
-                    })?;
-
-                    let mut hashes = vec![Felt::from(byte_array.data.len())];
-
-                    for hash in byte_array.data {
-                        hashes.push(hash.felt());
-                    }
-
-                    hashes.push(byte_array.pending_word);
-                    hashes.push(Felt::from(byte_array.pending_word_len));
-
-                    Ok(poseidon_hash_many(hashes.as_slice()))
-                }
-                "selector" => get_selector_from_name(string)
-                    .map_err(|e| SignerError::InvalidMessage(format!("Invalid selector: {e}"))),
-                _ => Err(SignerError::InvalidMessage(format!(
-                    "Invalid type {type} for string",
-                ))),
-            },
-            Self::Number(number) => {
-                let felt = Felt::from_str(&number.to_string())
-                    .map_err(|_| SignerError::InvalidMessage(format!("Invalid number {number}")))?;
-                Ok(felt)
-            }
-        }
-    }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct Domain {
-    pub name: String,
-    pub version: String,
-    #[serde(rename = "chainId")]
-    pub chain_id: String,
-    pub revision: Option<String>,
-}
-
-impl Domain {
-    pub fn new(name: &str, version: &str, chain_id: &str, revision: Option<&str>) -> Self {
-        Self {
-            name: name.to_string(),
-            version: version.to_string(),
-            chain_id: chain_id.to_string(),
-            revision: revision.map(ToString::to_string),
-        }
-    }
-
-    pub fn encode(&self, types: &IndexMap<String, Vec<Field>>) -> Result<Felt, SignerError> {
-        let mut object = IndexMap::new();
-
-        object.insert("name".to_string(), PrimitiveType::String(self.name.clone()));
-        object.insert(
-            "version".to_string(),
-            PrimitiveType::String(self.version.clone()),
-        );
-        object.insert(
-            "chainId".to_string(),
-            PrimitiveType::String(self.chain_id.clone()),
-        );
-        if let Some(revision) = &self.revision {
-            object.insert(
-                "revision".to_string(),
-                PrimitiveType::String(revision.clone()),
-            );
-        }
-
-        // we dont need to pass our preset types here. domain should never use a preset type
-        PrimitiveType::Object(object).encode("StarknetDomain", types, &mut Default::default())
-    }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct TypedData {
-    pub types: IndexMap<String, Vec<Field>>,
-    #[serde(rename = "primaryType")]
-    pub primary_type: String,
-    pub domain: Domain,
-    pub message: IndexMap<String, PrimitiveType>,
-}
-
-/// Breakdown of components that make up a typed message hash.
-#[derive(Debug, Clone)]
-pub struct TypedDataHash {
-    /// The final hash of the entire message.
-    pub hash: Felt,
-    /// Hash of the `domain_separator` component.
-    #[allow(dead_code)]
-    pub domain_separator_hash: Felt,
-    /// Hash of the `message` component.
-    #[allow(dead_code)]
-    pub message_hash: Felt,
-}
-
-impl TypedData {
-    pub fn new(
-        types: IndexMap<String, Vec<Field>>,
-        primary_type: &str,
-        domain: Domain,
-        message: IndexMap<String, PrimitiveType>,
-    ) -> Self {
-        Self {
-            types,
-            primary_type: primary_type.to_string(),
-            domain,
-            message,
-        }
-    }
-
-    pub fn encode(&self, account: Felt) -> Result<TypedDataHash, SignerError> {
-        let preset_types = get_preset_types();
-
-        // Combine types and preset_types
-        let mut all_types = preset_types;
-        all_types.extend(self.types.clone());
-
-        if self.domain.revision.clone().unwrap_or("1".to_string()) != "1" {
-            return Err(SignerError::InvalidMessage(
-                "Legacy revision 0 is not supported".to_string(),
-            ));
-        }
-
-        let prefix_message = cairo_short_string_to_felt("StarkNet Message").unwrap();
-
-        // encode domain separator
-        let domain_hash = self.domain.encode(&self.types)?;
-
-        // encode message
-        let message_hash = PrimitiveType::Object(self.message.clone()).encode(
-            &self.primary_type,
-            &all_types,
-            &mut Default::default(),
-        )?;
-
-        // return full hash
-        Ok(TypedDataHash {
-            hash: poseidon_hash_many(
-                vec![prefix_message, domain_hash, account, message_hash].as_slice(),
-            ),
-            domain_separator_hash: domain_hash,
-            message_hash,
-        })
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use rstest::*;
-    use starknet::core::types::Felt;
-    use starknet::core::utils::starknet_keccak;
-
-    use super::*;
-
-    #[rstest]
-    #[case(EXAMPLE_BASE_TYPES)]
-    #[case(EXAMPLE_ENUM)]
-    #[case(EXAMPLE_PRESET_TYPES)]
-    #[case(MAIL_STRUCT_ARRAY)]
-    fn test_read_json(#[case] json_data: &str) {
-        let reader = std::io::BufReader::new(json_data.as_bytes());
-
-        let typed_data: TypedData = serde_json::from_reader(reader).unwrap();
-
-        // Optionally, you can assert certain properties of `typed_data`
-        // For now, we'll just ensure that deserialization succeeds
-        assert!(!typed_data.types.is_empty());
-        assert!(!typed_data.primary_type.is_empty());
-    }
-
-    #[rstest]
-    #[case(
-        EXAMPLE_BASE_TYPES,
-        "\"Example\"(\"n0\":\"felt\",\"n1\":\"bool\",\"n2\":\"string\",\"n3\":\"selector\",\"n4\":\"u128\",\"n5\":\"i128\",\"n6\":\"ContractAddress\",\"n7\":\"ClassHash\",\"n8\":\"timestamp\",\"n9\":\"shortstring\")"
-    )]
-    #[case(
-        MAIL_STRUCT_ARRAY,
-        "\"Mail\"(\"from\":\"Person\",\"to\":\"Person\",\"posts_len\":\"felt\",\"posts\":\"Post*\")\"Person\"(\"name\":\"felt\",\"wallet\":\"felt\")\"Post\"(\"title\":\"felt\",\"content\":\"felt\")"
-    )]
-    #[case(
-        EXAMPLE_ENUM,
-        "\"Example\"(\"someEnum\":\"MyEnum\")\"MyEnum\"(\"Variant 1\":(),\"Variant 2\":(\"u128\",\"u128*\"),\"Variant 3\":(\"u128\"))"
-    )]
-    #[case(
-        EXAMPLE_PRESET_TYPES,
"\"Example\"(\"n0\":\"TokenAmount\",\"n1\":\"NftId\")" - )] - fn test_type_encode(#[case] json_data: &str, #[case] expected_encoded: &str) { - let reader = std::io::BufReader::new(json_data.as_bytes()); - - let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); - - let encoded = encode_type(&typed_data.primary_type, &typed_data.types).unwrap(); - - assert_eq!(encoded, expected_encoded); - } - - #[test] - fn test_selector_encode() { - let selector = PrimitiveType::String("transfer".to_string()); - let selector_hash = PrimitiveType::String(starknet_keccak(b"transfer").to_string()); - - let preset_types = get_preset_types(); - - let encoded_selector = selector - .encode("selector", &preset_types, &mut Default::default()) - .unwrap(); - let raw_encoded_selector = selector_hash - .encode("felt", &preset_types, &mut Default::default()) - .unwrap(); - - assert_eq!(encoded_selector, raw_encoded_selector); - assert_eq!(encoded_selector, starknet_keccak(b"transfer")); - } - - #[test] - fn test_domain_hash() { - let reader = std::io::BufReader::new(EXAMPLE_BASE_TYPES.as_bytes()); - - let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); - - let domain_hash = typed_data.domain.encode(&typed_data.types).unwrap(); - - assert_eq!( - domain_hash, - Felt::from_hex("0x555f72e550b308e50c1a4f8611483a174026c982a9893a05c185eeb85399657") - .unwrap() - ); - } - - #[rstest] - #[case( - EXAMPLE_BASE_TYPES, - "0xcd2a3d9f938e13cd947ec05abc7fe734df8dd826", - "0x2d80b87b8bc32068247c779b2ef0f15f65c9c449325e44a9df480fb01eb43ec" - )] - #[case( - EXAMPLE_ENUM, - "0xcd2a3d9f938e13cd947ec05abc7fe734df8dd826", - "0x3df10475ad5a8f49db4345a04a5b09164d2e24b09f6e1e236bc1ccd87627cc" - )] - #[case( - EXAMPLE_PRESET_TYPES, - "0xcd2a3d9f938e13cd947ec05abc7fe734df8dd826", - "0x185b339d5c566a883561a88fb36da301051e2c0225deb325c91bb7aa2f3473a" - )] - #[case( - MAIL_STRUCT_ARRAY, - "0xcd2a3d9f938e13cd947ec05abc7fe734df8dd826", - "0x1df06fd32d689b5431a784a33b02314cc7f395f3bda3ecedf97deafaa66ea31" - )] - fn test_message_hash( - #[case] example_data: &str, - #[case] address_hex: &str, - #[case] expected_hash_hex: &str, - ) { - let address = Felt::from_str(address_hex).unwrap(); - - let reader = std::io::BufReader::new(example_data.as_bytes()); - - let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); - - let message_hash = typed_data.encode(address).unwrap().hash; - - assert_eq!(message_hash, Felt::from_str(expected_hash_hex).unwrap()); - } - - const EXAMPLE_BASE_TYPES: &str = r#" -{ - "types": { - "StarknetDomain": [ - { "name": "name", "type": "shortstring" }, - { "name": "version", "type": "shortstring" }, - { "name": "chainId", "type": "shortstring" }, - { "name": "revision", "type": "shortstring" } - ], - "Example": [ - { "name": "n0", "type": "felt" }, - { "name": "n1", "type": "bool" }, - { "name": "n2", "type": "string" }, - { "name": "n3", "type": "selector" }, - { "name": "n4", "type": "u128" }, - { "name": "n5", "type": "i128" }, - { "name": "n6", "type": "ContractAddress" }, - { "name": "n7", "type": "ClassHash" }, - { "name": "n8", "type": "timestamp" }, - { "name": "n9", "type": "shortstring" } - ] - }, - "primaryType": "Example", - "domain": { - "name": "StarkNet Mail", - "version": "1", - "chainId": "1", - "revision": "1" - }, - "message": { - "n0": "0x3e8", - "n1": true, - "n2": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.", - "n3": "transfer", - "n4": "0x3e8", - "n5": 
"-170141183460469231731687303715884105727", - "n6": "0x3e8", - "n7": "0x3e8", - "n8": 1000, - "n9": "transfer" - } -}"#; - - const EXAMPLE_ENUM: &str = r#" -{ - "types": { - "StarknetDomain": [ - { "name": "name", "type": "shortstring" }, - { "name": "version", "type": "shortstring" }, - { "name": "chainId", "type": "shortstring" }, - { "name": "revision", "type": "shortstring" } - ], - "Example": [{ "name": "someEnum", "type": "enum", "contains": "MyEnum" }], - "MyEnum": [ - { "name": "Variant 1", "type": "()" }, - { "name": "Variant 2", "type": "(u128,u128*)" }, - { "name": "Variant 3", "type": "(u128)" } - ] - }, - "primaryType": "Example", - "domain": { - "name": "StarkNet Mail", - "version": "1", - "chainId": "1", - "revision": "1" - }, - "message": { - "someEnum": { - "Variant 2": [2, [0, 1]] - } - } -}"#; - - const EXAMPLE_PRESET_TYPES: &str = r#" -{ - "types": { - "StarknetDomain": [ - { "name": "name", "type": "shortstring" }, - { "name": "version", "type": "shortstring" }, - { "name": "chainId", "type": "shortstring" }, - { "name": "revision", "type": "shortstring" } - ], - "Example": [ - { "name": "n0", "type": "TokenAmount" }, - { "name": "n1", "type": "NftId" } - ] - }, - "primaryType": "Example", - "domain": { - "name": "StarkNet Mail", - "version": "1", - "chainId": "1", - "revision": "1" - }, - "message": { - "n0": { - "token_address": "0x049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", - "amount": { - "low": "0x3e8", - "high": "0x0" - } - }, - "n1": { - "collection_address": "0x049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", - "token_id": { - "low": "0x3e8", - "high": "0x0" - } - } - } -}"#; - - const MAIL_STRUCT_ARRAY: &str = r#" -{ - "types": { - "StarknetDomain": [ - { "name": "name", "type": "shortstring" }, - { "name": "version", "type": "shortstring" }, - { "name": "chainId", "type": "shortstring" }, - { "name": "revision", "type": "shortstring" } - ], - "Person": [ - { "name": "name", "type": "felt" }, - { "name": "wallet", "type": "felt" } - ], - "Post": [ - { "name": "title", "type": "felt" }, - { "name": "content", "type": "felt" } - ], - "Mail": [ - { "name": "from", "type": "Person" }, - { "name": "to", "type": "Person" }, - { "name": "posts_len", "type": "felt" }, - { "name": "posts", "type": "Post*" } - ] - }, - "primaryType": "Mail", - "domain": { - "name": "StarkNet Mail", - "version": "1", - "chainId": "1", - "revision": "1" - }, - "message": { - "from": { - "name": "Cow", - "wallet": "0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826" - }, - "to": { - "name": "Bob", - "wallet": "0xbBbBBBBbbBBBbbbBbbBbbbbBBbBbbbbBbBbbBBbB" - }, - "posts_len": 2, - "posts": [ - { "title": "Greeting", "content": "Hello, Bob!" }, - { "title": "Farewell", "content": "Goodbye, Bob!" } - ] - } -}"#; -} diff --git a/pragma-common/src/types/utils.rs b/pragma-common/src/types/utils.rs deleted file mode 100644 index 136f452b..00000000 --- a/pragma-common/src/types/utils.rs +++ /dev/null @@ -1,32 +0,0 @@ -use serde::{Deserialize as _, Deserializer}; -use starknet::core::types::Felt; -use std::str::FromStr; - -/// Deserializes a vector of Felt from a JSON array of strings. -pub fn felt_from_decimal<'de, D>(deserializer: D) -> Result, D::Error> -where - D: Deserializer<'de>, -{ - let s: Vec = Vec::deserialize(deserializer)?; - Ok(s.iter().map(|s| Felt::from_dec_str(s).unwrap()).collect()) -} - -/// Deserializes a u128 from a JSON string or number. 
-pub fn flexible_u128<'de, D>(deserializer: D) -> Result<u128, D::Error>
-where
-    D: Deserializer<'de>,
-{
-    use serde::de::Error;
-
-    // Try deserializing to Value first to handle both formats
-    let value = serde_json::Value::deserialize(deserializer)?;
-
-    match value {
-        serde_json::Value::String(s) => u128::from_str(&s).map_err(D::Error::custom),
-        serde_json::Value::Number(n) => {
-            let s = n.to_string();
-            u128::from_str(&s).map_err(D::Error::custom)
-        }
-        _ => Err(D::Error::custom("expected string or number")),
-    }
-}
diff --git a/pragma-common/src/utils.rs b/pragma-common/src/utils.rs
deleted file mode 100644
index 7624d750..00000000
--- a/pragma-common/src/utils.rs
+++ /dev/null
@@ -1,17 +0,0 @@
-use starknet::core::types::Felt;
-
-/// Returns a Field Element as a hexadecimal string representation.
-pub fn field_element_as_hex_string(f: &Felt) -> String {
-    format!("{f:#x}")
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_field_element_as_hex_string() {
-        let f = Felt::from(123_456);
-        assert_eq!(field_element_as_hex_string(&f), "0x1e240");
-    }
-}
diff --git a/pragma-consumer/Cargo.toml b/pragma-consumer/Cargo.toml
deleted file mode 100644
index 0162609b..00000000
--- a/pragma-consumer/Cargo.toml
+++ /dev/null
@@ -1,26 +0,0 @@
-[package]
-name = "pragma-consumer"
-version = "0.1.1"
-edition = "2021"
-license = "MIT"
-homepage = "https://pragma.build/"
-repository = "https://github.com/astraly-labs/pragma-node/"
-description = "SDK to consume Pragma data feeds"
-readme = "README.md"
-keywords = ["pragma", "sdk", "consumer", "data", "feeds"]
-
-[lints]
-workspace = true
-
-[dependencies]
-pragma-common = { workspace = true }
-
-reqwest = { workspace = true }
-serde_json = { workspace = true }
-starknet = { workspace = true }
-thiserror = { workspace = true }
-tokio = { workspace = true, features = ["full"] }
-
-[dev-dependencies]
-rstest = { workspace = true }
-httpmock = { workspace = true, features = ["remote"] }
diff --git a/pragma-consumer/README.md b/pragma-consumer/README.md
deleted file mode 100644
index 3cdea88b..00000000
--- a/pragma-consumer/README.md
+++ /dev/null
@@ -1,166 +0,0 @@
-# Pragma Consumer SDK
-
-The Pragma Consumer SDK is used to fetch options and their associated Merkle proofs
-so you can use them in our Pragma Oracle contract to interact with the Merkle Feed published on-chain.
-
-We have [examples](./examples/src/) to help you get started.
-
-## Installation
-
-Add this to your `Cargo.toml`:
-
-```toml
-[dependencies]
-pragma-consumer = "0.1.0"
-```
-
-## Quick Start
-
-```rust
-use pragma_consumer::builder::PragmaConsumerBuilder;
-use pragma_consumer::config::{ApiConfig, PragmaBaseUrl};
-use pragma_consumer::macros::instrument;
-use pragma_consumer::types::{BlockId, Instrument};
-
-#[tokio::main]
-async fn main() -> Result<(), ()> {
-    let api_config = ApiConfig {
-        base_url: PragmaBaseUrl::Prod,
-        api_key: "your_api_key".into(),
-    };
-
-    let consumer = PragmaConsumerBuilder::new()
-        .on_mainnet()
-        .with_http(api_config)
-        .await
-        .unwrap();
-
-    let instrument = instrument!("BTC-16AUG24-52000-P");
-
-    let result = consumer
-        .get_merkle_feed_calldata(&instrument, None) // None = Pending block by default
-        .await
-        .unwrap();
-
-    // Use the calldata with the pragma-oracle contract...
- println!("Hex calldata: {}", result.as_hex_calldata()); - - // result.calldata() returns the calldata wrapped with Felt - // from starknet-rs 0.9.0 -} -``` - -## Usage - -### Configure the API connection - -Create an instance of an `ApiConfig` object: - -```rust -let api_config = ApiConfig { - // This will use our dev API - base_url: PragmaBaseUrl::Dev, // or PragmaBaseUrl::Prod - api_key: "your_api_key".into(), -}; - -// If you need a custom url, you can do: -let api_config = ApiConfig { - base_url: PragmaBaseUrl::Custom("http://localhost:3000".into()), - api_key: "your_api_key".into(), -}; -``` - -### Initializing the Consumer - -Create a `PragmaConsumer` instance using the builder pattern: - -```rust -let consumer = PragmaConsumerBuilder::new() - .on_sepolia() // or .on_mainnet() - .with_http(api_config) - .await?; -``` - -**NOTE**: By default, the network will be `Sepolia` if you don't specify it: - -```rust -let consumer = PragmaConsumerBuilder::new() - .with_http(api_config) - .await?; -``` - -You can also add a `check_api_health` call to the builder to make sure the connection with the PragmAPI is healthy: - -```rust -let consumer = PragmaConsumerBuilder::new() - .check_api_health() - .with_http(api_config) - .await?; -``` - -### Fetching Merkle Feed Calldata - -Use the `get_merkle_feed_calldata` method to fetch the necessary data for interacting with the Pragma Oracle: - -```rust -let calldata = consumer - .get_merkle_feed_calldata(&instrument, block_number) - .await?; -``` - -### Creating Instruments - -You can create an Instrument in two ways: - -#### 1. Using the `instrument!` macro: - -```rust -let instrument = instrument!("BTC-16AUG24-52000-P"); -``` - -#### 2. Manually constructing the `Instrument` struct: - -```rust -use pragma_consumer::{Instrument, OptionCurrency, OptionType}; -use bigdecimal::BigDecimal; -use chrono::NaiveDate; - -let instrument = Instrument { - base_currency: OptionCurrency::BTC, - expiration_date: NaiveDate::from_ymd_opt(2024, 8, 16).unwrap(), - strike_price: BigDecimal::from(52000).unwrap(), - option_type: OptionType::Put -}; -``` - -You can retrieve the name of an instrument with the `name()` method: - -```rust -println!("{}", instrument.name()); - -// BTC-16AUG24-52000-P -``` - -### Specifying Block ID - -You can specify the block in different ways: - -```rust -use pragma_consumer::types::{BlockId, BlockTag}; - -// Using a specific block number -let block = BlockId::Number(85925); - -// Using the latest block -let block = BlockId::Tag(BlockTag::Latest); - -// Using the pending block -let block = BlockId::Tag(BlockTag::Pending); -``` - -### Error Handling - -The SDK uses the `thiserror` crate for error handling. The two main errors types are: - -- `builder::BuilderError` for errors during the `PragmaConsumer` building, -- `conssumer::ConsumerError` for errors during the fetching of the option and the merkle proof. 
diff --git a/pragma-consumer/examples/.gitignore b/pragma-consumer/examples/.gitignore
deleted file mode 100644
index 64bf0eb4..00000000
--- a/pragma-consumer/examples/.gitignore
+++ /dev/null
@@ -1,21 +0,0 @@
-# Generated by Cargo
-# will have compiled files and executables
-debug/
-target/
-
-# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
-# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
-Cargo.lock
-
-# These are backup files generated by rustfmt
-**/*.rs.bk
-
-# MSVC Windows builds of rustc generate these, which store debugging information
-*.pdb
-
-# RustRover
-# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
-# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
-# and can be added to the global gitignore or merged into this file. For a more nuclear
-# option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
diff --git a/pragma-consumer/examples/README.md b/pragma-consumer/examples/README.md
deleted file mode 100644
index b0b6c672..00000000
--- a/pragma-consumer/examples/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Pragma Consumer Examples
-
-A suite of examples showing how to use the Pragma Consumer SDK.
-
-## Run example
-
-e.g.
-
-```bash
-cargo run --example mainnet_specific_block
-```
diff --git a/pragma-consumer/examples/complete_flow.rs b/pragma-consumer/examples/complete_flow.rs
deleted file mode 100644
index 341f6975..00000000
--- a/pragma-consumer/examples/complete_flow.rs
+++ /dev/null
@@ -1,75 +0,0 @@
-use pragma_consumer::builder::PragmaConsumerBuilder;
-use pragma_consumer::config::{ApiConfig, PragmaBaseUrl};
-use pragma_consumer::macros::instrument;
-use pragma_consumer::types::Instrument;
-use reqwest::Url;
-use starknet::accounts::{Account, ExecutionEncoding, SingleOwnerAccount};
-use starknet::core::types::{Call, Felt};
-use starknet::core::utils::get_selector_from_name;
-use starknet::providers::jsonrpc::HttpTransport;
-use starknet::providers::JsonRpcClient;
-use starknet::signers::{LocalWallet, SigningKey};
-
-#[tokio::main]
-async fn main() -> Result<(), ()> {
-    let api_config = ApiConfig {
-        base_url: PragmaBaseUrl::Dev,
-        api_key: String::new(),
-    };
-
-    let consumer = PragmaConsumerBuilder::new()
-        .with_http(api_config)
-        .await
-        .unwrap();
-
-    let instrument = instrument!("BTC-30AUG24-52000-C");
-
-    let result = consumer
-        .get_merkle_feed_calldata(&instrument, None)
-        .await
-        .unwrap();
-
-    let _ = dbg!(&result);
-    // Use the calldata with the pragma-oracle contract...
-    let _ = dbg!(&result.as_hex_calldata());
-
-    // Use the calldata with the pragma-oracle contract...
-    let calldata = result.as_calldata().unwrap();
-
-    let provider = JsonRpcClient::new(HttpTransport::new(
-        Url::parse("https://starknet-sepolia.public.blastapi.io/rpc/v0_7").unwrap(),
-    ));
-
-    let signer = LocalWallet::from(SigningKey::from_secret_scalar(
-        Felt::from_hex("").unwrap(),
-    ));
-    let address = Felt::from_hex("").unwrap();
-    let summary_stats_address =
-        Felt::from_hex("0x0379afb83d2f8e38ab08252750233665a812a24278aacdde52475618edbf879c")
-            .unwrap();
-
-    let mut account = SingleOwnerAccount::new(
-        provider,
-        signer,
-        address,
-        Felt::from_hex("0x534e5f5345504f4c4941").unwrap(), // SN_SEPOLIA
-        ExecutionEncoding::New,
-    );
-    account.set_block_id(starknet::core::types::BlockId::Tag(
-        starknet::core::types::BlockTag::Pending,
-    ));
-
-    let result = account
-        .execute_v1(vec![Call {
-            to: summary_stats_address,
-            selector: get_selector_from_name("update_options_data").unwrap(),
-            calldata,
-        }])
-        .send()
-        .await
-        .unwrap();
-
-    println!("Transaction hash: {:#064x}", result.transaction_hash);
-
-    Ok(())
-}
diff --git a/pragma-consumer/examples/mainnet_specific_block.rs b/pragma-consumer/examples/mainnet_specific_block.rs
deleted file mode 100644
index b85dc0a2..00000000
--- a/pragma-consumer/examples/mainnet_specific_block.rs
+++ /dev/null
@@ -1,31 +0,0 @@
-use pragma_consumer::builder::PragmaConsumerBuilder;
-use pragma_consumer::config::{ApiConfig, PragmaBaseUrl};
-use pragma_consumer::macros::instrument;
-use pragma_consumer::types::{BlockId, Instrument};
-
-#[tokio::main]
-async fn main() -> Result<(), ()> {
-    let api_config = ApiConfig {
-        base_url: PragmaBaseUrl::Prod,
-        api_key: String::new(),
-    };
-
-    let consumer = PragmaConsumerBuilder::new()
-        .on_mainnet() // Sepolia by default
-        .with_http(api_config)
-        .await
-        .unwrap();
-
-    let current_block = BlockId::Number(85925);
-    let instrument = instrument!("BTC-16AUG24-52000-P");
-
-    let result = consumer
-        .get_merkle_feed_calldata(&instrument, Some(current_block))
-        .await
-        .unwrap();
-
-    let _ = dbg!(&result);
-    // Use the calldata with the pragma-oracle contract...
-    let _ = dbg!(&result.as_hex_calldata());
-    Ok(())
-}
diff --git a/pragma-consumer/examples/sepolia_custom_url_latest_block.rs b/pragma-consumer/examples/sepolia_custom_url_latest_block.rs
deleted file mode 100644
index 23686120..00000000
--- a/pragma-consumer/examples/sepolia_custom_url_latest_block.rs
+++ /dev/null
@@ -1,31 +0,0 @@
-use pragma_consumer::builder::PragmaConsumerBuilder;
-use pragma_consumer::config::{ApiConfig, PragmaBaseUrl};
-use pragma_consumer::macros::instrument;
-use pragma_consumer::types::{BlockId, BlockTag, Instrument};
-
-#[tokio::main]
-async fn main() -> Result<(), ()> {
-    let api_config = ApiConfig {
-        base_url: PragmaBaseUrl::Custom("http://localhost:3000".into()),
-        api_key: String::new(),
-    };
-
-    let consumer = PragmaConsumerBuilder::new()
-        .on_sepolia() // Sepolia by default
-        .with_http(api_config)
-        .await
-        .unwrap();
-
-    let instrument = instrument!("BTC-16AUG24-52000-P");
-
-    let block = BlockId::Tag(BlockTag::Latest);
-    let result = consumer
-        .get_merkle_feed_calldata(&instrument, Some(block))
-        .await
-        .unwrap();
-
-    let _ = dbg!(&result);
-    // Use the calldata with the pragma-oracle contract...
-    let _ = dbg!(&result.as_hex_calldata());
-    Ok(())
-}
diff --git a/pragma-consumer/examples/sepolia_custom_url_pending_block.rs b/pragma-consumer/examples/sepolia_custom_url_pending_block.rs
deleted file mode 100644
index 90c78e73..00000000
--- a/pragma-consumer/examples/sepolia_custom_url_pending_block.rs
+++ /dev/null
@@ -1,30 +0,0 @@
-use pragma_consumer::builder::PragmaConsumerBuilder;
-use pragma_consumer::config::{ApiConfig, PragmaBaseUrl};
-use pragma_consumer::macros::instrument;
-use pragma_consumer::types::Instrument;
-
-#[tokio::main]
-async fn main() -> Result<(), ()> {
-    let api_config = ApiConfig {
-        base_url: PragmaBaseUrl::Custom("http://localhost:3000".into()),
-        api_key: String::new(),
-    };
-
-    let consumer = PragmaConsumerBuilder::new()
-        .on_sepolia() // Sepolia by default
-        .with_http(api_config)
-        .await
-        .unwrap();
-
-    let instrument = instrument!("BTC-16AUG24-52000-P");
-
-    let result = consumer
-        .get_merkle_feed_calldata(&instrument, None) // Pending block by default
-        .await
-        .unwrap();
-
-    let _ = dbg!(&result);
-    // Use the calldata with the pragma-oracle contract...
-    let _ = dbg!(&result.as_hex_calldata());
-    Ok(())
-}
diff --git a/pragma-consumer/src/builder.rs b/pragma-consumer/src/builder.rs
deleted file mode 100644
index 5d973246..00000000
--- a/pragma-consumer/src/builder.rs
+++ /dev/null
@@ -1,113 +0,0 @@
-use pragma_common::types::Network;
-use reqwest::{
-    header::{HeaderValue, InvalidHeaderValue},
-    StatusCode,
-};
-
-use crate::{
-    config::{ApiConfig, PragmaBaseUrl},
-    constants::PRAGMAPI_HEALTHCHECK_ENDPOINT,
-    consumer::PragmaConsumer,
-};
-
-#[derive(thiserror::Error, Debug)]
-pub enum BuilderError {
-    #[error("HTTP request to the pragmAPI failed with status `{0}`")]
-    HttpRequest(StatusCode),
-    #[error(transparent)]
-    Reqwest(#[from] reqwest::Error),
-    #[error("unexpected health check response: `{0}`")]
-    HealthCheck(String),
-    #[error(transparent)]
-    Header(#[from] InvalidHeaderValue),
-}
-
-/// Builder of the Pragma consumer client.
-/// Default network is Sepolia.
-#[derive(Default, Debug)]
-pub struct PragmaConsumerBuilder {
-    network: Network,
-    check_api_health: bool,
-}
-
-impl PragmaConsumerBuilder {
-    pub fn new() -> Self {
-        Self::default()
-    }
-
-    #[must_use]
-    pub const fn on_mainnet(self) -> Self {
-        self.on_network(Network::Mainnet)
-    }
-
-    #[must_use]
-    pub const fn on_sepolia(self) -> Self {
-        self.on_network(Network::Sepolia)
-    }
-
-    #[must_use]
-    const fn on_network(mut self, network: Network) -> Self {
-        self.network = network;
-        self
-    }
-
-    /// Performs a health check with the `PragmAPI` to make sure the connection is
-    /// successfully established.
-    #[must_use]
-    pub const fn check_api_health(mut self) -> Self {
-        self.check_api_health = true;
-        self
-    }
-
-    pub async fn with_http(self, api_config: ApiConfig) -> Result<PragmaConsumer, BuilderError> {
-        let http_client = Self::build_http_client(&api_config)?;
-
-        if self.check_api_health {
-            self.http_health_check(&http_client, &api_config.base_url)
-                .await?;
-        }
-
-        Ok(PragmaConsumer {
-            network: self.network,
-            http_client,
-            base_url: api_config.base_url,
-        })
-    }
-
-    fn build_http_client(api_config: &ApiConfig) -> Result<reqwest::Client, BuilderError> {
-        Ok(reqwest::Client::builder()
-            .default_headers({
-                let mut headers = reqwest::header::HeaderMap::new();
-                headers.insert(
-                    "x-api-key",
-                    HeaderValue::from_str(&api_config.api_key).map_err(BuilderError::Header)?,
-                );
-                headers
-            })
-            .build()?)
-    }
-
-    async fn http_health_check(
-        &self,
-        client: &reqwest::Client,
-        base_url: &PragmaBaseUrl,
-    ) -> Result<(), BuilderError> {
-        let health_check_url = format!("{}/{}", base_url.url(), PRAGMAPI_HEALTHCHECK_ENDPOINT);
-        let response = client
-            .get(&health_check_url)
-            .send()
-            .await
-            .map_err(BuilderError::Reqwest)?;
-
-        if response.status() != StatusCode::OK {
-            return Err(BuilderError::HttpRequest(response.status()));
-        }
-
-        let body = response.text().await?;
-        if body.trim() != "Server is running!" {
-            return Err(BuilderError::HealthCheck(body));
-        }
-
-        Ok(())
-    }
-}
diff --git a/pragma-consumer/src/config.rs b/pragma-consumer/src/config.rs
deleted file mode 100644
index 7585c4ab..00000000
--- a/pragma-consumer/src/config.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-/// `PragmAPI` base URL. Can be either Dev, Prod, or a custom URL.
-#[derive(Debug, Clone)]
-pub enum PragmaBaseUrl {
-    Dev,
-    Prod,
-    Custom(String),
-}
-
-impl PragmaBaseUrl {
-    pub fn url(&self) -> &str {
-        match self {
-            Self::Dev => "https://api.dev.pragma.build",
-            Self::Prod => "https://api.prod.pragma.build",
-            Self::Custom(url) => url,
-        }
-    }
-}
-
-/// Required fields to connect to our `PragmAPI`.
-#[derive(Debug, Clone)]
-pub struct ApiConfig {
-    pub base_url: PragmaBaseUrl,
-    pub api_key: String,
-}
diff --git a/pragma-consumer/src/constants.rs b/pragma-consumer/src/constants.rs
deleted file mode 100644
index d4bf9ac7..00000000
--- a/pragma-consumer/src/constants.rs
+++ /dev/null
@@ -1,5 +0,0 @@
-/// The path prefix of our API, containing the version.
-pub(crate) const PRAGMAPI_PATH_PREFIX: &str = "node/v1/merkle_feeds";
-
-/// Endpoint that can be called (without the prefix) to healthcheck the HTTP connection.
-pub(crate) const PRAGMAPI_HEALTHCHECK_ENDPOINT: &str = "node";
diff --git a/pragma-consumer/src/consumer.rs b/pragma-consumer/src/consumer.rs
deleted file mode 100644
index 0523c719..00000000
--- a/pragma-consumer/src/consumer.rs
+++ /dev/null
@@ -1,111 +0,0 @@
-use reqwest::{Response, StatusCode};
-
-use pragma_common::types::{
-    block_id::{BlockId, BlockTag},
-    merkle_tree::MerkleProof,
-    options::{Instrument, OptionData},
-    Network,
-};
-
-use crate::{config::PragmaBaseUrl, constants::PRAGMAPI_PATH_PREFIX, types::MerkleFeedCalldata};
-
-#[derive(thiserror::Error, Debug)]
-pub enum ConsumerError {
-    #[error("HTTP request to the pragmAPI failed with status `{0}`")]
-    HttpRequest(StatusCode),
-    #[error("could not decode the HTTP response: `{0}`")]
-    Decode(String),
-    #[error(transparent)]
-    Reqwest(#[from] reqwest::Error),
-    #[error(transparent)]
-    Serde(#[from] serde_json::Error),
-    #[error("could not compute the pedersen hash for option: `{0:?}`")]
-    OptionHash(OptionData),
-}
-
-pub struct PragmaConsumer {
-    pub(crate) network: Network,
-    pub(crate) http_client: reqwest::Client,
-    pub(crate) base_url: PragmaBaseUrl,
-}
-
-impl PragmaConsumer {
-    /// Queries the `PragmAPI` and returns the necessary calldata to use
-    /// with our Oracle contract.
-    pub async fn get_merkle_feed_calldata(
-        &self,
-        instrument: &Instrument,
-        block_id: Option<BlockId>,
-    ) -> Result<MerkleFeedCalldata, ConsumerError> {
-        let block_id = block_id.unwrap_or(BlockId::Tag(BlockTag::Pending));
-        let option_data = self.request_option(instrument.name(), block_id).await?;
-        let option_hash = option_data
-            .pedersen_hash_as_hex_string()
-            .map_err(|_| ConsumerError::OptionHash(option_data.clone()))?;
-
-        let merkle_proof = self.request_merkle_proof(option_hash, block_id).await?;
-
-        Ok(MerkleFeedCalldata {
-            merkle_proof,
-            option_data,
-        })
-    }
-
-    /// Requests from our `PragmAPI` the option data for a given instrument name at a
-    /// certain block.
-    async fn request_option(
-        &self,
-        instrument_name: String,
-        block_id: BlockId,
-    ) -> Result<OptionData, ConsumerError> {
-        let url = format!(
-            "{}/{}/options/{}?network={}&block_id={}",
-            self.base_url.url(),
-            PRAGMAPI_PATH_PREFIX,
-            instrument_name,
-            self.network,
-            block_id,
-        );
-
-        let api_response = self.request_api(url).await?;
-        if api_response.status() != StatusCode::OK {
-            return Err(ConsumerError::HttpRequest(api_response.status()));
-        }
-
-        let contents = api_response.text().await.map_err(ConsumerError::Reqwest)?;
-        serde_json::from_str(&contents).map_err(ConsumerError::Serde)
-    }
-
-    /// Requests from our `PragmAPI` the Merkle proof for a hash at a certain block.
-    async fn request_merkle_proof(
-        &self,
-        option_hash: String,
-        block_id: BlockId,
-    ) -> Result<MerkleProof, ConsumerError> {
-        let url = format!(
-            "{}/{}/proof/{}?network={}&block_id={}",
-            self.base_url.url(),
-            PRAGMAPI_PATH_PREFIX,
-            option_hash,
-            self.network,
-            block_id,
-        );
-
-        let api_response = self.request_api(url).await?;
-        if api_response.status() != StatusCode::OK {
-            return Err(ConsumerError::HttpRequest(api_response.status()));
-        }
-
-        let contents = api_response.text().await.map_err(ConsumerError::Reqwest)?;
-        serde_json::from_str(&contents).map_err(ConsumerError::Serde)
-    }
-
-    /// Utility function to make an HTTP GET request to a provided URL.
-    async fn request_api(&self, url: String) -> Result<Response, ConsumerError> {
-        self.http_client
-            .get(url)
-            .send()
-            .await
-            .map_err(ConsumerError::Reqwest)
-    }
-}
diff --git a/pragma-consumer/src/lib.rs b/pragma-consumer/src/lib.rs
deleted file mode 100644
index 7bff7570..00000000
--- a/pragma-consumer/src/lib.rs
+++ /dev/null
@@ -1,11 +0,0 @@
-pub mod builder;
-pub mod config;
-pub(crate) mod constants;
-pub mod consumer;
-pub mod types;
-
-/// Re-export of some types from our common library so they're publicly accessible
-/// through the SDK.
-pub mod macros {
-    pub use pragma_common::instrument;
-}
diff --git a/pragma-consumer/src/types.rs b/pragma-consumer/src/types.rs
deleted file mode 100644
index afe1f69b..00000000
--- a/pragma-consumer/src/types.rs
+++ /dev/null
@@ -1,55 +0,0 @@
-pub use pragma_common::types::block_id::{BlockId, BlockTag};
-/// Re-export of some types from our common library so they're publicly accessible
-/// through the SDK.
-pub use pragma_common::types::merkle_tree::MerkleProof;
-pub use pragma_common::types::options::{
-    Instrument, InstrumentError, OptionCurrency, OptionData, OptionType,
-};
-
-use pragma_common::{types::merkle_tree::FeltMerkleProof, utils::field_element_as_hex_string};
-use starknet::core::types::Felt;
-
-#[derive(thiserror::Error, Debug)]
-pub enum CalldataError {
-    #[error("field element conversion failed")]
-    FeltConversion,
-}
-
-/// Calldata used to query Pragma Oracle.
-#[derive(Debug)]
-pub struct MerkleFeedCalldata {
-    pub merkle_proof: MerkleProof,
-    pub option_data: OptionData,
-}
-
-impl MerkleFeedCalldata {
-    /// Converts the structure into a `Vec<Felt>`, i.e. calldata.
-    pub fn as_calldata(&self) -> Result<Vec<Felt>, CalldataError> {
-        let mut calldata = Vec::with_capacity(self.merkle_proof.0.len());
-
-        let felt_proof: FeltMerkleProof = self
-            .merkle_proof
-            .clone()
-            .try_into()
-            .map_err(|_| CalldataError::FeltConversion)?;
-
-        calldata.push(felt_proof.0.len().into());
-        calldata.extend(felt_proof.0);
-
-        let option_calldata = self
-            .option_data
-            .as_calldata()
-            .map_err(|_| CalldataError::FeltConversion)?;
-        calldata.extend(option_calldata);
-
-        Ok(calldata)
-    }
-
-    pub fn as_hex_calldata(&self) -> Result<Vec<String>, CalldataError> {
-        Ok(self
-            .as_calldata()?
-            .into_iter()
-            .map(|f| field_element_as_hex_string(&f))
-            .collect())
-    }
-}
diff --git a/pragma-consumer/tests/common/mocks.rs b/pragma-consumer/tests/common/mocks.rs
deleted file mode 100644
index 4dd83eb8..00000000
--- a/pragma-consumer/tests/common/mocks.rs
+++ /dev/null
@@ -1,77 +0,0 @@
-use httpmock::{prelude::*, Mock};
-use pragma_common::types::Network;
-use pragma_consumer::types::{BlockId, Instrument};
-use serde_json::json;
-
-pub(crate) fn mock_healthcheck(pragmapi: &MockServer) -> Mock<'_> {
-    pragmapi.mock(|when, then| {
-        when.method(GET).path("/node");
-        then.status(200).body("Server is running!");
-    })
-}
-
-pub(crate) fn mock_option_response(
-    pragmapi: &MockServer,
-    instrument: Instrument,
-    network: Network,
-    block_id: BlockId,
-) -> Mock<'_> {
-    let url = format!("node/v1/merkle_feeds/options/{}", instrument.name(),);
-    pragmapi.mock(|when, then| {
-        when.method(GET)
-            .path_contains(url)
-            .query_param("network", network.to_string())
-            .query_param("block_id", block_id.to_string());
-        then.status(200)
-            .header("content-type", "text/json")
-            .json_body(option_data(&instrument));
-    })
-}
-
-pub(crate) fn mock_merkle_proof_response(
-    pragmapi: &MockServer,
-    option_hash: String,
-    network: Network,
-    block_id: BlockId,
-) -> Mock<'_> {
-    let url = format!("node/v1/merkle_feeds/proof/{}", &option_hash);
-    pragmapi.mock(|when, then| {
-        when.method(GET)
-            .path_contains(url)
-            .query_param("network", network.to_string())
-            .query_param("block_id", block_id.to_string());
-        then.status(200)
-            .header("content-type", "text/json")
-            .json_body(merkle_proof_data());
-    })
-}
-
-pub(crate) fn option_data(instrument: &Instrument) -> serde_json::Value {
-    json!({
-        "instrument_name": instrument.name(),
-        "base_currency": &instrument.base_currency.to_string(),
-        "current_timestamp": 1_722_805_873,
-        "mark_price": "45431835920",
-        "hash": "0x7866fd2ec3bc6bd1a2efb6e1f02337d62064a86e8d5755bdc568d92a06f320a"
-    })
-}
-
-pub(crate) fn merkle_proof_data() -> serde_json::Value {
-    json!([
-        "0x78626d4f8f1e24c24a41d90457688b436463d7595c4dd483671b1d5297518d2",
-        "0x14eb21a8e98fbd61f20d0bbdba2b32cb2bcb61082dfcf5229370aca5b2dbd2",
-        "0x73a5b6ab2f3ed2647ed316e5d4acac4db4b5f8da8f6e4707e633ebe02006043",
-        "0x1c156b5dedc44a27e73968ebe3d464538d7bb0332f1c8191b2eb4a5afca8c7a",
-        "0x39b52ee5f605f57cc893d398b09cb558c87ec9c956e11cd066df82e1006b33b",
-        "0x698ea138d770764c65cb171627c57ebc1efb7c495b2c7098872cb485fd2e0bc",
-        "0x313f2d7dc97dabc9a7fea0b42a5357787cabe78cdcca0d8274eabe170aaa79d",
-        "0x6b35594ee638d1baa9932b306753fbd43a300435af0d51abd3dd7bd06159e80",
-        "0x6e9f8a80ebebac7ba997448a1c50cd093e1b9c858cac81537446bafa4aa9431",
-        "0x3082dc1a8f44267c1b9bea29a3df4bd421e9c33ee1594bf297a94dfd34c7ae4",
"0x16356d27fc23e31a3570926c593bb37430201f51282f2628780264d3a399867" - ]) -} - -pub(crate) fn merkle_root_data() -> String { - "0x31d84dd2db2edb4b74a651b0f86351612efdedc51b51a178d5967a3cdfd319f".into() -} diff --git a/pragma-consumer/tests/common/mod.rs b/pragma-consumer/tests/common/mod.rs deleted file mode 100644 index 2689a667..00000000 --- a/pragma-consumer/tests/common/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub(crate) mod mocks; diff --git a/pragma-consumer/tests/test_consumer.rs b/pragma-consumer/tests/test_consumer.rs deleted file mode 100644 index 481b1238..00000000 --- a/pragma-consumer/tests/test_consumer.rs +++ /dev/null @@ -1,81 +0,0 @@ -mod common; - -use httpmock::MockServer; -use rstest::*; -use starknet::core::types::Felt; - -use pragma_common::{hash::pedersen_hash, instrument, types::Network}; -use pragma_consumer::{ - builder::PragmaConsumerBuilder, - config::{ApiConfig, PragmaBaseUrl}, - consumer::PragmaConsumer, - types::{BlockId, BlockTag, Instrument}, -}; - -use common::mocks::{ - merkle_root_data, mock_healthcheck, mock_merkle_proof_response, mock_option_response, - option_data, -}; - -#[rstest] -#[tokio::test] -async fn test_consumer() { - let pragmapi = MockServer::start(); - - let api_config = ApiConfig { - base_url: PragmaBaseUrl::Custom(format!("http://{}", pragmapi.address())), - api_key: "this_is_a_test".into(), - }; - - let healthcheck_mock = mock_healthcheck(&pragmapi); - - // 1. Build the consumer with an healthcheck - let consumer: PragmaConsumer = PragmaConsumerBuilder::new() - .on_sepolia() - .check_api_health() - .with_http(api_config) - .await - .expect("Could not build PragmaConsumer"); - healthcheck_mock.assert(); - - // 2. Define some fake tests instruments - let test_instrument: Instrument = instrument!("BTC-16AUG24-52000-P"); - let block_test = BlockId::Tag(BlockTag::Latest); - let network = Network::Sepolia; - - // 2.5 Mock responses - let option_mock = mock_option_response(&pragmapi, test_instrument.clone(), network, block_test); - let merkle_proof_mock = mock_merkle_proof_response( - &pragmapi, - option_data(&test_instrument)["hash"] - .as_str() - .unwrap() - .to_owned(), - network, - block_test, - ); - - // 3. Fetch the calldata & assert that the mocks got correctly called - let calldata = consumer - .get_merkle_feed_calldata(&test_instrument, Some(block_test)) - .await - .expect("Could not fetch the calldata"); - - option_mock.assert(); - merkle_proof_mock.assert(); - - // 4. 
Verify the proof returned - let expected_merkle_root = Felt::from_hex(&merkle_root_data()).unwrap(); - - let mut out_merkle_root = calldata - .option_data - .pedersen_hash() - .expect("Could not generate the hash of option"); - - for sibling in calldata.merkle_proof.0 { - let felt_sibling = Felt::from_hex(&sibling).unwrap(); - out_merkle_root = pedersen_hash(&out_merkle_root, &felt_sibling); - } - - assert_eq!(out_merkle_root, expected_merkle_root); -} diff --git a/pragma-entities/Cargo.toml b/pragma-entities/Cargo.toml index 1153e209..1c6c96fc 100644 --- a/pragma-entities/Cargo.toml +++ b/pragma-entities/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "pragma-entities" version = "0.1.0" -edition = "2021" +edition.workspace = true [lints] workspace = true @@ -19,7 +19,6 @@ diesel = { workspace = true, features = [ "postgres_backend", ] } diesel_migrations = { workspace = true } -redis = { workspace = true, features = ["tokio-comp", "json"] } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true, features = ["arbitrary_precision"] } starknet = { workspace = true } diff --git a/pragma-entities/migrations/2023-10-11-223513_create_entries/down.sql b/pragma-entities/migrations/2023-10-11-223513_create_entries/down.sql index b32b01ab..c929d73d 100644 --- a/pragma-entities/migrations/2023-10-11-223513_create_entries/down.sql +++ b/pragma-entities/migrations/2023-10-11-223513_create_entries/down.sql @@ -1,2 +1,2 @@ --- This file should undo anything in `up.sql` -DROP TABLE entries \ No newline at end of file +DROP TABLE future_entries; +DROP TABLE entries; diff --git a/pragma-entities/migrations/2023-10-11-223513_create_entries/up.sql b/pragma-entities/migrations/2023-10-11-223513_create_entries/up.sql index 5dfb9189..5e491d91 100644 --- a/pragma-entities/migrations/2023-10-11-223513_create_entries/up.sql +++ b/pragma-entities/migrations/2023-10-11-223513_create_entries/up.sql @@ -1,11 +1,57 @@ -- Your SQL goes here CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; +-- SPOT entries + CREATE TABLE entries ( id uuid DEFAULT uuid_generate_v4(), pair_id VARCHAR NOT NULL, - publisher TEXT NOT NULL, + price NUMERIC NOT NULL, timestamp TIMESTAMPTZ NOT NULL, + publisher TEXT NOT NULL, + publisher_signature TEXT, + source VARCHAR NOT NULL, + PRIMARY KEY (id, timestamp) +); + +CREATE UNIQUE INDEX idx_entries_unique + ON entries(pair_id, source, timestamp DESC); +CREATE INDEX entries_pair_id_timestamp_idx ON entries (pair_id, timestamp DESC); + +SELECT + create_hypertable('entries', by_range('timestamp', INTERVAL '6 hours')); + +ALTER TABLE entries SET ( + timescaledb.enable_columnstore = true, + timescaledb.segmentby = 'pair_id' +); + +CALL add_columnstore_policy('entries', after => INTERVAL '2 hours'); + +-- FUTURE (contains perp) entries + +CREATE TABLE future_entries ( + id uuid DEFAULT uuid_generate_v4(), + pair_id VARCHAR NOT NULL, price NUMERIC NOT NULL, + timestamp TIMESTAMPTZ NOT NULL, + expiration_timestamp TIMESTAMPTZ, -- can be NULL for perp contracts + publisher TEXT NOT NULL, + publisher_signature TEXT, + source VARCHAR NOT NULL, PRIMARY KEY (id, timestamp) -) \ No newline at end of file +); + +CREATE UNIQUE INDEX idx_future_entries_unique ON future_entries(pair_id, source, timestamp, expiration_timestamp); +CREATE INDEX idx_future_entries_pair_id_timestamp ON future_entries (pair_id, timestamp DESC); +CREATE INDEX idx_future_entries_pair_id_timestamp_expiration_timestamp ON future_entries (pair_id, expiration_timestamp, timestamp DESC); + +SELECT + 
create_hypertable('future_entries', by_range('timestamp', INTERVAL '6 hours')); + +ALTER TABLE future_entries SET ( + timescaledb.enable_columnstore = true, + timescaledb.segmentby = 'pair_id' +); + +CALL add_columnstore_policy('future_entries', after => INTERVAL '2 hours'); diff --git a/pragma-entities/migrations/2023-10-12-005433_add_source/down.sql b/pragma-entities/migrations/2023-10-12-005433_add_source/down.sql deleted file mode 100644 index e4d98c74..00000000 --- a/pragma-entities/migrations/2023-10-12-005433_add_source/down.sql +++ /dev/null @@ -1,3 +0,0 @@ --- This file should undo anything in `up.sql` -ALTER TABLE entries -DROP COLUMN source; \ No newline at end of file diff --git a/pragma-entities/migrations/2023-10-12-005433_add_source/up.sql b/pragma-entities/migrations/2023-10-12-005433_add_source/up.sql deleted file mode 100644 index 71455d3a..00000000 --- a/pragma-entities/migrations/2023-10-12-005433_add_source/up.sql +++ /dev/null @@ -1,3 +0,0 @@ --- Your SQL goes here -ALTER TABLE entries -ADD COLUMN source VARCHAR NOT NULL; \ No newline at end of file diff --git a/pragma-entities/migrations/2023-10-12-232125_add_publishers_table/down.sql b/pragma-entities/migrations/2023-10-12-232125_add_publishers_table/down.sql index e1ac25ce..8787cde2 100644 --- a/pragma-entities/migrations/2023-10-12-232125_add_publishers_table/down.sql +++ b/pragma-entities/migrations/2023-10-12-232125_add_publishers_table/down.sql @@ -1,2 +1 @@ --- This file should undo anything in `up.sql` -DROP TABLE publishers \ No newline at end of file +DROP TABLE publishers; diff --git a/pragma-entities/migrations/2023-10-12-232125_add_publishers_table/up.sql b/pragma-entities/migrations/2023-10-12-232125_add_publishers_table/up.sql index 896d2e50..256ab3d9 100644 --- a/pragma-entities/migrations/2023-10-12-232125_add_publishers_table/up.sql +++ b/pragma-entities/migrations/2023-10-12-232125_add_publishers_table/up.sql @@ -4,6 +4,16 @@ CREATE TABLE publishers ( name VARCHAR NOT NULL, master_key VARCHAR NOT NULL, active_key VARCHAR NOT NULL, - active BOOLEAN NOT NULL, + account_address VARCHAR NOT NULL DEFAULT '', + active BOOLEAN NOT NULL DEFAULT true, PRIMARY KEY (id) -); \ No newline at end of file +); + +INSERT INTO publishers (name, master_key, active_key, active, account_address) +VALUES ( + 'PRAGMA', + '0x05e6361b53afbb451d1326ed4e37aecff9ef68af8318eb3c8dc58bcadfc16705', + '0x05e6361b53afbb451d1326ed4e37aecff9ef68af8318eb3c8dc58bcadfc16705', + true, + '0x624EBFB99865079BD58CFCFB925B6F5CE940D6F6E41E118B8A72B7163FB435C' +); diff --git a/pragma-entities/migrations/2023-10-16-233310_add_account_address/down.sql b/pragma-entities/migrations/2023-10-16-233310_add_account_address/down.sql deleted file mode 100644 index 4b621e7a..00000000 --- a/pragma-entities/migrations/2023-10-16-233310_add_account_address/down.sql +++ /dev/null @@ -1,3 +0,0 @@ --- This file should undo anything in `up.sql` -ALTER TABLE publishers -DROP COLUMN account_address; \ No newline at end of file diff --git a/pragma-entities/migrations/2023-10-16-233310_add_account_address/up.sql b/pragma-entities/migrations/2023-10-16-233310_add_account_address/up.sql deleted file mode 100644 index ad0837c1..00000000 --- a/pragma-entities/migrations/2023-10-16-233310_add_account_address/up.sql +++ /dev/null @@ -1,12 +0,0 @@ --- Your SQL goes here -ALTER TABLE publishers -ADD COLUMN account_address VARCHAR NOT NULL DEFAULT ''; - -INSERT INTO publishers (name, master_key, active_key, active, account_address) -VALUES ( - 'PRAGMA', - 
'0x05e6361b53afbb451d1326ed4e37aecff9ef68af8318eb3c8dc58bcadfc16705', - '0x05e6361b53afbb451d1326ed4e37aecff9ef68af8318eb3c8dc58bcadfc16705', - true, - '0x624EBFB99865079BD58CFCFB925B6F5CE940D6F6E41E118B8A72B7163FB435C' -); \ No newline at end of file diff --git a/pragma-entities/migrations/2023-10-25-134954_add_currencies_table/down.sql b/pragma-entities/migrations/2023-10-25-134954_add_currencies_table/down.sql deleted file mode 100644 index ee00d5c6..00000000 --- a/pragma-entities/migrations/2023-10-25-134954_add_currencies_table/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- This file should undo anything in `up.sql` -DROP TABLE currencies \ No newline at end of file diff --git a/pragma-entities/migrations/2023-10-25-134954_add_currencies_table/up.sql b/pragma-entities/migrations/2023-10-25-134954_add_currencies_table/up.sql deleted file mode 100644 index 87c4df3e..00000000 --- a/pragma-entities/migrations/2023-10-25-134954_add_currencies_table/up.sql +++ /dev/null @@ -1,23 +0,0 @@ --- Your SQL goes here -CREATE TABLE currencies ( - id uuid DEFAULT uuid_generate_v4(), - name VARCHAR NOT NULL, - decimals NUMERIC NOT NULL, - abstract BOOLEAN NOT NULL, - ethereum_address VARCHAR, - PRIMARY KEY (id) -); - --- initialize -INSERT INTO public.currencies (name, decimals, abstract, ethereum_address) VALUES -('BTC', 8, true, NULL), -('ETH', 18, false, NULL), -('USD', 8, true, NULL), -('EUR', 8, true, NULL), -('WBTC', 8, false, '0x2260FAC5E5542A773AA44FBCFEDF7C193BC2C599'), -('USDC', 6, false, '0xA0B86991C6218B36C1D19D4A2E9EB0CE3606EB48'), -('USDT', 6, false, '0xDAC17F958D2EE523A2206206994597C13D831EC7'), -('DAI', 18, false, '0x6B175474E89094C44DA98B954EEDEAC495271D0F'), -('LORDS', 18, false, '0x686F2404E77AB0D9070A46CDFB0B7FECDD2318B0'), -('R', 18, false, '0x183015A9BA6FF60230FDEADC3F43B3D788B13E21'), -('WSTETH', 18, false, '0x7F39C581F595B53C5CB19BD0B3F8DA6C935E2CA0'); diff --git a/pragma-entities/migrations/2023-11-24-185951_add_source_index/down.sql b/pragma-entities/migrations/2023-11-24-185951_add_source_index/down.sql deleted file mode 100644 index 5f0d5064..00000000 --- a/pragma-entities/migrations/2023-11-24-185951_add_source_index/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- This file should undo anything in `up.sql` -DROP INDEX IF EXISTS idx_entries_unique; \ No newline at end of file diff --git a/pragma-entities/migrations/2023-11-24-185951_add_source_index/up.sql b/pragma-entities/migrations/2023-11-24-185951_add_source_index/up.sql deleted file mode 100644 index 3aab5e77..00000000 --- a/pragma-entities/migrations/2023-11-24-185951_add_source_index/up.sql +++ /dev/null @@ -1,3 +0,0 @@ --- Your SQL goes here -CREATE UNIQUE INDEX idx_entries_unique - ON entries(pair_id, source, timestamp DESC); \ No newline at end of file diff --git a/pragma-entities/migrations/2023-12-29-052754_add_hypertable_entries/down.sql b/pragma-entities/migrations/2023-12-29-052754_add_hypertable_entries/down.sql deleted file mode 100644 index fa930c00..00000000 --- a/pragma-entities/migrations/2023-12-29-052754_add_hypertable_entries/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- This file should undo anything in `up.sql` -SELECT detach_table('entries'); \ No newline at end of file diff --git a/pragma-entities/migrations/2023-12-29-052754_add_hypertable_entries/up.sql b/pragma-entities/migrations/2023-12-29-052754_add_hypertable_entries/up.sql deleted file mode 100644 index b3ebe350..00000000 --- a/pragma-entities/migrations/2023-12-29-052754_add_hypertable_entries/up.sql +++ /dev/null @@ -1,2 +0,0 @@ --- Your SQL goes 
here -SELECT create_hypertable('entries', 'timestamp'); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-01-11-123510_add_continuous_aggregates/down.sql b/pragma-entities/migrations/2024-01-11-123510_add_continuous_aggregates/down.sql deleted file mode 100644 index 8fe24240..00000000 --- a/pragma-entities/migrations/2024-01-11-123510_add_continuous_aggregates/down.sql +++ /dev/null @@ -1,4 +0,0 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS price_1_min_agg; -DROP MATERIALIZED VIEW IF EXISTS price_15_min_agg; -DROP MATERIALIZED VIEW IF EXISTS price_1_h_agg; \ No newline at end of file diff --git a/pragma-entities/migrations/2024-01-11-123510_add_continuous_aggregates/up.sql b/pragma-entities/migrations/2024-01-11-123510_add_continuous_aggregates/up.sql deleted file mode 100644 index 5d96e126..00000000 --- a/pragma-entities/migrations/2024-01-11-123510_add_continuous_aggregates/up.sql +++ /dev/null @@ -1,49 +0,0 @@ --- Your SQL goes here -CREATE MATERIALIZED VIEW price_1_min_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = true) -AS SELECT - pair_id, - time_bucket('1 min'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_1_min_agg', - start_offset => NULL, - end_offset => INTERVAL '1 min', - schedule_interval => INTERVAL '1 min'); - -CREATE MATERIALIZED VIEW price_15_min_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = true) -AS SELECT - pair_id, - time_bucket('15 min'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_15_min_agg', - start_offset => NULL, - end_offset => INTERVAL '15 min', - schedule_interval => INTERVAL '15 min'); - -CREATE MATERIALIZED VIEW price_1_h_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = true) -AS SELECT - pair_id, - time_bucket('1 hour'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_1_h_agg', - start_offset => NULL, - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - diff --git a/pragma-entities/migrations/2024-01-11-123510_add_median_aggregates/down.sql b/pragma-entities/migrations/2024-01-11-123510_add_median_aggregates/down.sql new file mode 100644 index 00000000..21e803bc --- /dev/null +++ b/pragma-entities/migrations/2024-01-11-123510_add_median_aggregates/down.sql @@ -0,0 +1,49 @@ +-- Drop materialized views for spot +DROP MATERIALIZED VIEW median_100_ms_spot; +DROP MATERIALIZED VIEW median_100_ms_spot_per_source; +DROP MATERIALIZED VIEW median_1_s_spot; +DROP MATERIALIZED VIEW median_1_s_spot_per_source; +DROP MATERIALIZED VIEW median_5_s_spot; +DROP MATERIALIZED VIEW median_5_s_spot_per_source; +DROP MATERIALIZED VIEW median_10_s_spot; +DROP MATERIALIZED VIEW median_10_s_spot_per_source; +DROP MATERIALIZED VIEW median_1_min_spot; +DROP MATERIALIZED VIEW median_1_min_spot_per_source; +DROP MATERIALIZED VIEW median_15_min_spot; +DROP MATERIALIZED VIEW median_15_min_spot_per_source; +DROP MATERIALIZED VIEW median_1_h_spot; 
+DROP MATERIALIZED VIEW median_1_h_spot_per_source; +DROP MATERIALIZED VIEW median_2_h_spot; +DROP MATERIALIZED VIEW median_2_h_spot_per_source; +DROP MATERIALIZED VIEW median_1_day_spot; +DROP MATERIALIZED VIEW median_1_day_spot_per_source; +DROP MATERIALIZED VIEW median_1_week_spot; +DROP MATERIALIZED VIEW median_1_week_spot_per_source; + +-- Drop materialized views for perp +DROP MATERIALIZED VIEW median_100_ms_perp; +DROP MATERIALIZED VIEW median_100_ms_perp_per_source; +DROP MATERIALIZED VIEW median_1_s_perp; +DROP MATERIALIZED VIEW median_1_s_perp_per_source; +DROP MATERIALIZED VIEW median_5_s_perp; +DROP MATERIALIZED VIEW median_5_s_perp_per_source; +DROP MATERIALIZED VIEW median_10_s_perp; +DROP MATERIALIZED VIEW median_10_s_perp_per_source; +DROP MATERIALIZED VIEW median_1_min_perp; +DROP MATERIALIZED VIEW median_1_min_perp_per_source; +DROP MATERIALIZED VIEW median_15_min_perp; +DROP MATERIALIZED VIEW median_15_min_perp_per_source; +DROP MATERIALIZED VIEW median_1_h_perp; +DROP MATERIALIZED VIEW median_1_h_perp_per_source; +DROP MATERIALIZED VIEW median_2_h_perp; +DROP MATERIALIZED VIEW median_2_h_perp_per_source; +DROP MATERIALIZED VIEW median_1_day_perp; +DROP MATERIALIZED VIEW median_1_day_perp_per_source; +DROP MATERIALIZED VIEW median_1_week_perp; +DROP MATERIALIZED VIEW median_1_week_perp_per_source; + +-- Drop the function +DROP FUNCTION create_median_aggregate(text, interval, interval, text); + +-- Drop the custom type +DROP TYPE price_component; diff --git a/pragma-entities/migrations/2024-01-11-123510_add_median_aggregates/up.sql b/pragma-entities/migrations/2024-01-11-123510_add_median_aggregates/up.sql new file mode 100644 index 00000000..002ef901 --- /dev/null +++ b/pragma-entities/migrations/2024-01-11-123510_add_median_aggregates/up.sql @@ -0,0 +1,106 @@ +-- A price component - it represents a sub price that has been used to compute a price. +-- For example, a price that has been used to compute a median for an ID. 
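+-- An illustrative value of this type (hypothetical source/price values), built the
+-- same way the aggregates below build their `components` arrays:
+--   ROW('BINANCE', 4543183592000, '2024-01-11 12:00:00+00')::price_component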
+CREATE TYPE price_component AS ( + source text, + price numeric(1000,0), + "timestamp" timestamptz +); + +-- =============================== +-- Function used to create a median continuous aggregate +-- =============================== +CREATE OR REPLACE FUNCTION create_median_aggregate( + p_name text, + p_interval interval, + p_start_offset interval, + p_type text -- 'spot' or 'perp' +) +RETURNS void AS $$ +DECLARE + table_name text; + where_condition text; +BEGIN + -- Set the table and WHERE condition based on p_type + IF p_type = 'spot' THEN + table_name := 'entries'; + where_condition := '"timestamp" IS NOT NULL'; + ELSIF p_type = 'perp' THEN + table_name := 'future_entries'; + where_condition := '"timestamp" IS NOT NULL AND expiration_timestamp IS NULL'; + ELSE + RAISE EXCEPTION 'Invalid type: %', p_type; + END IF; + + -- Create the sub materialized view that contains the median price per source + EXECUTE format(' + CREATE MATERIALIZED VIEW %s_per_source + WITH (timescaledb.continuous, timescaledb.materialized_only = false) + AS SELECT + pair_id, + source, + time_bucket(%L, "timestamp") AS subbucket, + percentile_cont(0.5) WITHIN GROUP (ORDER BY price)::numeric(1000,0) AS source_median_price + FROM %I + WHERE %s + GROUP BY pair_id, source, subbucket + WITH NO DATA;', + p_name, p_interval, table_name, where_condition); + + -- Create the materialized view that contains the median price across all sources + EXECUTE format(' + CREATE MATERIALIZED VIEW %I + WITH (timescaledb.continuous, timescaledb.materialized_only = false) + AS SELECT + pair_id, + time_bucket(%L, subbucket) AS bucket, + percentile_cont(0.5) WITHIN GROUP (ORDER BY source_median_price)::numeric(1000,0) AS median_price, + COUNT(DISTINCT source) AS num_sources, + array_agg(ROW(source, source_median_price, subbucket)::price_component) AS components + FROM %I + GROUP BY pair_id, bucket + WITH NO DATA;', + p_name, p_interval, p_name || '_per_source'); + + -- Set the chunk time interval to 12 hours + EXECUTE format('SELECT set_chunk_time_interval(%L, INTERVAL ''12 hours'');', p_name || '_per_source'); + EXECUTE format('SELECT set_chunk_time_interval(%L, INTERVAL ''12 hours'');', p_name); + + -- Add the continuous aggregate refresh policy + EXECUTE format(' + SELECT add_continuous_aggregate_policy(%L, + start_offset => %L, + end_offset => %L, + schedule_interval => %L);', + p_name || '_per_source', p_start_offset, '0'::interval, p_interval); + EXECUTE format(' + SELECT add_continuous_aggregate_policy(%L, + start_offset => %L, + end_offset => %L, + schedule_interval => %L);', + p_name, p_start_offset, '0'::interval, p_interval); +END; +$$ LANGUAGE plpgsql; + +-- SPOT median +SELECT create_median_aggregate('median_100_ms_spot', '100 milliseconds'::interval, '300 milliseconds'::interval, 'spot'); +SELECT create_median_aggregate('median_1_s_spot', '1 second'::interval, '3 seconds'::interval, 'spot'); +SELECT create_median_aggregate('median_5_s_spot', '5 seconds'::interval, '15 seconds'::interval, 'spot'); +SELECT create_median_aggregate('median_10_s_spot', '10 seconds'::interval, '30 seconds'::interval, 'spot'); +SELECT create_median_aggregate('median_1_min_spot', '1 minute'::interval, '3 minutes'::interval, 'spot'); +SELECT create_median_aggregate('median_15_min_spot', '15 minutes'::interval, '45 minutes'::interval, 'spot'); +SELECT create_median_aggregate('median_1_h_spot', '1 hour'::interval, '3 hours'::interval, 'spot'); +SELECT create_median_aggregate('median_2_h_spot', '2 hours'::interval, '6 hours'::interval, 'spot'); +SELECT 
create_median_aggregate('median_1_day_spot', '1 day'::interval, '3 days'::interval, 'spot'); +SELECT create_median_aggregate('median_1_week_spot', '1 week'::interval, '3 weeks'::interval, 'spot'); + +-- PERP median +SELECT create_median_aggregate('median_100_ms_perp', '100 milliseconds'::interval, '300 milliseconds'::interval, 'perp'); +SELECT create_median_aggregate('median_1_s_perp', '1 second'::interval, '3 seconds'::interval, 'perp'); +SELECT create_median_aggregate('median_5_s_perp', '5 seconds'::interval, '15 seconds'::interval, 'perp'); +SELECT create_median_aggregate('median_10_s_perp', '10 seconds'::interval, '30 seconds'::interval, 'perp'); +SELECT create_median_aggregate('median_1_min_perp', '1 minute'::interval, '3 minutes'::interval, 'perp'); +SELECT create_median_aggregate('median_15_min_perp', '15 minutes'::interval, '45 minutes'::interval, 'perp'); +SELECT create_median_aggregate('median_1_h_perp', '1 hour'::interval, '3 hours'::interval, 'perp'); +SELECT create_median_aggregate('median_2_h_perp', '2 hours'::interval, '6 hours'::interval, 'perp'); +SELECT create_median_aggregate('median_1_day_perp', '1 day'::interval, '3 days'::interval, 'perp'); +SELECT create_median_aggregate('median_1_week_perp', '1 week'::interval, '3 weeks'::interval, 'perp'); diff --git a/pragma-entities/migrations/2024-01-12-095822_add_realtime_agg/down.sql b/pragma-entities/migrations/2024-01-12-095822_add_realtime_agg/down.sql deleted file mode 100644 index 475bdacb..00000000 --- a/pragma-entities/migrations/2024-01-12-095822_add_realtime_agg/down.sql +++ /dev/null @@ -1,4 +0,0 @@ --- This file should undo anything in `up.sql` -ALTER MATERIALIZED VIEW price_1_min_agg set (timescaledb.materialized_only = true); -ALTER MATERIALIZED VIEW price_15_min_agg set (timescaledb.materialized_only = true); -ALTER MATERIALIZED VIEW price_1_h_agg set (timescaledb.materialized_only = true); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-01-12-095822_add_realtime_agg/up.sql b/pragma-entities/migrations/2024-01-12-095822_add_realtime_agg/up.sql deleted file mode 100644 index 4df2c6d6..00000000 --- a/pragma-entities/migrations/2024-01-12-095822_add_realtime_agg/up.sql +++ /dev/null @@ -1,4 +0,0 @@ --- Your SQL goes here -ALTER MATERIALIZED VIEW price_1_min_agg set (timescaledb.materialized_only = false); -ALTER MATERIALIZED VIEW price_15_min_agg set (timescaledb.materialized_only = false); -ALTER MATERIALIZED VIEW price_1_h_agg set (timescaledb.materialized_only = false); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-01-12-135355_add_candlestick_views/down.sql b/pragma-entities/migrations/2024-01-12-135355_add_candlestick_views/down.sql index dd89c822..c15507bd 100644 --- a/pragma-entities/migrations/2024-01-12-135355_add_candlestick_views/down.sql +++ b/pragma-entities/migrations/2024-01-12-135355_add_candlestick_views/down.sql @@ -1,6 +1,18 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS one_day_candle; -DROP MATERIALIZED VIEW IF EXISTS one_hour_candle; -DROP MATERIALIZED VIEW IF EXISTS fifteen_minute_candle; -DROP MATERIALIZED VIEW IF EXISTS five_minute_candle; -DROP MATERIALIZED VIEW IF EXISTS one_minute_candle; \ No newline at end of file +-- Drop materialized views for spot candlesticks +DROP MATERIALIZED VIEW candle_10_s_spot; +DROP MATERIALIZED VIEW candle_1_min_spot; +DROP MATERIALIZED VIEW candle_5_min_spot; +DROP MATERIALIZED VIEW candle_15_min_spot; +DROP MATERIALIZED VIEW candle_1_h_spot; +DROP MATERIALIZED VIEW 
candle_1_day_spot; + +-- Drop materialized views for perp candlesticks +DROP MATERIALIZED VIEW candle_10_s_perp; +DROP MATERIALIZED VIEW candle_1_min_perp; +DROP MATERIALIZED VIEW candle_5_min_perp; +DROP MATERIALIZED VIEW candle_15_min_perp; +DROP MATERIALIZED VIEW candle_1_h_perp; +DROP MATERIALIZED VIEW candle_1_day_perp; + +-- Drop the function +DROP FUNCTION create_candlestick_view(text, interval, interval, text); diff --git a/pragma-entities/migrations/2024-01-12-135355_add_candlestick_views/up.sql b/pragma-entities/migrations/2024-01-12-135355_add_candlestick_views/up.sql index ccccba14..ad9589cf 100644 --- a/pragma-entities/migrations/2024-01-12-135355_add_candlestick_views/up.sql +++ b/pragma-entities/migrations/2024-01-12-135355_add_candlestick_views/up.sql @@ -1,95 +1,50 @@ --- 1 day candle -CREATE MATERIALIZED VIEW one_day_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 day', timestamp) AS bucket, - pair_id, - FIRST(price, timestamp) AS "open", - MAX(price) AS high, - MIN(price) AS low, - LAST(price, timestamp) AS "close" - FROM entries - GROUP BY bucket, pair_id - WITH NO DATA; - - -SELECT add_continuous_aggregate_policy('one_day_candle', - start_offset => INTERVAL '3 days', - end_offset => INTERVAL '1 day', - schedule_interval => INTERVAL '1 day'); - --- 1 hour candle -CREATE MATERIALIZED VIEW one_hour_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 hour', timestamp) AS bucket, - pair_id, - FIRST(price, timestamp) AS "open", - MAX(price) AS high, - MIN(price) AS low, - LAST(price, timestamp) AS "close" - FROM entries - GROUP BY bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('one_hour_candle', - start_offset => INTERVAL '3 hours', - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - --- 15 minute candle -CREATE MATERIALIZED VIEW fifteen_minute_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('15 minutes', timestamp) AS bucket, - pair_id, - FIRST(price, timestamp)::numeric AS "open", - MAX(price)::numeric AS high, - MIN(price)::numeric AS low, - LAST(price, timestamp)::numeric AS "close" - FROM entries - GROUP BY bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('fifteen_minute_candle', - start_offset => INTERVAL '45 minutes', - end_offset => INTERVAL '15 minutes', - schedule_interval => INTERVAL '15 minutes'); - --- 5 minute candle -CREATE MATERIALIZED VIEW five_minute_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('5 minutes', timestamp) AS bucket, - pair_id, - FIRST(price, timestamp) AS "open", - MAX(price) AS high, - MIN(price) AS low, - LAST(price, timestamp) AS "close" - FROM entries - GROUP BY bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('five_minute_candle', - start_offset => INTERVAL '15 minutes', - end_offset => INTERVAL '5 minutes', - schedule_interval => INTERVAL '5 minutes'); - --- 1 minute candle -CREATE MATERIALIZED VIEW one_minute_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 minute', timestamp) AS bucket, - pair_id, - FIRST(price, timestamp) AS "open", - MAX(price) AS high, - MIN(price) AS low, - LAST(price, timestamp) AS "close" - FROM entries - GROUP BY bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('one_minute_candle', - start_offset => INTERVAL '3 minutes', - end_offset => INTERVAL '1 minute', - schedule_interval => INTERVAL '1 minute'); \ No newline at end of file +CREATE OR REPLACE FUNCTION create_candlestick_view( + p_name text, + 
p_interval interval, + p_start_offset interval, + p_table_name text +) +RETURNS void AS $$ +BEGIN + -- Create the materialized view with continuous aggregate + EXECUTE format(' + CREATE MATERIALIZED VIEW %I + WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS + SELECT + time_bucket(%L, subbucket) AS ohlc_bucket, + pair_id, + FIRST(source_median_price, subbucket)::numeric AS "open", + MAX(source_median_price)::numeric AS high, + MIN(source_median_price)::numeric AS low, + LAST(source_median_price, subbucket)::numeric AS "close" + FROM %I_per_source + GROUP BY ohlc_bucket, pair_id + WITH NO DATA;', p_name, p_interval, p_table_name); + + -- Set the chunk time interval to 7 days + EXECUTE format('SELECT set_chunk_time_interval(%L, INTERVAL ''7 days'');', p_name); + + -- Add the continuous aggregate refresh policy + EXECUTE format(' + SELECT add_continuous_aggregate_policy(%L, + start_offset => %L, + end_offset => %L, + schedule_interval => %L);', p_name, p_start_offset, '0'::interval, p_interval); +END; +$$ LANGUAGE plpgsql; + +-- Spot entries candlesticks +SELECT create_candlestick_view('candle_10_s_spot', '10 seconds'::interval, '30 seconds'::interval, 'median_1_s_spot'); +SELECT create_candlestick_view('candle_1_min_spot', '1 minute'::interval, '3 minutes'::interval, 'median_1_s_spot'); +SELECT create_candlestick_view('candle_5_min_spot', '5 minutes'::interval, '15 minutes'::interval, 'median_10_s_spot'); +SELECT create_candlestick_view('candle_15_min_spot', '15 minutes'::interval, '45 minutes'::interval, 'median_10_s_spot'); +SELECT create_candlestick_view('candle_1_h_spot', '1 hour'::interval, '3 hours'::interval, 'median_10_s_spot'); +SELECT create_candlestick_view('candle_1_day_spot', '1 day'::interval, '3 days'::interval, 'median_10_s_spot'); + +-- Perp entries candlesticks +SELECT create_candlestick_view('candle_10_s_perp', '10 seconds'::interval, '30 seconds'::interval, 'median_1_s_perp'); +SELECT create_candlestick_view('candle_1_min_perp', '1 minute'::interval, '3 minutes'::interval, 'median_1_s_perp'); +SELECT create_candlestick_view('candle_5_min_perp', '5 minutes'::interval, '15 minutes'::interval, 'median_10_s_perp'); +SELECT create_candlestick_view('candle_15_min_perp', '15 minutes'::interval, '45 minutes'::interval, 'median_10_s_perp'); +SELECT create_candlestick_view('candle_1_h_perp', '1 hour'::interval, '3 hours'::interval, 'median_10_s_perp'); +SELECT create_candlestick_view('candle_1_day_perp', '1 day'::interval, '3 days'::interval, 'median_10_s_perp'); diff --git a/pragma-entities/migrations/2024-01-19-113453_add_twap_aggregates/down.sql b/pragma-entities/migrations/2024-01-19-113453_add_twap_aggregates/down.sql index 00e2ba49..c273fa88 100644 --- a/pragma-entities/migrations/2024-01-19-113453_add_twap_aggregates/down.sql +++ b/pragma-entities/migrations/2024-01-19-113453_add_twap_aggregates/down.sql @@ -1,4 +1,34 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS twap_1_min_agg; -DROP MATERIALIZED VIEW IF EXISTS twap_15_min_agg; -DROP MATERIALIZED VIEW IF EXISTS twap_1_hour_agg; \ No newline at end of file +-- Drop materialized views for spot twap (main views first) +DROP MATERIALIZED VIEW IF EXISTS twap_1_min_spot; +DROP MATERIALIZED VIEW IF EXISTS twap_5_min_spot; +DROP MATERIALIZED VIEW IF EXISTS twap_15_min_spot; +DROP MATERIALIZED VIEW IF EXISTS twap_1_h_spot; +DROP MATERIALIZED VIEW IF EXISTS twap_2_h_spot; +DROP MATERIALIZED VIEW IF EXISTS twap_1_day_spot; + +-- Drop materialized views for spot twap 
(per-source views)
+DROP MATERIALIZED VIEW IF EXISTS twap_1_min_spot_per_source;
+DROP MATERIALIZED VIEW IF EXISTS twap_5_min_spot_per_source;
+DROP MATERIALIZED VIEW IF EXISTS twap_15_min_spot_per_source;
+DROP MATERIALIZED VIEW IF EXISTS twap_1_h_spot_per_source;
+DROP MATERIALIZED VIEW IF EXISTS twap_2_h_spot_per_source;
+DROP MATERIALIZED VIEW IF EXISTS twap_1_day_spot_per_source;
+
+-- Drop materialized views for perp twap (main views first)
+DROP MATERIALIZED VIEW IF EXISTS twap_1_min_perp;
+DROP MATERIALIZED VIEW IF EXISTS twap_5_min_perp;
+DROP MATERIALIZED VIEW IF EXISTS twap_15_min_perp;
+DROP MATERIALIZED VIEW IF EXISTS twap_1_h_perp;
+DROP MATERIALIZED VIEW IF EXISTS twap_2_h_perp;
+DROP MATERIALIZED VIEW IF EXISTS twap_1_day_perp;
+
+-- Drop materialized views for perp twap (per-source views)
+DROP MATERIALIZED VIEW IF EXISTS twap_1_min_perp_per_source;
+DROP MATERIALIZED VIEW IF EXISTS twap_5_min_perp_per_source;
+DROP MATERIALIZED VIEW IF EXISTS twap_15_min_perp_per_source;
+DROP MATERIALIZED VIEW IF EXISTS twap_1_h_perp_per_source;
+DROP MATERIALIZED VIEW IF EXISTS twap_2_h_perp_per_source;
+DROP MATERIALIZED VIEW IF EXISTS twap_1_day_perp_per_source;
+
+-- Drop the function
+DROP FUNCTION IF EXISTS create_twap_aggregate(text, interval, interval, text);
diff --git a/pragma-entities/migrations/2024-01-19-113453_add_twap_aggregates/up.sql b/pragma-entities/migrations/2024-01-19-113453_add_twap_aggregates/up.sql
index 5cd3376b..16652ac4 100644
--- a/pragma-entities/migrations/2024-01-19-113453_add_twap_aggregates/up.sql
+++ b/pragma-entities/migrations/2024-01-19-113453_add_twap_aggregates/up.sql
@@ -1,50 +1,90 @@
--- 1min TWAP
-CREATE MATERIALIZED VIEW twap_1_min_agg
-WITH (timescaledb.continuous, timescaledb.materialized_only = true)
-AS SELECT
-    pair_id,
-    time_bucket('1 min'::interval, timestamp) as bucket,
-    average(time_weight('Linear', timestamp, price))::numeric as price_twap,
-    COUNT(DISTINCT source) as num_sources
-FROM entries
-GROUP BY bucket, pair_id
-WITH NO DATA;
+-- ===============================
+-- Function used to create a twap continuous aggregate
+-- ===============================
+CREATE OR REPLACE FUNCTION create_twap_aggregate(
+    p_name text,
+    p_interval interval,
+    p_start_offset interval,
+    p_type text -- 'spot' or 'perp'
+)
+RETURNS void AS $$
+DECLARE
+    table_name text;
+    where_condition text;
+BEGIN
+    -- Set the table and WHERE condition based on p_type
+    IF p_type = 'spot' THEN
+        table_name := 'entries';
+        where_condition := '"timestamp" IS NOT NULL';
+    ELSIF p_type = 'perp' THEN
+        table_name := 'future_entries';
+        where_condition := '"timestamp" IS NOT NULL AND expiration_timestamp IS NULL';
+    ELSE
+        RAISE EXCEPTION 'Invalid type: %', p_type;
+    END IF;
-SELECT add_continuous_aggregate_policy('twap_1_min_agg',
-    start_offset => NULL,
-    end_offset => INTERVAL '1 min',
-    schedule_interval => INTERVAL '1 min');
+    -- Create the sub materialized view for TWAP per source
+    EXECUTE format('
+        CREATE MATERIALIZED VIEW %s_per_source
+        WITH (timescaledb.continuous, timescaledb.materialized_only = false)
+        AS SELECT
+            pair_id,
+            source,
+            time_bucket(%L, "timestamp") AS subbucket,
+            average(time_weight(''Linear'', "timestamp", price))::numeric(1000,0) AS source_twap_price
+        FROM %I
+        WHERE %s
+        GROUP BY pair_id, source, subbucket
+        WITH NO DATA;',
+        p_name, p_interval, table_name, where_condition);
--- 15min TWAP
-CREATE MATERIALIZED VIEW twap_15_min_agg
-WITH (timescaledb.continuous, timescaledb.materialized_only = true)
-AS SELECT
-    pair_id,
time_bucket('15 min'::interval, timestamp) as bucket, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; + -- Create the main materialized view averaging source TWAPs + EXECUTE format(' + CREATE MATERIALIZED VIEW %I + WITH (timescaledb.continuous, timescaledb.materialized_only = false) + AS SELECT + pair_id, + time_bucket(%L, subbucket) AS bucket, + avg(source_twap_price)::numeric(1000,0) AS twap_price, + COUNT(DISTINCT source) AS num_sources, + array_agg(ROW(source, source_twap_price, subbucket)::price_component) AS components + FROM %I + GROUP BY pair_id, bucket + WITH NO DATA;', + p_name, p_interval, p_name || '_per_source'); -SELECT add_continuous_aggregate_policy('twap_15_min_agg', - start_offset => NULL, - end_offset => INTERVAL '15 min', - schedule_interval => INTERVAL '15 min'); + -- Set chunk time interval to 7 days for both views + EXECUTE format('SELECT set_chunk_time_interval(%L, INTERVAL ''7 days'');', p_name || '_per_source'); + EXECUTE format('SELECT set_chunk_time_interval(%L, INTERVAL ''7 days'');', p_name); --- 1hour TWAP -CREATE MATERIALIZED VIEW twap_1_hour_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = true) -AS SELECT - pair_id, - time_bucket('1 hour'::interval, timestamp) as bucket, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; + -- Add continuous aggregate policies + EXECUTE format(' + SELECT add_continuous_aggregate_policy(%L, + start_offset => %L, + end_offset => %L, + schedule_interval => %L);', + p_name || '_per_source', p_start_offset, '0'::interval, p_interval); + EXECUTE format(' + SELECT add_continuous_aggregate_policy(%L, + start_offset => %L, + end_offset => %L, + schedule_interval => %L);', + p_name, p_start_offset, '0'::interval, p_interval); +END; +$$ LANGUAGE plpgsql; -SELECT add_continuous_aggregate_policy('twap_1_hour_agg', - start_offset => NULL, - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); \ No newline at end of file +-- SPOT twap +SELECT create_twap_aggregate('twap_1_min_spot', '1 minute'::interval, '3 minutes'::interval, 'spot'); +SELECT create_twap_aggregate('twap_5_min_spot', '5 minutes'::interval, '15 minutes'::interval, 'spot'); +SELECT create_twap_aggregate('twap_15_min_spot', '15 minutes'::interval, '45 minutes'::interval, 'spot'); +SELECT create_twap_aggregate('twap_1_h_spot', '1 hour'::interval, '3 hours'::interval, 'spot'); +SELECT create_twap_aggregate('twap_2_h_spot', '2 hours'::interval, '6 hours'::interval, 'spot'); +SELECT create_twap_aggregate('twap_1_day_spot', '1 day'::interval, '3 days'::interval, 'spot'); + +-- PERP twap +SELECT create_twap_aggregate('twap_1_min_perp', '1 minute'::interval, '3 minutes'::interval, 'perp'); +SELECT create_twap_aggregate('twap_5_min_perp', '5 minutes'::interval, '15 minutes'::interval, 'perp'); +SELECT create_twap_aggregate('twap_15_min_perp', '15 minutes'::interval, '45 minutes'::interval, 'perp'); +SELECT create_twap_aggregate('twap_1_h_perp', '1 hour'::interval, '3 hours'::interval, 'perp'); +SELECT create_twap_aggregate('twap_2_h_perp', '2 hours'::interval, '6 hours'::interval, 'perp'); +SELECT create_twap_aggregate('twap_1_day_perp', '1 day'::interval, '3 days'::interval, 'perp'); diff --git a/pragma-entities/migrations/2024-02-14-160804_add_2hours_interval/down.sql 
b/pragma-entities/migrations/2024-02-14-160804_add_2hours_interval/down.sql deleted file mode 100644 index a3bc8202..00000000 --- a/pragma-entities/migrations/2024-02-14-160804_add_2hours_interval/down.sql +++ /dev/null @@ -1,4 +0,0 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS twap_2_hours_agg; -DROP MATERIALIZED VIEW IF EXISTS price_2_h_agg; -DROP MATERIALIZED VIEW IF EXISTS two_hour_candle; \ No newline at end of file diff --git a/pragma-entities/migrations/2024-02-14-160804_add_2hours_interval/up.sql b/pragma-entities/migrations/2024-02-14-160804_add_2hours_interval/up.sql deleted file mode 100644 index 30d46259..00000000 --- a/pragma-entities/migrations/2024-02-14-160804_add_2hours_interval/up.sql +++ /dev/null @@ -1,54 +0,0 @@ --- Your SQL goes here - --- aggregate -CREATE MATERIALIZED VIEW price_2_h_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('2 hours'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_2_h_agg', - start_offset => NULL, - end_offset => INTERVAL '2 hours', - schedule_interval => INTERVAL '2 hours'); - --- twap -CREATE MATERIALIZED VIEW twap_2_hours_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('2 hours'::interval, timestamp) as bucket, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('twap_2_hours_agg', - start_offset => NULL, - end_offset => INTERVAL '2 hours', - schedule_interval => INTERVAL '2 hours'); - --- ohlc -CREATE MATERIALIZED VIEW two_hour_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('2 hours', timestamp) AS bucket, - pair_id, - FIRST(price, timestamp) AS "open", - MAX(price) AS high, - MIN(price) AS low, - LAST(price, timestamp) AS "close" - FROM entries - GROUP BY bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('two_hour_candle', - start_offset => INTERVAL '6 hours', - end_offset => INTERVAL '2 hours', - schedule_interval => INTERVAL '2 hours'); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-02-14-161427_set_twap_realtime/down.sql b/pragma-entities/migrations/2024-02-14-161427_set_twap_realtime/down.sql deleted file mode 100644 index 395dd56e..00000000 --- a/pragma-entities/migrations/2024-02-14-161427_set_twap_realtime/down.sql +++ /dev/null @@ -1,4 +0,0 @@ --- This file should undo anything in `up.sql` -ALTER MATERIALIZED VIEW twap_1_min_agg set (timescaledb.materialized_only = true); -ALTER MATERIALIZED VIEW twap_15_min_agg set (timescaledb.materialized_only = true); -ALTER MATERIALIZED VIEW twap_1_hour_agg set (timescaledb.materialized_only = true); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-02-14-161427_set_twap_realtime/up.sql b/pragma-entities/migrations/2024-02-14-161427_set_twap_realtime/up.sql deleted file mode 100644 index 5c311890..00000000 --- a/pragma-entities/migrations/2024-02-14-161427_set_twap_realtime/up.sql +++ /dev/null @@ -1,4 +0,0 @@ --- Your SQL goes here -ALTER MATERIALIZED VIEW twap_1_min_agg set (timescaledb.materialized_only = false); -ALTER MATERIALIZED VIEW twap_15_min_agg set 
(timescaledb.materialized_only = false); -ALTER MATERIALIZED VIEW twap_1_hour_agg set (timescaledb.materialized_only = false); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-06-07-130038_add_publisher_signature/down.sql b/pragma-entities/migrations/2024-06-07-130038_add_publisher_signature/down.sql deleted file mode 100644 index c5983b76..00000000 --- a/pragma-entities/migrations/2024-06-07-130038_add_publisher_signature/down.sql +++ /dev/null @@ -1,3 +0,0 @@ --- This file should undo anything in `up.sql` -ALTER TABLE entries -DROP COLUMN publisher_signature; \ No newline at end of file diff --git a/pragma-entities/migrations/2024-06-07-130038_add_publisher_signature/up.sql b/pragma-entities/migrations/2024-06-07-130038_add_publisher_signature/up.sql deleted file mode 100644 index 8caa65bb..00000000 --- a/pragma-entities/migrations/2024-06-07-130038_add_publisher_signature/up.sql +++ /dev/null @@ -1,3 +0,0 @@ --- Your SQL goes here -ALTER TABLE entries -ADD COLUMN publisher_signature VARCHAR; \ No newline at end of file diff --git a/pragma-entities/migrations/2024-06-11-120309_create_future_entries/down.sql b/pragma-entities/migrations/2024-06-11-120309_create_future_entries/down.sql deleted file mode 100644 index fafc6bf1..00000000 --- a/pragma-entities/migrations/2024-06-11-120309_create_future_entries/down.sql +++ /dev/null @@ -1,5 +0,0 @@ --- This file should undo anything in `up.sql` -DROP TABLE IF EXISTS future_entries; -DROP INDEX IF EXISTS idx_future_entries_unique; -SELECT detach_table('future_entries'); - diff --git a/pragma-entities/migrations/2024-06-11-120309_create_future_entries/up.sql b/pragma-entities/migrations/2024-06-11-120309_create_future_entries/up.sql deleted file mode 100644 index fba73c24..00000000 --- a/pragma-entities/migrations/2024-06-11-120309_create_future_entries/up.sql +++ /dev/null @@ -1,20 +0,0 @@ --- Your SQL goes here -CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; - -CREATE TABLE future_entries ( - id uuid DEFAULT uuid_generate_v4(), - pair_id VARCHAR NOT NULL, - price NUMERIC NOT NULL, - timestamp TIMESTAMPTZ NOT NULL, - expiration_timestamp TIMESTAMPTZ, - -- can be NULL for perp contracts - publisher TEXT NOT NULL, - publisher_signature TEXT NOT NULL, - source VARCHAR NOT NULL, - PRIMARY KEY (id, timestamp) -); - -CREATE UNIQUE INDEX idx_future_entries_unique ON future_entries(pair_id, source, timestamp, expiration_timestamp); - -SELECT - create_hypertable('future_entries', 'timestamp'); diff --git a/pragma-entities/migrations/2024-06-26-125836_add_continuous_aggregates_future/down.sql b/pragma-entities/migrations/2024-06-26-125836_add_continuous_aggregates_future/down.sql deleted file mode 100644 index 82fed1eb..00000000 --- a/pragma-entities/migrations/2024-06-26-125836_add_continuous_aggregates_future/down.sql +++ /dev/null @@ -1,5 +0,0 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS price_1_min_agg_future; -DROP MATERIALIZED VIEW IF EXISTS price_15_min_agg_future; -DROP MATERIALIZED VIEW IF EXISTS price_1_h_agg_future; -DROP MATERIALIZED VIEW IF EXISTS price_2_h_agg_future; \ No newline at end of file diff --git a/pragma-entities/migrations/2024-06-26-125836_add_continuous_aggregates_future/up.sql b/pragma-entities/migrations/2024-06-26-125836_add_continuous_aggregates_future/up.sql deleted file mode 100644 index b9b92552..00000000 --- a/pragma-entities/migrations/2024-06-26-125836_add_continuous_aggregates_future/up.sql +++ /dev/null @@ -1,68 +0,0 @@ --- Your SQL goes here -CREATE 
MATERIALIZED VIEW price_1_min_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 min'::interval, timestamp) as bucket, - expiration_timestamp, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_1_min_agg_future', - start_offset => NULL, - end_offset => INTERVAL '1 min', - schedule_interval => INTERVAL '1 min'); - -CREATE MATERIALIZED VIEW price_15_min_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('15 min'::interval, timestamp) as bucket, - expiration_timestamp, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_15_min_agg_future', - start_offset => NULL, - end_offset => INTERVAL '15 min', - schedule_interval => INTERVAL '15 min'); - -CREATE MATERIALIZED VIEW price_1_h_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 hour'::interval, timestamp) as bucket, - expiration_timestamp, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_1_h_agg_future', - start_offset => NULL, - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - -CREATE MATERIALIZED VIEW price_2_h_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('2 hours'::interval, timestamp) as bucket, - expiration_timestamp, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_2_h_agg_future', - start_offset => NULL, - end_offset => INTERVAL '2 hours', - schedule_interval => INTERVAL '2 hours'); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-06-26-145400_add_twap_aggregates_future/down.sql b/pragma-entities/migrations/2024-06-26-145400_add_twap_aggregates_future/down.sql deleted file mode 100644 index b4a1516e..00000000 --- a/pragma-entities/migrations/2024-06-26-145400_add_twap_aggregates_future/down.sql +++ /dev/null @@ -1,5 +0,0 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS twap_1_min_agg_future; -DROP MATERIALIZED VIEW IF EXISTS twap_15_min_agg_future; -DROP MATERIALIZED VIEW IF EXISTS twap_1_hour_agg_future; -DROP MATERIALIZED VIEW IF EXISTS twap_2_hours_agg_future; \ No newline at end of file diff --git a/pragma-entities/migrations/2024-06-26-145400_add_twap_aggregates_future/up.sql b/pragma-entities/migrations/2024-06-26-145400_add_twap_aggregates_future/up.sql deleted file mode 100644 index 413b280e..00000000 --- a/pragma-entities/migrations/2024-06-26-145400_add_twap_aggregates_future/up.sql +++ /dev/null @@ -1,71 +0,0 @@ --- 1min TWAP -CREATE MATERIALIZED VIEW twap_1_min_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 min'::interval, 
timestamp) as bucket, - expiration_timestamp, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('twap_1_min_agg_future', - start_offset => NULL, - end_offset => INTERVAL '1 min', - schedule_interval => INTERVAL '1 min'); - --- 15min TWAP -CREATE MATERIALIZED VIEW twap_15_min_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('15 min'::interval, timestamp) as bucket, - expiration_timestamp, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('twap_15_min_agg_future', - start_offset => NULL, - end_offset => INTERVAL '15 min', - schedule_interval => INTERVAL '15 min'); - --- 1hour TWAP -CREATE MATERIALIZED VIEW twap_1_hour_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 hour'::interval, timestamp) as bucket, - expiration_timestamp, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('twap_1_hour_agg_future', - start_offset => NULL, - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - --- 2hours TWAP -CREATE MATERIALIZED VIEW twap_2_hours_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('2 hours'::interval, timestamp) as bucket, - expiration_timestamp, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('twap_2_hours_agg_future', - start_offset => NULL, - end_offset => INTERVAL '2 hours', - schedule_interval => INTERVAL '2 hours'); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-07-02-232003_add_median_aggregates/down.sql b/pragma-entities/migrations/2024-07-02-232003_add_median_aggregates/down.sql deleted file mode 100644 index 17b7445e..00000000 --- a/pragma-entities/migrations/2024-07-02-232003_add_median_aggregates/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS price_10_s_agg; \ No newline at end of file diff --git a/pragma-entities/migrations/2024-07-02-232003_add_median_aggregates/up.sql b/pragma-entities/migrations/2024-07-02-232003_add_median_aggregates/up.sql deleted file mode 100644 index 0ea8b67e..00000000 --- a/pragma-entities/migrations/2024-07-02-232003_add_median_aggregates/up.sql +++ /dev/null @@ -1,16 +0,0 @@ --- Your SQL goes here -CREATE MATERIALIZED VIEW price_10_s_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('10 seconds'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_10_s_agg', - start_offset => INTERVAL '1 day', - end_offset => INTERVAL '10 
seconds', - schedule_interval => INTERVAL '10 seconds'); diff --git a/pragma-entities/migrations/2024-07-02-232723_new_ohlc_aggs/down.sql b/pragma-entities/migrations/2024-07-02-232723_new_ohlc_aggs/down.sql deleted file mode 100644 index 69144cfc..00000000 --- a/pragma-entities/migrations/2024-07-02-232723_new_ohlc_aggs/down.sql +++ /dev/null @@ -1,6 +0,0 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS one_day_candle_new; -DROP MATERIALIZED VIEW IF EXISTS one_hour_candle_new; -DROP MATERIALIZED VIEW IF EXISTS fifteen_minute_candle_new; -DROP MATERIALIZED VIEW IF EXISTS five_minute_candle_new; -DROP MATERIALIZED VIEW IF EXISTS one_minute_candle_new; \ No newline at end of file diff --git a/pragma-entities/migrations/2024-07-02-232723_new_ohlc_aggs/up.sql b/pragma-entities/migrations/2024-07-02-232723_new_ohlc_aggs/up.sql deleted file mode 100644 index e77c5ece..00000000 --- a/pragma-entities/migrations/2024-07-02-232723_new_ohlc_aggs/up.sql +++ /dev/null @@ -1,96 +0,0 @@ --- Your SQL goes here --- 1 day candle -CREATE MATERIALIZED VIEW new_1_day_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 day', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - - -SELECT add_continuous_aggregate_policy('new_1_day_candle', - start_offset => INTERVAL '3 days', - end_offset => INTERVAL '1 day', - schedule_interval => INTERVAL '1 day'); - --- 1 hour candle -CREATE MATERIALIZED VIEW new_1_h_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 hour', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('new_1_h_candle', - start_offset => INTERVAL '3 hours', - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '1 hour'); - --- 15 minute candle -CREATE MATERIALIZED VIEW new_15_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('15 minutes', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket)::numeric AS "open", - MAX(median_price)::numeric AS high, - MIN(median_price)::numeric AS low, - LAST(median_price, bucket)::numeric AS "close" - FROM price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('new_15_min_candle', - start_offset => INTERVAL '45 minutes', - end_offset => INTERVAL '15 minutes', - schedule_interval => INTERVAL '15 minutes'); - --- 5 minute candle -CREATE MATERIALIZED VIEW new_5_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('5 minutes', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('new_5_min_candle', - start_offset => INTERVAL '15 minutes', - end_offset => INTERVAL '5 minutes', - schedule_interval => INTERVAL '5 minutes'); - --- 1 minute candle -CREATE MATERIALIZED VIEW new_1_min_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 minute', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - 
MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - -SELECT add_continuous_aggregate_policy('new_1_min_candle', - start_offset => INTERVAL '3 minutes', - end_offset => INTERVAL '1 minute', - schedule_interval => INTERVAL '1 minute'); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-07-15-112059_add_daily_and_weekly_timeframe/down.sql b/pragma-entities/migrations/2024-07-15-112059_add_daily_and_weekly_timeframe/down.sql deleted file mode 100644 index 781e3a9d..00000000 --- a/pragma-entities/migrations/2024-07-15-112059_add_daily_and_weekly_timeframe/down.sql +++ /dev/null @@ -1,11 +0,0 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS twap_1_day_agg; -DROP MATERIALIZED VIEW IF EXISTS twap_1_day_agg_future; -DROP MATERIALIZED VIEW IF EXISTS price_1_day_agg; -DROP MATERIALIZED VIEW IF EXISTS price_1_day_agg_future; - -DROP MATERIALIZED VIEW IF EXISTS twap_1_week_agg; -DROP MATERIALIZED VIEW IF EXISTS twap_1_week_agg_future; -DROP MATERIALIZED VIEW IF EXISTS price_1_week_agg; -DROP MATERIALIZED VIEW IF EXISTS price_1_week_agg_future; -DROP MATERIALIZED VIEW IF EXISTS 1_week_candle; \ No newline at end of file diff --git a/pragma-entities/migrations/2024-07-15-112059_add_daily_and_weekly_timeframe/up.sql b/pragma-entities/migrations/2024-07-15-112059_add_daily_and_weekly_timeframe/up.sql deleted file mode 100644 index 8beabede..00000000 --- a/pragma-entities/migrations/2024-07-15-112059_add_daily_and_weekly_timeframe/up.sql +++ /dev/null @@ -1,163 +0,0 @@ --- Your SQL goes here - --- aggregate -CREATE MATERIALIZED VIEW price_1_day_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 day'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_1_day_agg', - start_offset => NULL, - end_offset => INTERVAL '1 day', - schedule_interval => INTERVAL '1 day'); - -CREATE MATERIALIZED VIEW price_1_week_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 week'::interval, timestamp) as bucket, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_1_week_agg', - start_offset => NULL, - end_offset => INTERVAL '1 week', - schedule_interval => INTERVAL '1 week'); - --- aggregate future - -CREATE MATERIALIZED VIEW price_1_day_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 day'::interval, timestamp) as bucket, - expiration_timestamp, - approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_1_day_agg_future', - start_offset => NULL, - end_offset => INTERVAL '1 day', - schedule_interval => INTERVAL '1 day'); - -CREATE MATERIALIZED VIEW price_1_week_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 week'::interval, timestamp) as bucket, - expiration_timestamp, - 
approx_percentile(0.5, percentile_agg(price))::numeric AS median_price, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_1_week_agg_future', - start_offset => NULL, - end_offset => INTERVAL '1 week', - schedule_interval => INTERVAL '1 week'); - - --- twap -CREATE MATERIALIZED VIEW twap_1_day_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 day'::interval, timestamp) as bucket, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('twap_1_day_agg', - start_offset => NULL, - end_offset => INTERVAL '1 day', - schedule_interval => INTERVAL '1 day'); - -CREATE MATERIALIZED VIEW twap_1_week_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 week'::interval, timestamp) as bucket, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('twap_1_week_agg', - start_offset => NULL, - end_offset => INTERVAL '1 week', - schedule_interval => INTERVAL '1 week'); - --- twap future - -CREATE MATERIALIZED VIEW twap_1_day_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 day'::interval, timestamp) as bucket, - expiration_timestamp, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - - -SELECT add_continuous_aggregate_policy('twap_1_day_agg_future', - start_offset => NULL, - end_offset => INTERVAL '1 day', - schedule_interval => INTERVAL '1 day'); - -CREATE MATERIALIZED VIEW twap_1_week_agg_future -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 week'::interval, timestamp) as bucket, - expiration_timestamp, - average(time_weight('Linear', timestamp, price))::numeric as price_twap, - COUNT(DISTINCT source) as num_sources -FROM future_entries -GROUP BY bucket, pair_id, expiration_timestamp -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('twap_1_week_agg_future', - start_offset => NULL, - end_offset => INTERVAL '1 week', - schedule_interval => INTERVAL '1 week'); - - --- ohlc - -CREATE MATERIALIZED VIEW new_1_week_candle -WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 week', bucket) AS ohlc_bucket, - pair_id, - FIRST(median_price, bucket) AS "open", - MAX(median_price) AS high, - MIN(median_price) AS low, - LAST(median_price, bucket) AS "close" - FROM price_10_s_agg - GROUP BY ohlc_bucket, pair_id - WITH NO DATA; - - -SELECT add_continuous_aggregate_policy('new_1_week_candle', - start_offset => INTERVAL '3 week', - end_offset => INTERVAL '1 week', - schedule_interval => INTERVAL '1 week'); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-10-25-092708_add_new_pairs/down.sql b/pragma-entities/migrations/2024-10-25-092708_add_new_pairs/down.sql deleted file mode 100644 index a78ed916..00000000 --- a/pragma-entities/migrations/2024-10-25-092708_add_new_pairs/down.sql +++ /dev/null @@ -1,3 +0,0 @@ --- This file should undo anything in `up.sql` 
-DELETE FROM public.currencies -WHERE name IN ('TON', 'JTO', 'OKB', '1000SATS'); \ No newline at end of file diff --git a/pragma-entities/migrations/2024-10-25-092708_add_new_pairs/up.sql b/pragma-entities/migrations/2024-10-25-092708_add_new_pairs/up.sql deleted file mode 100644 index 62a3d2c4..00000000 --- a/pragma-entities/migrations/2024-10-25-092708_add_new_pairs/up.sql +++ /dev/null @@ -1,6 +0,0 @@ --- Your SQL goes here -INSERT INTO public.currencies (name, decimals, abstract, ethereum_address) VALUES -('TON', 9, false, '0x582D872A1B094FC48F5DE31D3B73F2D9BE47DEF1'), -('JTO', 9, false, NULL), -('OKB', 18, false, '0x75231F58B43240C9718DD58B4967C5114342A86C'), -('1000SATS', 8, false, NULL); diff --git a/pragma-entities/migrations/2025-02-13-142000_add_median_1s_interval/down.sql b/pragma-entities/migrations/2025-02-13-142000_add_median_1s_interval/down.sql deleted file mode 100644 index 717a006f..00000000 --- a/pragma-entities/migrations/2025-02-13-142000_add_median_1s_interval/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS price_1_s_agg; diff --git a/pragma-entities/migrations/2025-02-13-142000_add_median_1s_interval/up.sql b/pragma-entities/migrations/2025-02-13-142000_add_median_1s_interval/up.sql deleted file mode 100644 index c981f226..00000000 --- a/pragma-entities/migrations/2025-02-13-142000_add_median_1s_interval/up.sql +++ /dev/null @@ -1,16 +0,0 @@ -CREATE MATERIALIZED VIEW price_1_s_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('1 second'::interval, timestamp) as bucket, - -- Force full numeric display using 1000 for numeric, so we don't loose digits - (percentile_cont(0.5) WITHIN GROUP (ORDER BY price))::numeric(1000,0) AS median_price, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_1_s_agg', - start_offset => INTERVAL '10 seconds', - end_offset => '1 second', - schedule_interval => INTERVAL '1 second'); \ No newline at end of file diff --git a/pragma-entities/migrations/2025-02-14-151531_add_5s_interval/down.sql b/pragma-entities/migrations/2025-02-14-151531_add_5s_interval/down.sql deleted file mode 100644 index b9882842..00000000 --- a/pragma-entities/migrations/2025-02-14-151531_add_5s_interval/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- This file should undo anything in `up.sql` -DROP MATERIALIZED VIEW IF EXISTS price_5_s_agg; diff --git a/pragma-entities/migrations/2025-02-14-151531_add_5s_interval/up.sql b/pragma-entities/migrations/2025-02-14-151531_add_5s_interval/up.sql deleted file mode 100644 index eaab834c..00000000 --- a/pragma-entities/migrations/2025-02-14-151531_add_5s_interval/up.sql +++ /dev/null @@ -1,17 +0,0 @@ --- Your SQL goes here -CREATE MATERIALIZED VIEW price_5_s_agg -WITH (timescaledb.continuous, timescaledb.materialized_only = false) -AS SELECT - pair_id, - time_bucket('5 seconds'::interval, timestamp) as bucket, - -- Force full numeric display using 1000 for numeric, so we don't loose digits - (percentile_cont(0.5) WITHIN GROUP (ORDER BY price))::numeric(1000,0) AS median_price, - COUNT(DISTINCT source) as num_sources -FROM entries -GROUP BY bucket, pair_id -WITH NO DATA; - -SELECT add_continuous_aggregate_policy('price_5_s_agg', - start_offset => INTERVAL '15 seconds', - end_offset => '5 seconds', - schedule_interval => INTERVAL '5 seconds'); \ No newline at end of file diff --git 
a/pragma-entities/migrations/2025-03-15-043705_add-compression/down.sql b/pragma-entities/migrations/2025-03-15-043705_add-compression/down.sql new file mode 100644 index 00000000..71fbc72b --- /dev/null +++ b/pragma-entities/migrations/2025-03-15-043705_add-compression/down.sql @@ -0,0 +1,72 @@ +-- This file should undo anything in `up.sql` + +-- Function to remove compression policies and disable columnstore from continuous aggregates +CREATE OR REPLACE FUNCTION remove_compression_from_continuous_aggregates() +RETURNS void AS $$ +DECLARE + view_name text; +BEGIN + -- Loop over all relevant views, including _per_source variants where applicable + FOR view_name IN + SELECT unnest(ARRAY[ + -- Sub-minute aggregates (main and _per_source where applicable) + 'median_100_ms_spot', 'median_100_ms_spot_per_source', + 'median_1_s_spot', 'median_1_s_spot_per_source', + 'median_5_s_spot', 'median_5_s_spot_per_source', + 'median_100_ms_perp', 'median_100_ms_perp_per_source', + 'median_1_s_perp', 'median_1_s_perp_per_source', + 'median_5_s_perp', 'median_5_s_perp_per_source', + 'candle_10_s_spot', 'candle_10_s_perp', + + -- 1-15min aggregates (main and _per_source where applicable) + 'median_1_min_spot', 'median_1_min_spot_per_source', + 'median_15_min_spot', 'median_15_min_spot_per_source', + 'median_1_min_perp', 'median_1_min_perp_per_source', + 'median_15_min_perp', 'median_15_min_perp_per_source', + 'candle_1_min_spot', 'candle_5_min_spot', 'candle_15_min_spot', + 'candle_1_min_perp', 'candle_5_min_perp', 'candle_15_min_perp', + 'twap_1_min_spot', 'twap_1_min_spot_per_source', + 'twap_5_min_spot', 'twap_5_min_spot_per_source', + 'twap_15_min_spot', 'twap_15_min_spot_per_source', + 'twap_1_min_perp', 'twap_1_min_perp_per_source', + 'twap_5_min_perp', 'twap_5_min_perp_per_source', + 'twap_15_min_perp', 'twap_15_min_perp_per_source', + + -- 1-2h aggregates (main and _per_source where applicable) + 'median_1_h_spot', 'median_1_h_spot_per_source', + 'median_2_h_spot', 'median_2_h_spot_per_source', + 'median_1_h_perp', 'median_1_h_perp_per_source', + 'median_2_h_perp', 'median_2_h_perp_per_source', + 'candle_1_h_spot', 'candle_1_h_perp', + 'twap_1_h_spot', 'twap_1_h_spot_per_source', + 'twap_2_h_spot', 'twap_2_h_spot_per_source', + 'twap_1_h_perp', 'twap_1_h_perp_per_source', + 'twap_2_h_perp', 'twap_2_h_perp_per_source', + + -- Daily aggregates (main and _per_source where applicable) + 'median_1_day_spot', 'median_1_day_spot_per_source', + 'median_1_day_perp', 'median_1_day_perp_per_source', + 'candle_1_day_spot', 'candle_1_day_perp', + 'twap_1_day_spot', 'twap_1_day_spot_per_source', + 'twap_1_day_perp', 'twap_1_day_perp_per_source', + + -- Weekly aggregates (main and _per_source where applicable) + 'median_1_week_spot', 'median_1_week_spot_per_source', + 'median_1_week_perp', 'median_1_week_perp_per_source' + ]) + LOOP + -- Remove compression policy if it exists, using a safer method + EXECUTE format('SELECT remove_compression_policy(%L, if_exists => true)', view_name); + + -- Reset columnstore and segmentby settings to default + EXECUTE format('ALTER MATERIALIZED VIEW %I RESET (timescaledb.enable_columnstore, timescaledb.segmentby)', view_name); + END LOOP; +END; +$$ LANGUAGE plpgsql; + +-- Execute the function to apply the changes +SELECT remove_compression_from_continuous_aggregates(); + +-- Drop the function after execution +DROP FUNCTION remove_compression_from_continuous_aggregates(); +DROP FUNCTION add_compression_to_continuous_aggregates(); diff --git 
a/pragma-entities/migrations/2025-03-15-043705_add-compression/up.sql b/pragma-entities/migrations/2025-03-15-043705_add-compression/up.sql new file mode 100644 index 00000000..9eb93a1b --- /dev/null +++ b/pragma-entities/migrations/2025-03-15-043705_add-compression/up.sql @@ -0,0 +1,69 @@ +-- Add compression to all continuous aggregates, including _per_source sub-tables +CREATE OR REPLACE FUNCTION add_compression_to_continuous_aggregates() +RETURNS void AS $$ +DECLARE + -- Define arrays for each type of view + median_views text[] := ARRAY[ + 'median_100_ms_spot', 'median_1_s_spot', 'median_5_s_spot', 'median_10_s_spot', + 'median_1_min_spot', 'median_15_min_spot', 'median_1_h_spot', 'median_2_h_spot', + 'median_1_day_spot', 'median_1_week_spot', + 'median_100_ms_perp', 'median_1_s_perp', 'median_5_s_perp', 'median_10_s_perp', + 'median_1_min_perp', 'median_15_min_perp', 'median_1_h_perp', 'median_2_h_perp', + 'median_1_day_perp', 'median_1_week_perp' + ]; + twap_views text[] := ARRAY[ + 'twap_1_min_spot', 'twap_5_min_spot', 'twap_15_min_spot', + 'twap_1_h_spot', 'twap_2_h_spot', 'twap_1_day_spot', + 'twap_1_min_perp', 'twap_5_min_perp', 'twap_15_min_perp', + 'twap_1_h_perp', 'twap_2_h_perp', 'twap_1_day_perp' + ]; + candle_views text[] := ARRAY[ + 'candle_10_s_spot', 'candle_1_min_spot', 'candle_5_min_spot', 'candle_15_min_spot', + 'candle_1_h_spot', 'candle_1_day_spot', + 'candle_10_s_perp', 'candle_1_min_perp', 'candle_5_min_perp', 'candle_15_min_perp', + 'candle_1_h_perp', 'candle_1_day_perp' + ]; + view_to_compress text; + compress_after interval; +BEGIN + -- Loop over all views: main median, median _per_source, main twap, twap _per_source, and candle views + FOR view_to_compress IN + SELECT view_n + FROM ( + -- Main median views + SELECT unnest(median_views) AS view_n + UNION + -- Median _per_source sub-tables + SELECT unnest(median_views) || '_per_source' + UNION + -- Main TWAP views + SELECT unnest(twap_views) + UNION + -- TWAP _per_source sub-tables + SELECT unnest(twap_views) || '_per_source' + UNION + -- Candlestick views (no _per_source sub-tables) + SELECT unnest(candle_views) + ) AS all_views + LOOP + -- Determine compress_after from the view's granularity. Underscores are escaped because "_" is a single-character LIKE wildcard; unescaped, a pattern like '%s%' would match every *_spot and *_per_source view + compress_after := + CASE + WHEN view_to_compress LIKE '%\_ms\_%' OR view_to_compress LIKE '%\_s\_%' THEN INTERVAL '1 hour' + WHEN view_to_compress LIKE '%\_min\_%' THEN INTERVAL '6 hours' + WHEN view_to_compress LIKE '%\_h\_%' THEN INTERVAL '1 day' + WHEN view_to_compress LIKE '%\_day\_%' THEN INTERVAL '7 days' + WHEN view_to_compress LIKE '%\_week\_%' THEN INTERVAL '30 days' + END; + + -- Enable columnstore compression and set segmentby + EXECUTE format('ALTER MATERIALIZED VIEW %I SET (timescaledb.enable_columnstore = true, timescaledb.segmentby = ''pair_id'')', view_to_compress); + + -- Add compression policy + EXECUTE format('CALL add_columnstore_policy(%L, after => $1)', view_to_compress) USING compress_after; + END LOOP; +END; +$$ LANGUAGE plpgsql; + +-- Execute the function to apply compression policies +SELECT add_compression_to_continuous_aggregates();
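A quick sanity check for the escaped patterns above (a sketch, not part of the migration; backslash is PostgreSQL's default LIKE escape character, and the view names are taken from the arrays in the function):

SELECT v AS view_name,
       CASE
           WHEN v LIKE '%\_ms\_%' OR v LIKE '%\_s\_%' THEN INTERVAL '1 hour'
           WHEN v LIKE '%\_min\_%' THEN INTERVAL '6 hours'
           WHEN v LIKE '%\_h\_%' THEN INTERVAL '1 day'
           WHEN v LIKE '%\_day\_%' THEN INTERVAL '7 days'
           WHEN v LIKE '%\_week\_%' THEN INTERVAL '30 days'
       END AS compress_after
FROM unnest(ARRAY[
    'median_100_ms_spot', 'median_1_s_spot', 'twap_15_min_perp',
    'candle_1_h_spot', 'median_1_day_perp', 'median_1_week_spot_per_source'
]) AS v;
-- Expected intervals, in order: 1 hour, 1 hour, 6 hours, 1 day, 7 days, 30 days.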
diff --git a/pragma-entities/migrations/2025-04-20-145559_create-funding-rates-table/down.sql b/pragma-entities/migrations/2025-04-20-145559_create-funding-rates-table/down.sql new file mode 100644 index 00000000..b9384cf4 --- /dev/null +++ b/pragma-entities/migrations/2025-04-20-145559_create-funding-rates-table/down.sql @@ -0,0 +1,10 @@ +-- This file should undo anything in `up.sql` + +-- Remove compression policy first +SELECT remove_compression_policy('funding_rates'); + +-- Disable compression +ALTER TABLE funding_rates SET (timescaledb.compress = false); + +-- Drop the hypertable +DROP TABLE funding_rates CASCADE; diff --git a/pragma-entities/migrations/2025-04-20-145559_create-funding-rates-table/up.sql b/pragma-entities/migrations/2025-04-20-145559_create-funding-rates-table/up.sql new file mode 100644 index 00000000..d4bc3080 --- /dev/null +++ b/pragma-entities/migrations/2025-04-20-145559_create-funding-rates-table/up.sql @@ -0,0 +1,26 @@ +-- Your SQL goes here + +CREATE TABLE funding_rates ( + id uuid DEFAULT uuid_generate_v4(), + source VARCHAR NOT NULL, + pair VARCHAR NOT NULL, + annualized_rate DOUBLE PRECISION NOT NULL, + timestamp TIMESTAMPTZ NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (id, timestamp) +); + +-- Convert the table to a hypertable +SELECT create_hypertable('funding_rates', 'timestamp', chunk_time_interval => INTERVAL '1 day'); + +-- Create an index for efficient querying by pair +CREATE INDEX idx_funding_rates_pair ON funding_rates(pair); + +-- Enable compression +ALTER TABLE funding_rates SET ( + timescaledb.compress, + timescaledb.compress_segmentby = 'source,pair' +); + +-- Add compression policy to compress chunks older than 7 days +SELECT add_compression_policy('funding_rates', INTERVAL '7 days');
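Since the table is segmented by source,pair and indexed on pair, the natural read path is the latest rate per source for one pair. A hypothetical query along those lines (the pair value is illustrative, not part of the migration):

SELECT DISTINCT ON (source)
       source, pair, annualized_rate, timestamp
FROM funding_rates
WHERE pair = 'BTC/USD'
ORDER BY source, timestamp DESC;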
diff --git a/pragma-entities/src/connection.rs b/pragma-entities/src/connection.rs index 9b1284a1..5b181706 100644 --- a/pragma-entities/src/connection.rs +++ b/pragma-entities/src/connection.rs @@ -1,26 +1,26 @@ -use crate::error::ErrorKind; +use crate::error::PragmaNodeError; use deadpool_diesel::postgres::{Manager, Pool}; pub const ENV_ONCHAIN_DATABASE_URL: &str = "ONCHAIN_DATABASE_URL"; pub const ENV_OFFCHAIN_DATABASE_URL: &str = "OFFCHAIN_DATABASE_URL"; const ENV_DATABASE_MAX_CONN: &str = "DATABASE_MAX_CONN"; -pub fn init_pool(app_name: &str, database_url_env: &str) -> Result<Pool, ErrorKind> { +pub fn init_pool(app_name: &str, database_url_env: &str) -> Result<Pool, PragmaNodeError> { if database_url_env != ENV_OFFCHAIN_DATABASE_URL && database_url_env != ENV_ONCHAIN_DATABASE_URL { - return Err(ErrorKind::GenericInitDatabase(format!( + return Err(PragmaNodeError::GenericInitDatabase(format!( "invalid database URL environment variable: {database_url_env}", ))); } let database_url = std::env::var(database_url_env) - .map_err(|_| ErrorKind::VariableDatabase(database_url_env.to_string()))?; + .map_err(|_| PragmaNodeError::MissingDbEnvVar(database_url_env.to_string()))?; let database_max_conn = std::env::var(ENV_DATABASE_MAX_CONN) - .map_err(|_| ErrorKind::VariableDatabase(ENV_DATABASE_MAX_CONN.to_string()))? + .map_err(|_| PragmaNodeError::MissingDbEnvVar(ENV_DATABASE_MAX_CONN.to_string()))? .parse::<u32>() .map_err(|_| { - ErrorKind::GenericInitDatabase(format!("cannot parse {ENV_DATABASE_MAX_CONN}")) + PragmaNodeError::GenericInitDatabase(format!("cannot parse {ENV_DATABASE_MAX_CONN}")) })? as usize; let manager = Manager::new( @@ -31,14 +31,5 @@ pub fn init_pool(app_name: &str, database_url_env: &str) -> Result<Pool, ErrorKind> -pub fn get_redis_connection_uri(host: &str, port: u16) -> String { - format!("redis://{host}:{port}/") -} - -pub fn init_redis_client(host: &str, port: u16) -> Result<redis::Client, ErrorKind> { - redis::Client::open(get_redis_connection_uri(host, port)) - .map_err(|e| ErrorKind::RedisConnection(e.to_string())) + .map_err(|e| PragmaNodeError::PoolDatabase(e.to_string())) } diff --git a/pragma-entities/src/db.rs b/pragma-entities/src/db.rs index a0fc495e..b47b6a5e 100644 --- a/pragma-entities/src/db.rs +++ b/pragma-entities/src/db.rs @@ -1,12 +1,25 @@ use deadpool_diesel::postgres::Pool; -use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness}; +use diesel::prelude::*; +use diesel::sql_query; +use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations}; pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("./migrations/"); pub async fn run_migrations(pool: &Pool) { - let conn = pool.get().await.unwrap(); + let conn = pool.get().await.expect("Failed to get DB connection"); + + // First ensure TimescaleDB and its toolkit extension are enabled + conn.interact(|conn| { + sql_query("CREATE EXTENSION IF NOT EXISTS timescaledb;").execute(conn)?; + sql_query("CREATE EXTENSION IF NOT EXISTS timescaledb_toolkit;").execute(conn) + }) + .await + .expect("Failed to enable TimescaleDB extensions") + .expect("Database error while enabling TimescaleDB extensions"); + + // Then run the migrations conn.interact(|conn| conn.run_pending_migrations(MIGRATIONS).map(|_| ())) .await - .unwrap() - .unwrap(); + .expect("Failed to run migrations") + .expect("Database error during migration"); } diff --git a/pragma-entities/src/dto/future_entry.rs b/pragma-entities/src/dto/future_entry.rs index b182a0a6..4bb7fc95 100644 --- a/pragma-entities/src/dto/future_entry.rs +++ b/pragma-entities/src/dto/future_entry.rs @@ -27,7 +27,7 @@ impl From for FutureEntry { source: future_entry.source, timestamp: future_entry.timestamp.and_utc().timestamp_millis() as u64, expiration_timestamp, - publisher_signature: future_entry.publisher_signature, + publisher_signature: future_entry.publisher_signature.unwrap_or_default(), price: future_entry.price.to_u128().unwrap_or(0), // change default value ?
} } diff --git a/pragma-entities/src/error.rs b/pragma-entities/src/error.rs index 5866f38f..147af773 100644 --- a/pragma-entities/src/error.rs +++ b/pragma-entities/src/error.rs @@ -1,8 +1,5 @@ -use deadpool_diesel::InteractError; -use pragma_common::{ - timestamp::TimestampRangeError, - types::{AggregationMode, Interval}, -}; +use deadpool_diesel::{InteractError, PoolError}; +use pragma_common::{AggregationMode, InstrumentType, Interval, starknet::StarknetNetwork}; use std::{ fmt::{self, Debug}, num::TryFromIntError, @@ -10,116 +7,89 @@ use thiserror::Error; use utoipa::ToSchema; -use crate::models::entry_error::EntryError; +use crate::models::entries::timestamp::TimestampError; -#[derive(Debug, ToSchema, thiserror::Error)] -pub enum InfraError { - InternalServerError, - RoutingError, - NotFound, - DisputerNotSet, - SettlerNotSet, - InvalidTimestamp(String), - #[error(transparent)] - #[schema(value_type = String)] - NonZeroU32Conversion(#[from] TryFromIntError), - #[error(transparent)] - #[schema(value_type = String)] - AxumError(#[from] axum::Error), - UnsupportedInterval(Interval, AggregationMode), -} - -impl InfraError { - pub fn to_entry_error(&self, pair_id: &String) -> EntryError { - match self { - Self::NotFound => EntryError::NotFound(pair_id.to_string()), - Self::RoutingError => EntryError::MissingData(pair_id.to_string()), - Self::InvalidTimestamp(e) => { - EntryError::InvalidTimestamp(TimestampRangeError::Other(e.to_string())) - } - Self::UnsupportedInterval(i, d) => EntryError::UnsupportedInterval(*i, *d), - Self::InternalServerError - | Self::DisputerNotSet - | Self::SettlerNotSet - | Self::NonZeroU32Conversion(_) - | Self::AxumError(_) => EntryError::InternalServerError, - } - } +#[derive(Debug, Error, ToSchema)] +pub enum WebSocketError { + #[error("could not create a channel with the client")] + ChannelInit, + #[error("could not decode client message: {0}")] + MessageDecode(String), + #[error("could not close the channel")] + ChannelClose, } #[derive(Debug, Error)] -pub enum ErrorKind { +pub enum PragmaNodeError { #[error("cannot init database pool : {0}")] PoolDatabase(String), #[error("cannot find environment variable for database init : {0}")] - VariableDatabase(String), + MissingDbEnvVar(String), #[error("database init error : {0}")] GenericInitDatabase(String), #[error("cannot init redis connection : {0}")] RedisConnection(String), } -pub fn adapt_infra_error<T: Error>(error: T) -> InfraError { - error.as_infra_error() +#[derive(Debug, thiserror::Error)] +pub enum InfraError { + // Bad request (400) + InvalidTimestamp(TimestampError), + UnsupportedInterval(Interval, AggregationMode), + UnsupportedOnchainInterval(Interval), + UnsupportedDataTypeForNetwork(StarknetNetwork, InstrumentType), + // Not Found error (404) + RoutingError(String), + EntryNotFound(String), + PairNotFound(String), + CheckpointNotFound(String), + PublishersNotFound, + // Known internal errors + #[error(transparent)] + NonZeroU32Conversion(#[from] TryFromIntError), + #[error(transparent)] + AxumError(#[from] axum::Error), + RpcError(String), + DbPoolError(#[from] PoolError), + DbInteractionError(#[from] InteractError), + DbResultError(#[from] diesel::result::Error), + NoRpcAvailable(StarknetNetwork), + // Unknown internal server error - should never be shown to the user + InternalServerError, + WebSocketError(#[from] WebSocketError), } impl fmt::Display for InfraError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::NotFound => write!(f, "Not found"), -
Self::RoutingError => write!(f, "Routing Error"), - Self::InternalServerError => write!(f, "Internal server error"), - Self::DisputerNotSet => write!(f, "Unable to fetch disputer address"), - Self::SettlerNotSet => write!(f, "Unable to fetch settler address"), - Self::InvalidTimestamp(e) => write!(f, "Invalid timestamp {e}"), - Self::NonZeroU32Conversion(e) => write!(f, "Non zero u32 conversion {e}"), - Self::AxumError(e) => write!(f, "Axum error {e}"), + // 400 + Self::InvalidTimestamp(e) => write!(f, "Invalid timestamp: {e}"), Self::UnsupportedInterval(i, a) => { write!(f, "Unsupported interval {i:?} for aggregation {a:?}") } + Self::UnsupportedOnchainInterval(i) => { + write!(f, "Unsupported interval {i:?} for onchain data") + } + Self::UnsupportedDataTypeForNetwork(n, d) => { + write!(f, "Unsupported data type {d:?} for network {n:?}") + } + // 404 + Self::EntryNotFound(pair_id) => write!(f, "Entry not found for pair {pair_id}"), + Self::PairNotFound(pair_id) => write!(f, "Pair {pair_id} not found"), + Self::RoutingError(details) => write!(f, "Routing error: {details}"), + Self::CheckpointNotFound(pair_id) => write!(f, "No checkpoint found for {pair_id}"), + Self::PublishersNotFound => write!(f, "No publishers found"), + // 500 + Self::DbResultError(e) => write!(f, "Error fetching from db {e}"), + Self::DbInteractionError(e) => write!(f, "Error querying from db {e}"), + Self::DbPoolError(e) => write!(f, "Error connecting to db {e}"), + Self::AxumError(e) => write!(f, "Axum error {e}"), + Self::NoRpcAvailable(network) => write!(f, "No RPC available for network {network}"), + Self::NonZeroU32Conversion(e) => write!(f, "Non zero u32 conversion {e}"), + Self::InternalServerError => write!(f, "Internal server error"), + Self::RpcError(e) => write!(f, "RPC error: {e}"), + // Unclassified + Self::WebSocketError(e) => write!(f, "WebSocket error {e}"), } } } - -pub trait Error { - fn as_infra_error(&self) -> InfraError; -} - -impl Error for diesel::result::Error { - fn as_infra_error(&self) -> InfraError { - match self { - Self::NotFound => InfraError::NotFound, - _ => InfraError::InternalServerError, - } - } -} - -impl Error for deadpool_diesel::PoolError { - fn as_infra_error(&self) -> InfraError { - InfraError::InternalServerError - } -} - -impl Error for InteractError { - fn as_infra_error(&self) -> InfraError { - InfraError::InternalServerError - } -} - -#[derive(Debug, thiserror::Error, ToSchema)] -pub enum RedisError { - #[error("internal server error")] - InternalServerError, - #[error("could not establish a connection with Redis")] - Connection, - #[error("could not find option for instrument {1} at block {0}")] - OptionNotFound(u64, String), - #[error("merkle tree not found for block {0}")] - MerkleTreeNotFound(u64), - #[error("invalid option hash, could not convert to felt: {0}")] - InvalidOptionHash(String), - #[error("could not deserialize RawMerkleTree into MerkleTree")] - TreeDeserialization, - #[error("no merkle feeds published for network: {0}")] - NoBlocks(String), -} diff --git a/pragma-entities/src/lib.rs b/pragma-entities/src/lib.rs index 30db472f..57bb012d 100644 --- a/pragma-entities/src/lib.rs +++ b/pragma-entities/src/lib.rs @@ -7,14 +7,15 @@ pub mod models; pub mod schema; pub use models::entries::entry_error::EntryError; +pub use models::entries::timestamp::TimestampError; +pub use models::entries::timestamp::UnixTimestamp; + +pub use error::InfraError; -// exporting for idiomatic use -pub use error::{adapt_infra_error, InfraError}; pub use models::{ 
checkpoint_error::CheckpointError, - currency::Currency, entry::{Entry, NewEntry}, - entry_error::VolatilityError, + funding_rate::{FundingRate, NewFundingRate}, future_entry::{FutureEntry, NewFutureEntry}, publisher::{NewPublisher, Publishers}, publisher_error::PublisherError, diff --git a/pragma-entities/src/macros.rs b/pragma-entities/src/macros.rs index 97ee65e5..7ec6136a 100644 --- a/pragma-entities/src/macros.rs +++ b/pragma-entities/src/macros.rs @@ -1,44 +1,36 @@ -/// Convert entry to database format +/// Convert timestamp to datetime /// /// Arguments: -/// * `entry`: Entry to convert -/// * `signature`: Signature to use +/// * `timestamp`: Timestamp to convert /// /// Returns: -/// * `NewEntry`: New entry +/// * `Result<NaiveDateTime, EntryError>`: Datetime #[macro_export] macro_rules! convert_timestamp_to_datetime { ($timestamp:expr) => {{ if $timestamp > (i64::MAX as u64).try_into().unwrap() { - Err(EntryError::InvalidTimestamp( - pragma_common::timestamp::TimestampRangeError::Other(format!( - "Timestamp {} is too large", - $timestamp - )), - )) + Err(EntryError::InvalidTimestamp(TimestampError::Other( + format!("Timestamp {} is too large", $timestamp), + ))) } else if $timestamp.to_string().len() >= 13 { #[allow(clippy::cast_possible_wrap)] chrono::DateTime::<chrono::Utc>::from_timestamp_millis($timestamp as i64) .map(|dt| dt.naive_utc()) .ok_or_else(|| { - EntryError::InvalidTimestamp( - pragma_common::timestamp::TimestampRangeError::Other(format!( - "Could not convert {} to DateTime (millis)", - $timestamp - )), - ) + EntryError::InvalidTimestamp(TimestampError::Other(format!( + "Could not convert {} to DateTime (millis)", + $timestamp + ))) }) } else { #[allow(clippy::cast_possible_wrap)] chrono::DateTime::<chrono::Utc>::from_timestamp($timestamp as i64, 0) .map(|dt| dt.naive_utc()) .ok_or_else(|| { - EntryError::InvalidTimestamp( - pragma_common::timestamp::TimestampRangeError::Other(format!( - "Could not convert {} to DateTime (seconds)", - $timestamp - )), - ) + EntryError::InvalidTimestamp(TimestampError::Other(format!( + "Could not convert {} to DateTime (seconds)", + $timestamp + ))) }) } }}; @@ -49,6 +41,7 @@ macro_rules!
convert_timestamp_to_datetime { mod tests { use crate::EntryError; + use crate::TimestampError; use chrono::TimeZone; use chrono::Utc; diff --git a/pragma-entities/src/models/checkpoint_error.rs b/pragma-entities/src/models/checkpoint_error.rs index 01417290..c2587858 100644 --- a/pragma-entities/src/models/checkpoint_error.rs +++ b/pragma-entities/src/models/checkpoint_error.rs @@ -1,32 +1,34 @@ +use axum::Json; use axum::http::StatusCode; use axum::response::IntoResponse; -use axum::Json; use serde_json::json; use crate::error::InfraError; #[derive(Debug, thiserror::Error)] pub enum CheckpointError { - #[error("internal server error")] - InternalServerError, + // 400 #[error("invalid limit : {0}")] InvalidLimit(u64), - #[error("no checkpoints found for requested pair")] - NotFound, + // 404 + #[error("no checkpoints found for pair {0}")] + CheckpointNotFound(String), + // 500 + #[error("internal server error{0}")] + InternalServerError(String), } impl From<InfraError> for CheckpointError { fn from(error: InfraError) -> Self { match error { - InfraError::NotFound => Self::NotFound, - InfraError::InternalServerError - | InfraError::UnsupportedInterval(_, _) - | InfraError::RoutingError - | InfraError::DisputerNotSet - | InfraError::SettlerNotSet - | InfraError::InvalidTimestamp(_) - | InfraError::NonZeroU32Conversion(_) - | InfraError::AxumError(_) => Self::InternalServerError, + // 404 + InfraError::CheckpointNotFound(pair_id) => Self::CheckpointNotFound(pair_id), + // 500 + InfraError::NoRpcAvailable(network) => { + Self::InternalServerError(format!(": no RPC available for network {network}")) + } + // These errors should never occur for checkpoints. + _ => Self::InternalServerError(String::default()), } } } @@ -37,13 +39,13 @@ impl IntoResponse for CheckpointError { Self::InvalidLimit(limit) => { (StatusCode::BAD_REQUEST, format!("Invalid Limit {limit}")) } - Self::NotFound => ( + Self::CheckpointNotFound(pair_id) => ( StatusCode::NOT_FOUND, - String::from("No checkpoints found for requested pair"), + format!("No checkpoints found for pair {pair_id}"), ), - Self::InternalServerError => ( + Self::InternalServerError(details) => ( StatusCode::INTERNAL_SERVER_ERROR, - String::from("Internal server error"), + format!("Internal server error{details}"), ), }; ( diff --git a/pragma-entities/src/models/currency.rs b/pragma-entities/src/models/currency.rs deleted file mode 100644 index d740b13b..00000000 --- a/pragma-entities/src/models/currency.rs +++ /dev/null @@ -1,56 +0,0 @@ -use super::DieselResult; -use crate::schema::currencies; -use bigdecimal::BigDecimal; -use diesel::{ExpressionMethods, OptionalExtension, PgConnection, QueryDsl, RunQueryDsl}; -use utoipa::ToSchema; -use uuid::Uuid; - -#[derive(Clone, Debug, PartialEq, Eq, ToSchema)] -pub struct Currency { - pub id: Uuid, - pub name: String, - #[schema(value_type = u32)] - pub decimals: BigDecimal, - pub is_abstract: bool, - pub ethereum_address: Option<String>, -} - -impl Currency { - pub fn get_all(conn: &mut PgConnection) -> DieselResult<Vec<String>> { - currencies::table.select(currencies::name).get_results(conn) - } - - pub fn get_abstract_all(conn: &mut PgConnection) -> DieselResult<Vec<String>> { - currencies::table - .select(currencies::name) - .filter(currencies::abstract_.eq(true)) - .get_results(conn) - } - - pub fn get_decimals_all(conn: &mut PgConnection) -> DieselResult<Vec<(String, BigDecimal)>> { - currencies::table - ..select((currencies::name, currencies::decimals)) - .get_results::<(String, BigDecimal)>(conn) - } - - pub fn get_decimals_for( - conn: &mut PgConnection, - pairs: Vec<String>,
) -> DieselResult<Vec<(String, BigDecimal)>> { - currencies::table - .filter(currencies::name.eq_any(pairs)) - .select((currencies::name, currencies::decimals)) - .get_results::<(String, BigDecimal)>(conn) - } - - pub fn get_decimals_by_name( - conn: &mut PgConnection, - name: &str, - ) -> DieselResult<Option<BigDecimal>> { - currencies::table - .filter(currencies::name.eq(name)) - .select(currencies::decimals) - .first(conn) - .optional() - } -} diff --git a/pragma-entities/src/models/entries/entry.rs b/pragma-entities/src/models/entries/entry.rs index ae5a289e..ced3028c 100644 --- a/pragma-entities/src/models/entries/entry.rs +++ b/pragma-entities/src/models/entries/entry.rs @@ -1,11 +1,11 @@ +use crate::EntryError; +use crate::TimestampError; use crate::convert_timestamp_to_datetime; use crate::dto::entry as dto; use crate::models::DieselResult; use crate::schema::entries; -use crate::EntryError; use bigdecimal::BigDecimal; use diesel::internal::derives::multiconnection::chrono::NaiveDateTime; -use diesel::upsert::excluded; use diesel::{ AsChangeset, ExpressionMethods, Insertable, OptionalExtension, PgConnection, PgTextExpressionMethods, QueryDsl, Queryable, RunQueryDsl, Selectable, SelectableHelper, @@ -33,7 +33,6 @@ pub struct NewEntry { pub publisher: String, pub source: String, pub timestamp: NaiveDateTime, - pub publisher_signature: String, pub price: BigDecimal, } @@ -50,15 +49,7 @@ impl Entry { .values(data) .returning(Self::as_returning()) .on_conflict((entries::pair_id, entries::source, entries::timestamp)) - .do_update() - .set(( - entries::pair_id.eq(excluded(entries::pair_id)), - entries::publisher.eq(excluded(entries::publisher)), - entries::source.eq(excluded(entries::source)), - entries::publisher_signature.eq(excluded(entries::publisher_signature)), - entries::timestamp.eq(excluded(entries::timestamp)), - entries::price.eq(excluded(entries::price)), - )) + .do_nothing() .get_results(conn) } diff --git a/pragma-entities/src/models/entries/entry_error.rs b/pragma-entities/src/models/entries/entry_error.rs index 064de6ad..817428d9 100644 --- a/pragma-entities/src/models/entries/entry_error.rs +++ b/pragma-entities/src/models/entries/entry_error.rs @@ -1,76 +1,102 @@ -use crate::error::InfraError; -use crate::models::publisher_error::PublisherError; +use axum::Json; use axum::http::StatusCode; use axum::response::IntoResponse; -use axum::Json; -use pragma_common::signing::SignerError; -use pragma_common::timestamp::TimestampRangeError; -use pragma_common::types::{AggregationMode, Interval}; +use pragma_common::starknet::{SignerError, StarknetNetwork}; +use pragma_common::{AggregationMode, InstrumentType, Interval}; use serde_json::json; -use starknet::core::crypto::EcdsaVerifyError; use utoipa::ToSchema; -#[derive(Debug, thiserror::Error, ToSchema)] -pub enum VolatilityError { - #[error("invalid timestamps range: {0} > {1}")] - InvalidTimestampsRange(u64, u64), -} +use crate::PublisherError; +use crate::error::{InfraError, WebSocketError}; + +use super::timestamp::TimestampError; #[derive(Debug, thiserror::Error, ToSchema)] +#[schema(example = json!({ + "code": "UNAUTHORIZED", + "message": "Unauthorized request: Invalid API key", + "timestamp": "2024-03-20T10:30:00Z" +}))] pub enum EntryError { - #[error("internal server error")] - InternalServerError, - #[error("bad request")] - BadRequest, - #[error("entry not found: {0}")] - NotFound(String), - #[error("infra error: {0}")] - InfraError(InfraError), - #[error("invalid signature")] - #[schema(value_type = String)] - InvalidSignature(EcdsaVerifyError), - #[error("could
not sign price")] - InvalidSigner, - #[error("unauthorized request: {0}")] - Unauthorized(String), + // 400 Error - Bad Requests + #[schema(example = "invalid signature")] + #[error("invalid signature: {0}")] + InvalidSignature(#[from] SignerError), + #[schema(example = "invalid timestamp")] #[error("invalid timestamp: {0}")] - InvalidTimestamp(#[from] TimestampRangeError), + InvalidTimestamp(#[from] TimestampError), + #[schema(example = "invalid expiry")] #[error("invalid expiry")] InvalidExpiry, + #[schema(example = "unsupported interval 1s for aggregation median")] + #[error("unsupported interval {0:?} for aggregation {1:?}")] + InvalidInterval(Interval, AggregationMode), + #[schema(example = "unsupported interval 1s for onchain data")] + #[error("unsupported interval {0:?} for onchain data")] + InvalidOnchainInterval(Interval), + #[error("invalid login message: {0}")] + InvalidLoginMessage(String), + #[error("unsupported data_type {1:?} for network {0:?}")] + InvalidDataTypeForNetwork(StarknetNetwork, InstrumentType), + + // 401 Error - Unauthorized + #[error("unauthorized request: {0}")] + Unauthorized(String), + + // 404 errors + #[error("pair not found: {0}")] + PairNotFound(String), + #[error("entry not found: {0}")] + EntryNotFound(String), + #[error("publisher not found: {0}")] + PublisherNotFound(String), #[error("missing data for routing on pair: {0}")] - MissingData(String), - #[error("publisher error: {0}")] + RouteNotFound(String), + #[error("history not found")] + HistoryNotFound, + + // ??? publishing... + #[error("can't publish data: {0}")] PublisherError(#[from] PublisherError), - #[error("pair id invalid: {0}")] - UnknownPairId(String), - #[error("volatility error: {0}")] - VolatilityError(#[from] VolatilityError), #[error("can't publish data: {0}")] PublishData(String), #[error("can't build publish message: {0}")] BuildPublish(String), - #[error(transparent)] - SignerError(#[from] SignerError), - #[error("invalid login message: {0}")] - InvalidLoginMessage(String), - #[error("unsupported interval {0:?} for aggregation {1:?}")] - UnsupportedInterval(Interval, AggregationMode), + + // Internal shit + #[error("could not sign price")] + InvalidSigner, + + // Onchain db error + #[error("could not fetch price from onchain db: {0}")] + DatabaseError(String), + + // 500 Error - Internal server error + #[error("internal server error: {0}")] + InternalServerError(String), + + #[error("websocket error: {0}")] + WebSocketError(#[from] WebSocketError), } impl From for EntryError { fn from(error: InfraError) -> Self { match error { - InfraError::NotFound => Self::NotFound("Unknown".to_string()), - InfraError::RoutingError => Self::MissingData("Not enough data".to_string()), - InfraError::InvalidTimestamp(e) => { - Self::InvalidTimestamp(TimestampRangeError::Other(e)) + InfraError::InvalidTimestamp(err) => Self::InvalidTimestamp(err), + InfraError::UnsupportedInterval(interval, mode) => { + Self::InvalidInterval(interval, mode) + } + InfraError::UnsupportedOnchainInterval(interval) => { + Self::InvalidOnchainInterval(interval) + } + InfraError::UnsupportedDataTypeForNetwork(network, data_type) => { + Self::InvalidDataTypeForNetwork(network, data_type) } - InfraError::UnsupportedInterval(i, a) => Self::UnsupportedInterval(i, a), - InfraError::InternalServerError - | InfraError::DisputerNotSet - | InfraError::SettlerNotSet - | InfraError::NonZeroU32Conversion(_) - | InfraError::AxumError(_) => Self::InternalServerError, + InfraError::RoutingError(err_msg) => 
Self::RouteNotFound(err_msg), + InfraError::EntryNotFound(entry_id) => Self::EntryNotFound(entry_id), + InfraError::PairNotFound(pair_id) => Self::PairNotFound(pair_id), + // These errors should never occur for entries. + e => Self::InternalServerError(e.to_string()), } } } @@ -78,57 +104,84 @@ impl From<InfraError> for EntryError { impl IntoResponse for EntryError { fn into_response(self) -> axum::response::Response { let (status, err_msg) = match self { - Self::NotFound(pair_id) => ( - StatusCode::NOT_FOUND, - format!("EntryModel with pair id {pair_id} has not been found"), - ), - Self::MissingData(pair_id) => ( - StatusCode::NOT_FOUND, - format!("Not enough data on pair {pair_id} to perform routing"), - ), - Self::InfraError(db_error) => ( - StatusCode::INTERNAL_SERVER_ERROR, - format!("Internal server error: {db_error}"), - ), Self::InvalidSignature(err) => { (StatusCode::BAD_REQUEST, format!("Invalid signature: {err}")) } - Self::Unauthorized(reason) => ( - StatusCode::UNAUTHORIZED, - format!("Unauthorized publisher: {reason}"), - ), Self::InvalidTimestamp(reason) => ( StatusCode::BAD_REQUEST, format!("Invalid timestamp: {reason}"), ), Self::InvalidExpiry => (StatusCode::BAD_REQUEST, "Invalid expiry".to_string()), - Self::PublisherError(err) => ( - StatusCode::INTERNAL_SERVER_ERROR, - format!("Publisher error: {err}"), + Self::InvalidInterval(interval, mode) => ( + StatusCode::BAD_REQUEST, + format!("Unsupported interval {interval:?} for aggregation {mode:?}"), + ), + Self::InvalidOnchainInterval(interval) => ( + StatusCode::BAD_REQUEST, + format!("Unsupported interval {interval:?} for onchain data"), + ), + Self::InvalidDataTypeForNetwork(network, data_type) => ( + StatusCode::BAD_REQUEST, + format!("Unsupported data type {data_type:?} for network {network:?}"), + ), + Self::InvalidLoginMessage(msg) => ( + StatusCode::BAD_REQUEST, + format!("Invalid login message: {msg}"), + ), + Self::Unauthorized(reason) => ( + StatusCode::UNAUTHORIZED, + format!("Unauthorized request: {reason}"), + ), + Self::PairNotFound(pair_id) => { + (StatusCode::NOT_FOUND, format!("Pair not found: {pair_id}")) + } + Self::HistoryNotFound => (StatusCode::NOT_FOUND, String::from("History not found")), + Self::EntryNotFound(entry_id) => ( + StatusCode::NOT_FOUND, + format!("Entry not found: {entry_id}"), + ), + Self::RouteNotFound(pair_id) => ( + StatusCode::NOT_FOUND, + format!("Missing data for routing on pair: {pair_id}"), + ), + Self::PublisherNotFound(publisher) => ( + StatusCode::NOT_FOUND, + format!("Publisher not found: {publisher}"), ), Self::PublishData(err) => ( StatusCode::INTERNAL_SERVER_ERROR, - format!("Unable to publish data: {err}"), + format!("Can't publish data: {err}"), + ), + Self::PublisherError(err) => ( + StatusCode::INTERNAL_SERVER_ERROR, + format!("Can't publish data: {err}"), ), Self::BuildPublish(err) => ( StatusCode::INTERNAL_SERVER_ERROR, - format!("Unable to build publish message: {err}"), + format!("Can't build publish message: {err}"), ), - Self::BadRequest => (StatusCode::BAD_REQUEST, "Bad request".to_string()), - Self::UnknownPairId(pair_id) => { - (StatusCode::NOT_FOUND, format!("Unknown pair id: {pair_id}")) - } - Self::SignerError(err) => (StatusCode::BAD_REQUEST, format!("Invalid message: {err}")), - _ => ( + Self::DatabaseError(reason) => ( + StatusCode::INTERNAL_SERVER_ERROR, + format!("Failed to fetch price data: {reason}"), + ), + Self::InvalidSigner => (StatusCode::BAD_REQUEST, "Could not sign price".to_string()), + Self::InternalServerError(reason) => (
                 StatusCode::INTERNAL_SERVER_ERROR,
-                String::from("Internal server error"),
+                format!("Internal server error: {reason}"),
+            ),
+            Self::WebSocketError(err) => (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                format!("WebSocket error: {err}"),
+            ),
         };
+
         (
             status,
-            Json(
-                json!({"resource":"EntryModel", "message": err_msg, "happened_at" : chrono::Utc::now() }),
-            ),
+            Json(json!({
+                "resource": "EntryModel",
+                "message": err_msg,
+                "happened_at": chrono::Utc::now()
+            })),
         )
             .into_response()
     }
diff --git a/pragma-entities/src/models/entries/future_entry.rs b/pragma-entities/src/models/entries/future_entry.rs
index 391213d9..f3f6efeb 100644
--- a/pragma-entities/src/models/entries/future_entry.rs
+++ b/pragma-entities/src/models/entries/future_entry.rs
@@ -1,9 +1,9 @@
 use crate::dto::entry as dto;
 use crate::models::DieselResult;
 use bigdecimal::BigDecimal;
+use diesel::BoolExpressionMethods;
 use diesel::dsl::sql;
 use diesel::internal::derives::multiconnection::chrono::NaiveDateTime;
-use diesel::BoolExpressionMethods;
 use diesel::{
     AsChangeset, ExpressionMethods, Insertable, PgConnection, PgTextExpressionMethods, QueryDsl,
     Queryable, RunQueryDsl, Selectable, SelectableHelper,
@@ -25,7 +25,7 @@ pub struct FutureEntry {
     // If expiration_timestamp is None, it means the entry is a perpetual future
     // else it is a regular future entry that will expire at the expiration_timestamp.
     pub expiration_timestamp: Option<NaiveDateTime>,
-    pub publisher_signature: String,
+    pub publisher_signature: Option<String>,
     pub price: BigDecimal,
 }
@@ -37,7 +37,6 @@ pub struct NewFutureEntry {
     pub source: String,
     pub timestamp: NaiveDateTime,
     pub expiration_timestamp: Option<NaiveDateTime>,
-    pub publisher_signature: String,
     pub price: BigDecimal,
 }
diff --git a/pragma-entities/src/models/entries/mod.rs b/pragma-entities/src/models/entries/mod.rs
index 29dbe22d..508567f9 100644
--- a/pragma-entities/src/models/entries/mod.rs
+++ b/pragma-entities/src/models/entries/mod.rs
@@ -1,3 +1,4 @@
 pub mod entry;
 pub mod entry_error;
 pub mod future_entry;
+pub mod timestamp;
diff --git a/pragma-common/src/types/timestamp.rs b/pragma-entities/src/models/entries/timestamp.rs
similarity index 83%
rename from pragma-common/src/types/timestamp.rs
rename to pragma-entities/src/models/entries/timestamp.rs
index f68e4f02..91d57b43 100644
--- a/pragma-common/src/types/timestamp.rs
+++ b/pragma-entities/src/models/entries/timestamp.rs
@@ -28,6 +28,18 @@ pub enum TimestampRangeError {
     Other(String),
 }
+#[derive(Debug, thiserror::Error, ToSchema)]
+pub enum TimestampError {
+    #[error("Timestamp range error: {0}")]
+    RangeError(#[from] TimestampRangeError),
+    #[error("Could not convert unsigned timestamp to datetime: {0}")]
+    ToDatetimeErrorU64(u64),
+    #[error("Could not convert signed timestamp to datetime: {0}")]
+    ToDatetimeErrorI64(i64),
+    #[error("Timestamp error: {0}")]
+    Other(String),
+}
+
 impl TimestampRange {
     pub fn assert_time_is_valid(self) -> Result {
         let now = chrono::Utc::now().timestamp();
diff --git a/pragma-entities/src/models/funding_rate.rs b/pragma-entities/src/models/funding_rate.rs
new file mode 100644
index 00000000..52e3f799
--- /dev/null
+++ b/pragma-entities/src/models/funding_rate.rs
@@ -0,0 +1,38 @@
+use chrono::NaiveDateTime;
+use diesel::prelude::*;
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
+use crate::schema::funding_rates;
+
+#[derive(Debug, Clone, Queryable, Serialize, Deserialize)]
+#[diesel(table_name = funding_rates)]
+#[diesel(check_for_backend(diesel::pg::Pg))]
+pub struct FundingRate {
+    pub id: Uuid,
+    pub source: String,
+    pub
pair: String, + pub annualized_rate: f64, + pub timestamp: NaiveDateTime, + pub created_at: NaiveDateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Insertable, AsChangeset)] +#[diesel(table_name = funding_rates)] +pub struct NewFundingRate { + pub source: String, + pub pair: String, + pub annualized_rate: f64, + pub timestamp: NaiveDateTime, +} + +impl FundingRate { + pub fn create_many( + conn: &mut PgConnection, + new_entries: Vec, + ) -> Result, diesel::result::Error> { + diesel::insert_into(funding_rates::table) + .values(&new_entries) + .get_results(conn) + } +} diff --git a/pragma-entities/src/models/merkle_feed_error.rs b/pragma-entities/src/models/merkle_feed_error.rs deleted file mode 100644 index 0acff736..00000000 --- a/pragma-entities/src/models/merkle_feed_error.rs +++ /dev/null @@ -1,89 +0,0 @@ -use axum::{http::StatusCode, response::IntoResponse, Json}; -use serde_json::json; -use utoipa::ToSchema; - -use crate::error::RedisError; - -#[derive(Debug, thiserror::Error, ToSchema)] -pub enum MerkleFeedError { - #[error("internal server error")] - InternalServerError, - #[error("could not establish a connection with Redis")] - RedisConnection, - #[error("option for instrument {1} not found for block {0}")] - OptionNotFound(u64, String), - #[error("merkle tree not found for block {0}")] - MerkleTreeNotFound(u64), - #[error("invalid option hash, could not convert to felt: {0}")] - InvalidOptionHash(String), - #[error("could not deserialize the redis merkle tree into MerkleTree")] - TreeDeserialization, - #[error("could not generate a merkle proof for hash: {0}")] - MerkleProof(String), - #[error("no merkle feeds published for network: {0}")] - NoBlocks(String), -} - -impl From for MerkleFeedError { - fn from(error: RedisError) -> Self { - match error { - RedisError::Connection => Self::RedisConnection, - RedisError::OptionNotFound(block, name) => Self::OptionNotFound(block, name), - RedisError::MerkleTreeNotFound(block) => Self::MerkleTreeNotFound(block), - RedisError::InvalidOptionHash(r) => Self::InvalidOptionHash(r), - RedisError::TreeDeserialization => Self::TreeDeserialization, - RedisError::NoBlocks(network) => Self::NoBlocks(network), - RedisError::InternalServerError => Self::InternalServerError, - } - } -} - -impl IntoResponse for MerkleFeedError { - fn into_response(self) -> axum::response::Response { - let (status, err_msg) = match self { - Self::InvalidOptionHash(hash) => ( - StatusCode::BAD_REQUEST, - format!( - "Option hash is not a correct 0x prefixed hexadecimal hash: {hash}" - ), - ), - Self::OptionNotFound(block_number, instrument_name) => ( - StatusCode::NOT_FOUND, - format!( - "MerkleFeed option for instrument {instrument_name} has not been found for block {block_number}", - ), - ), - Self::MerkleTreeNotFound(block_number) => ( - StatusCode::NOT_FOUND, - format!("MerkleFeed tree not found for block {block_number}"), - ), - Self::RedisConnection => ( - StatusCode::SERVICE_UNAVAILABLE, - "Could not establish a connection with the Redis database".to_string(), - ), - Self::TreeDeserialization => ( - StatusCode::INTERNAL_SERVER_ERROR, - String::from("Internal server error: could not decode Redis merkle tree"), - ), - Self::NoBlocks(network) => ( - StatusCode::NOT_FOUND, - format!("No merkle feeds published for network {network}"), - ), - Self::MerkleProof(hash) => ( - StatusCode::NOT_FOUND, - format!("Could not generate a valid merkle proof for hash {hash}"), - ), - Self::InternalServerError => ( - StatusCode::INTERNAL_SERVER_ERROR, - 
String::from("Internal server error"), - ), - }; - ( - status, - Json( - json!({"resource":"MerkleFeed", "message": err_msg, "happened_at" : chrono::Utc::now() }), - ), - ) - .into_response() - } -} diff --git a/pragma-entities/src/models/mod.rs b/pragma-entities/src/models/mod.rs index 3170e0ff..e548004c 100644 --- a/pragma-entities/src/models/mod.rs +++ b/pragma-entities/src/models/mod.rs @@ -1,8 +1,6 @@ pub mod checkpoint_error; -pub mod currency; pub mod entries; -pub mod merkle_feed_error; -pub mod optimistic_oracle_error; +pub mod funding_rate; pub mod publisher; pub mod publisher_error; diff --git a/pragma-entities/src/models/optimistic_oracle_error.rs b/pragma-entities/src/models/optimistic_oracle_error.rs deleted file mode 100644 index f6cbcad2..00000000 --- a/pragma-entities/src/models/optimistic_oracle_error.rs +++ /dev/null @@ -1,69 +0,0 @@ -use axum::{http::StatusCode, response::IntoResponse, Json}; -use serde_json::json; -use utoipa::ToSchema; - -use crate::error::InfraError; - -#[derive(Debug, thiserror::Error, ToSchema)] -pub enum OptimisticOracleError { - #[error("internal server error")] - InternalServerError, - #[error("database connection error")] - DatabaseConnection, - #[error("assertion details issue: {0}")] - AssertionDetailsIssue(String), - #[error("disputer not set for assertion: {0}")] - DisputerNotSet(String), - #[error("settler not set for assertion: {0}")] - SettlerNotSet(String), - #[error("no assertions found for the given criteria")] - NoAssertionsFound, -} - -impl From for OptimisticOracleError { - fn from(error: InfraError) -> Self { - match error { - InfraError::DisputerNotSet => Self::DisputerNotSet("Unknown".to_string()), - InfraError::SettlerNotSet => Self::SettlerNotSet("Unknown".to_string()), - _ => Self::InternalServerError, - } - } -} - -impl IntoResponse for OptimisticOracleError { - fn into_response(self) -> axum::response::Response { - let (status, err_msg) = match self { - Self::DatabaseConnection => ( - StatusCode::SERVICE_UNAVAILABLE, - "Could not establish a connection with the database".to_string(), - ), - Self::AssertionDetailsIssue(id) => ( - StatusCode::NOT_FOUND, - format!("Issue to fetch assertion details with id: {id}"), - ), - Self::DisputerNotSet(id) => ( - StatusCode::INTERNAL_SERVER_ERROR, - format!("Disputer not set for assertion: {id}"), - ), - Self::SettlerNotSet(id) => ( - StatusCode::INTERNAL_SERVER_ERROR, - format!("Settler not set for assertion: {id}"), - ), - Self::NoAssertionsFound => ( - StatusCode::NOT_FOUND, - "No assertions found for the given criteria".to_string(), - ), - Self::InternalServerError => ( - StatusCode::INTERNAL_SERVER_ERROR, - String::from("Internal server error"), - ), - }; - ( - status, - Json( - json!({"resource":"OptimisticOracle", "message": err_msg, "happened_at" : chrono::Utc::now() }), - ), - ) - .into_response() - } -} diff --git a/pragma-entities/src/models/publisher_error.rs b/pragma-entities/src/models/publisher_error.rs index 9166cf23..74fcfec3 100644 --- a/pragma-entities/src/models/publisher_error.rs +++ b/pragma-entities/src/models/publisher_error.rs @@ -1,6 +1,6 @@ +use axum::Json; use axum::http::StatusCode; use axum::response::IntoResponse; -use axum::Json; use serde_json::json; use utoipa::ToSchema; @@ -8,22 +8,27 @@ use crate::error::InfraError; #[derive(Debug, thiserror::Error, ToSchema)] pub enum PublisherError { - #[error("internal server error")] - InternalServerError, + // 404 errors - Bad request #[error("invalid key : {0}")] InvalidKey(String), #[error("invalid address : 
{0}")] InvalidAddress(String), #[error("inactive publisher : {0}")] InactivePublisher(String), + + // 404 errors - Not found #[error("no publishers found")] NotFound, + + // 500 error - Internal + #[error("internal server error")] + InternalServerError, } impl From for PublisherError { fn from(error: InfraError) -> Self { match error { - InfraError::NotFound => Self::NotFound, + InfraError::PublishersNotFound => Self::NotFound, _ => Self::InternalServerError, } } diff --git a/pragma-entities/src/schema.rs b/pragma-entities/src/schema.rs index 46c37881..c76d7bd6 100644 --- a/pragma-entities/src/schema.rs +++ b/pragma-entities/src/schema.rs @@ -1,25 +1,25 @@ // @generated automatically by Diesel CLI. diesel::table! { - currencies (id) { + entries (id, timestamp) { id -> Uuid, - name -> Varchar, - decimals -> Numeric, - #[sql_name = "abstract"] - abstract_ -> Bool, - ethereum_address -> Nullable, + pair_id -> Varchar, + price -> Numeric, + timestamp -> Timestamptz, + publisher -> Text, + publisher_signature -> Nullable, + source -> Varchar, } } diesel::table! { - entries (id, timestamp) { + funding_rates (id, timestamp) { id -> Uuid, - pair_id -> Varchar, - publisher -> Text, - timestamp -> Timestamptz, - price -> Numeric, source -> Varchar, - publisher_signature -> Nullable, + pair -> Varchar, + annualized_rate -> Float8, + timestamp -> Timestamptz, + created_at -> Timestamptz, } } @@ -31,7 +31,7 @@ diesel::table! { timestamp -> Timestamptz, expiration_timestamp -> Nullable, publisher -> Text, - publisher_signature -> Text, + publisher_signature -> Nullable, source -> Varchar, } } @@ -42,9 +42,9 @@ diesel::table! { name -> Varchar, master_key -> Varchar, active_key -> Varchar, - active -> Bool, account_address -> Varchar, + active -> Bool, } } -diesel::allow_tables_to_appear_in_same_query!(currencies, entries, future_entries, publishers,); +diesel::allow_tables_to_appear_in_same_query!(entries, funding_rates, future_entries, publishers,); diff --git a/pragma-ingestor/Cargo.toml b/pragma-ingestor/Cargo.toml index 57aff758..2355ccbf 100644 --- a/pragma-ingestor/Cargo.toml +++ b/pragma-ingestor/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "pragma-ingestor" version = "0.1.0" -edition = "2021" +edition.workspace = true [lints] workspace = true @@ -9,10 +9,13 @@ workspace = true [dependencies] pragma-common = { workspace = true } pragma-entities = { workspace = true } +faucon-rs = { workspace = true } +chrono = { workspace = true } deadpool-diesel = { workspace = true, features = ["postgres"] } dotenvy = { workspace = true } envy = { workspace = true } +futures-util = { workspace = true } lazy_static = { workspace = true } rdkafka = { workspace = true } serde = { workspace = true } @@ -20,3 +23,4 @@ serde_json = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["full"] } tracing = { workspace = true } +clap = { workspace = true, features = ["derive"] } diff --git a/pragma-ingestor/src/config.rs b/pragma-ingestor/src/config.rs index 99125094..aaff1d14 100644 --- a/pragma-ingestor/src/config.rs +++ b/pragma-ingestor/src/config.rs @@ -1,78 +1,32 @@ -use lazy_static::lazy_static; -use serde::{Deserialize, Serialize}; +use clap::Parser; +use std::sync::LazyLock; -use crate::error::ErrorKind; +pub(crate) static CONFIG: LazyLock = LazyLock::new(load_configuration); -lazy_static! 
{ - #[derive(Debug)] - pub static ref CONFIG: Ingestor = load_configuration(); -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct Ingestor { - pub brokers: Vec, - pub topic: String, - pub group_id: String, -} +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] +pub(crate) struct Ingestor { + /// Number of consumers to run + #[arg(long, env = "NUM_CONSUMERS", default_value = "10")] + pub(crate) num_consumers: usize, -impl Ingestor { - pub fn from_env() -> Result { - envy::from_env::().map_err(ErrorKind::LoadConfig) - } -} - -pub fn load_configuration() -> Ingestor { - Ingestor::from_env().expect("cannot load configuration env") -} + /// Channel capacity for message queues + #[arg(long, env = "CHANNEL_CAPACITY", default_value = "1000000")] + pub(crate) channel_capacity: usize, -#[cfg(test)] -mod tests { - use super::*; - use std::env; + /// Publisher name for entries + #[arg(long, env = "PUBLISHER_NAME", default_value = "PRAGMA")] + pub(crate) publisher_name: String, - #[test] - fn test_ingestor_init() { - let brokers = vec!["localhost:9092".to_string()]; - let ingestor = Ingestor { - brokers: brokers.clone(), - topic: "test_topic".to_string(), - group_id: "test_group".to_string(), - }; + /// Kafka consumer group ID + #[arg(long, env = "KAFKA_GROUP_ID", default_value = "pragma-ingestor")] + pub(crate) kafka_group_id: String, - assert_eq!(ingestor.brokers, brokers); - assert_eq!(ingestor.topic, "test_topic"); - assert_eq!(ingestor.group_id, "test_group"); - } - - #[test] - fn test_load_from_env() { - unsafe { - env::set_var("BROKERS", "localhost:9092"); - env::set_var("TOPIC", "test_topic"); - env::set_var("GROUP_ID", "test_group"); - } - - let ingestor = Ingestor::from_env().unwrap(); - - assert_eq!(ingestor.brokers, vec!["localhost:9092".to_string()]); - assert_eq!(ingestor.topic, "test_topic"); - assert_eq!(ingestor.group_id, "test_group"); - unsafe { - env::remove_var("BROKERS"); - env::remove_var("TOPIC"); - env::remove_var("GROUP_ID"); - } - } - - #[test] - fn test_env_error_handling() { - unsafe { - env::remove_var("BROKERS"); - env::remove_var("TOPIC"); - env::remove_var("GROUP_ID"); - } + /// OpenTelemetry endpoint for telemetry data + #[arg(long, env = "OTEL_EXPORTER_OTLP_ENDPOINT")] + pub(crate) otel_endpoint: Option, +} - let result = Ingestor::from_env(); - assert!(result.is_err()); - } +pub(crate) fn load_configuration() -> Ingestor { + Ingestor::parse() } diff --git a/pragma-ingestor/src/consumer.rs b/pragma-ingestor/src/consumer.rs deleted file mode 100644 index 4cba188b..00000000 --- a/pragma-ingestor/src/consumer.rs +++ /dev/null @@ -1,43 +0,0 @@ -use crate::config::CONFIG; -use rdkafka::config::{ClientConfig, RDKafkaLogLevel}; -use rdkafka::consumer::{CommitMode, Consumer, StreamConsumer}; -use rdkafka::message::Message; -use tokio::sync::mpsc::UnboundedSender; -use tracing::{error, info}; - -pub async fn consume(tx: UnboundedSender>) { - let consumer: StreamConsumer = ClientConfig::new() - .set("group.id", &CONFIG.group_id) - .set("bootstrap.servers", CONFIG.brokers.join(",")) - .set("enable.partition.eof", "false") - .set("session.timeout.ms", "6000") - .set("auto.offset.reset", "earliest") - .set("enable.auto.commit", "false") - .set_log_level(RDKafkaLogLevel::Debug) - .create() - .expect("Consumer creation failed"); - - consumer - .subscribe(&[&CONFIG.topic]) - .expect("Can't subscribe to specified topics"); - - info!( - "start consuming at {}({})", - CONFIG.brokers.join(","), - &CONFIG.topic - ); - - loop { - if let Ok(ref message) 
= consumer.recv().await {
-            if let Some(payload) = message.payload() {
-                if let Err(e) = tx.send(payload.to_vec()) {
-                    error!("cannot send message to bootstrap handler : {}.", e);
-                }
-            }
-
-            if let Err(e) = consumer.commit_message(message, CommitMode::Async) {
-                error!("cannot commit message : {:?}", e);
-            }
-        }
-    }
-}
diff --git a/pragma-ingestor/src/db/insert.rs b/pragma-ingestor/src/db/insert.rs
new file mode 100644
index 00000000..cb49bab5
--- /dev/null
+++ b/pragma-ingestor/src/db/insert.rs
@@ -0,0 +1,75 @@
+use deadpool_diesel::postgres::Pool;
+use pragma_entities::{
+    Entry, FundingRate, FutureEntry, InfraError, NewEntry, NewFundingRate, NewFutureEntry,
+};
+use tracing::debug;
+
+#[tracing::instrument(skip_all, fields(num_entries = new_entries.len()))]
+pub(crate) async fn insert_spot_entries(
+    pool: &Pool,
+    new_entries: Vec<NewEntry>,
+) -> Result<(), InfraError> {
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
+    conn.interact(move |conn| Entry::create_many(conn, new_entries))
+        .await
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
+
+    Ok(())
+}
+
+#[tracing::instrument(skip_all, fields(num_entries = new_entries.len()))]
+pub(crate) async fn insert_future_entries(
+    pool: &Pool,
+    new_entries: Vec<NewFutureEntry>,
+) -> Result<(), InfraError> {
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
+
+    let new_entries = new_entries
+        .into_iter()
+        .map(|mut entry| {
+            if let Some(expiration_timestamp) = entry.expiration_timestamp {
+                if expiration_timestamp.and_utc().timestamp() == 0 {
+                    entry.expiration_timestamp = None;
+                }
+            }
+            entry
+        })
+        .collect::<Vec<_>>();
+
+    debug!(
+        "[PERP] {} new entries available",
+        new_entries
+            .iter()
+            .filter(|entry| entry.expiration_timestamp.is_none())
+            .count()
+    );
+
+    conn.interact(move |conn| FutureEntry::create_many(conn, new_entries))
+        .await
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
+    Ok(())
+}
+
+#[tracing::instrument(skip_all, fields(num_entries = new_entries.len()))]
+pub(crate) async fn insert_funding_rate_entries(
+    pool: &Pool,
+    new_entries: Vec<NewFundingRate>,
+) -> Result<(), InfraError> {
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
+    let entries = conn
+        .interact(move |conn| FundingRate::create_many(conn, new_entries))
+        .await
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
+
+    for entry in &entries {
+        debug!(
+            "new funding rate entry created {} - {}({}) - {}",
+            entry.source, entry.pair, entry.annualized_rate, entry.timestamp
+        );
+    }
+
+    Ok(())
+}
diff --git a/pragma-ingestor/src/db/mod.rs b/pragma-ingestor/src/db/mod.rs
new file mode 100644
index 00000000..de1e2efa
--- /dev/null
+++ b/pragma-ingestor/src/db/mod.rs
@@ -0,0 +1,2 @@
+pub(crate) mod insert;
+pub(crate) mod process;
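The `process` module introduced below drains each channel in batches of `BATCH_SIZE` before calling the insert helpers above. Reduced to a generic, runnable sketch (with a plain closure standing in for the database flush, and a simple `recv` loop instead of `tokio::select!`):

```rust
use tokio::sync::mpsc;

const BATCH_SIZE: usize = 100;

async fn batch_worker<T>(mut rx: mpsc::Receiver<T>, mut flush: impl FnMut(Vec<T>)) {
    let mut buffer = Vec::with_capacity(BATCH_SIZE);
    while let Some(item) = rx.recv().await {
        buffer.push(item);
        if buffer.len() >= BATCH_SIZE {
            // Swap the full buffer out so the flush owns it.
            flush(std::mem::take(&mut buffer));
        }
    }
    // Channel closed: flush whatever is left.
    if !buffer.is_empty() {
        flush(buffer);
    }
}

#[tokio::main]
async fn main() {
    let (tx, rx) = mpsc::channel(1_000);
    tokio::spawn(async move {
        for i in 0..250u32 {
            tx.send(i).await.unwrap();
        }
    });
    // Prints two batches of 100 and a final flush of 50.
    batch_worker(rx, |batch| println!("flushed {} items", batch.len())).await;
}
```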
diff --git a/pragma-ingestor/src/db/process.rs b/pragma-ingestor/src/db/process.rs
new file mode 100644
index 00000000..814696d3
--- /dev/null
+++ b/pragma-ingestor/src/db/process.rs
@@ -0,0 +1,94 @@
+use deadpool_diesel::postgres::Pool;
+use pragma_entities::{NewEntry, NewFundingRate, NewFutureEntry};
+use tokio::sync::mpsc;
+
+use crate::db::insert::{insert_funding_rate_entries, insert_future_entries, insert_spot_entries};
+
+const BATCH_SIZE: usize = 100;
+
+#[tracing::instrument(skip(pool, rx))]
+pub async fn process_spot_entries(pool: Pool, mut rx: mpsc::Receiver<NewEntry>) {
+    let mut buffer = Vec::with_capacity(BATCH_SIZE);
+
+    loop {
+        tokio::select! {
+            Some(entry) = rx.recv() => {
+                buffer.push(entry);
+
+                if buffer.len() >= BATCH_SIZE {
+                    if let Err(e) = insert_spot_entries(&pool, std::mem::take(&mut buffer)).await {
+                        tracing::error!("❌ Failed to insert spot entries: {}", e);
+                    }
+                    buffer = Vec::with_capacity(BATCH_SIZE);
+                }
+            }
+            else => {
+                // Channel closed, flush remaining entries
+                if !buffer.is_empty() {
+                    if let Err(e) = insert_spot_entries(&pool, buffer).await {
+                        tracing::error!("❌ Failed to flush final spot entries: {}", e);
+                    }
+                }
+                break;
+            }
+        }
+    }
+}
+
+#[tracing::instrument(skip(pool, rx))]
+pub async fn process_future_entries(pool: Pool, mut rx: mpsc::Receiver<NewFutureEntry>) {
+    let mut buffer = Vec::with_capacity(BATCH_SIZE);
+
+    loop {
+        tokio::select! {
+            Some(entry) = rx.recv() => {
+                buffer.push(entry);
+
+                if buffer.len() >= BATCH_SIZE {
+                    if let Err(e) = insert_future_entries(&pool, std::mem::take(&mut buffer)).await {
+                        tracing::error!("❌ Failed to insert future entries: {}", e);
+                    }
+                    buffer = Vec::with_capacity(BATCH_SIZE);
+                }
+            }
+            else => {
+                // Channel closed, flush remaining entries
+                if !buffer.is_empty() {
+                    if let Err(e) = insert_future_entries(&pool, buffer).await {
+                        tracing::error!("❌ Failed to flush final future entries: {}", e);
+                    }
+                }
+                break;
+            }
+        }
+    }
+}
+
+#[tracing::instrument(skip(pool, rx))]
+pub async fn process_funding_rate_entries(pool: Pool, mut rx: mpsc::Receiver<NewFundingRate>) {
+    let mut buffer = Vec::with_capacity(BATCH_SIZE);
+
+    loop {
+        tokio::select! {
+            Some(entry) = rx.recv() => {
+                buffer.push(entry);
+
+                if buffer.len() >= BATCH_SIZE {
+                    if let Err(e) = insert_funding_rate_entries(&pool, std::mem::take(&mut buffer)).await {
+                        tracing::error!("❌ Failed to insert funding rate entries: {}", e);
+                    }
+                    buffer = Vec::with_capacity(BATCH_SIZE);
+                }
+            }
+            else => {
+                // Channel closed, flush remaining entries
+                if !buffer.is_empty() {
+                    if let Err(e) = insert_funding_rate_entries(&pool, buffer).await {
+                        tracing::error!("❌ Failed to flush final funding rate entries: {}", e);
+                    }
+                }
+                break;
+            }
+        }
+    }
+}
diff --git a/pragma-ingestor/src/error.rs b/pragma-ingestor/src/error.rs
index 0f36fdd2..1d1fce86 100644
--- a/pragma-ingestor/src/error.rs
+++ b/pragma-ingestor/src/error.rs
@@ -1,7 +1,7 @@
 use thiserror::Error;

 #[derive(Error, Debug)]
-pub enum ErrorKind {
+pub(crate) enum PragmaConsumerError {
     #[error("read config error: {0}")]
     ReadConfig(#[from] std::io::Error),
     #[error("load config error: {0}")]
diff --git a/pragma-ingestor/src/main.rs b/pragma-ingestor/src/main.rs
index e5ffbe28..8e5ce86e 100644
--- a/pragma-ingestor/src/main.rs
+++ b/pragma-ingestor/src/main.rs
@@ -1,145 +1,212 @@
-pub mod config;
-pub mod consumer;
-mod error;
-
-use deadpool_diesel::postgres::Pool;
 use dotenvy::dotenv;
+use futures_util::stream::StreamExt;
+use rdkafka::Message as _;
 use tokio::sync::mpsc;
+use tokio::task::JoinSet;
 use tracing::{error, info};

+use faucon_rs::config::{FauConfig, FauconEnvironment};
+use faucon_rs::consumer::FauConsumer;
+use faucon_rs::topics::FauconTopic;
+use pragma_common::{
+    CapnpDeserialize, InstrumentType,
+    entries::{FundingRateEntry, PriceEntry},
+    task_group::TaskGroup,
+};
 use pragma_entities::connection::ENV_OFFCHAIN_DATABASE_URL;
-use pragma_entities::{
-    adapt_infra_error, Entry, FutureEntry, InfraError, NewEntry, NewFutureEntry,
+use pragma_entities::{NewEntry, NewFundingRate, NewFutureEntry};
+
+use crate::config::CONFIG;
+use crate::db::process::{
+    process_funding_rate_entries, process_future_entries, process_spot_entries,
 };

+mod config;
+mod db;
+mod error;
+
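Both consumers in this file convert the Kafka payloads' epoch-millisecond timestamps into a `NaiveDateTime`, logging and falling back to the Unix-epoch default when the value is out of chrono's supported range. The conversion in isolation (same calls, minus the logging):

```rust
use chrono::NaiveDateTime;

fn to_naive_utc(timestamp_ms: i64) -> NaiveDateTime {
    // Out-of-range inputs yield None; default() is 1970-01-01 00:00:00.
    chrono::DateTime::from_timestamp_millis(timestamp_ms)
        .map_or_else(NaiveDateTime::default, |dt| dt.naive_utc())
}

fn main() {
    assert_eq!(to_naive_utc(0), NaiveDateTime::default());
    println!("{}", to_naive_utc(1_700_000_000_000)); // 2023-11-14 22:13:20
}
```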
 #[tokio::main]
 #[tracing::instrument]
 async fn main() -> Result<(), Box> {
-    let _ = dotenv(); // .env file is not present in prod
+    // Initialize environment and telemetry
+    dotenv().ok();

-    let otel_endpoint = std::env::var("OTEL_EXPORTER_OTLP_ENDPOINT")
-        .unwrap_or_else(|_| "http://signoz.dev.pragma.build:4317".to_string());
-    pragma_common::telemetry::init_telemetry("pragma-ingestor".into(), otel_endpoint, None)?;
+    pragma_common::telemetry::init_telemetry("pragma-ingestor", CONFIG.otel_endpoint.clone())?;

-    info!(
-        "kafka configuration : hostname={:?}, group_id={}, topic={}",
-        config::CONFIG.brokers,
-        config::CONFIG.group_id,
-        config::CONFIG.topic
-    );
+    // Load Kafka configuration
+    let config = FauConfig::new(FauconEnvironment::Development);
+    info!("🌐 Kafka configuration: hostname={:?}", config.broker_id);

+    // Initialize database connection pool
     let pool = pragma_entities::connection::init_pool("pragma-ingestor", ENV_OFFCHAIN_DATABASE_URL)
-        .expect("cannot connect to offchain database");
-
-    let (tx, mut rx) = mpsc::unbounded_channel::<Vec<u8>>();
-    tokio::spawn(consumer::consume(tx));
-    loop {
-        while let Some(payload) = rx.recv().await {
-            if let Err(e) = process_payload(&pool, payload).await {
-                error!("error while processing payload: {:?}", e);
-            }
+        .expect("Failed to connect to offchain database");
+
+    // Set up channels for spot, future, and funding rate entries with backpressure
+    let (spot_tx, spot_rx) = mpsc::channel::<NewEntry>(CONFIG.channel_capacity);
+    let (future_tx, future_rx) = mpsc::channel::<NewFutureEntry>(CONFIG.channel_capacity);
+    let (funding_rate_tx, funding_rate_rx) =
+        mpsc::channel::<NewFundingRate>(CONFIG.channel_capacity);
+
+    // Spawn database worker tasks
+    let task_group = TaskGroup::new()
+        .with_handle(tokio::spawn(process_spot_entries(pool.clone(), spot_rx)))
+        .with_handle(tokio::spawn(process_future_entries(
+            pool.clone(),
+            future_rx,
+        )))
+        .with_handle(tokio::spawn(process_funding_rate_entries(
+            pool,
+            funding_rate_rx,
+        )));
+
+    // Spawn consumers
+    let mut join_set = JoinSet::new();
+    for _ in 0..CONFIG.num_consumers {
+        join_set.spawn(run_price_consumer(
+            config.clone(),
+            CONFIG.kafka_group_id.clone(),
+            spot_tx.clone(),
+            future_tx.clone(),
+        ));
+        join_set.spawn(run_funding_rate_consumer(
+            config.clone(),
+            CONFIG.kafka_group_id.clone(),
+            funding_rate_tx.clone(),
+        ));
+    }
+
+    while let Some(result) = join_set.join_next().await {
+        if let Err(e) = result {
+            error!("Consumer error: {}", e);
         }
     }
+
+    // Drop original senders to close channels when consumers finish
+    drop(spot_tx);
+    drop(future_tx);
+    drop(funding_rate_tx);
+
+    // Await all tasks and abort if one fails
+    task_group.abort_all_if_one_resolves().await;
+    Ok(())
 }

-#[tracing::instrument(skip(pool, payload))]
-async fn process_payload(pool: &Pool, payload: Vec<u8>) -> Result<(), Box> {
-    let decoded_payload = String::from_utf8_lossy(&payload);
-    let is_future_entries = decoded_payload.contains("expiration_timestamp");
-    if is_future_entries {
-        match serde_json::from_slice::<Vec<NewFutureEntry>>(&payload) {
-            Ok(future_entries) => {
-                if !future_entries.is_empty() {
-                    if let Err(e) = insert_future_entries(pool, future_entries).await {
-                        error!("error while inserting future entries : {:?}", e);
+/// Runs a Kafka consumer for price entries
+#[tracing::instrument(skip_all)]
+async fn run_price_consumer(
+    config: FauConfig,
+    group_id: String,
+    spot_tx: mpsc::Sender<NewEntry>,
+    future_tx: mpsc::Sender<NewFutureEntry>,
+) -> Result<(), Box> {
+    let mut consumer = FauConsumer::new(config, &group_id)?;
+    consumer.subscribe(FauconTopic::PRICES_V1)?;
+    let mut
stream = consumer.stream(); + + tracing::info!("🚀 Starting price consumer"); + + while let Some(msg_result) = stream.next().await { + match msg_result { + Ok(msg) => { + let owned_message = msg.detach(); + if let Some(payload) = owned_message.payload() { + match PriceEntry::from_capnp(payload) { + Ok(entry) => { + let timestamp = + chrono::DateTime::from_timestamp_millis(entry.timestamp_ms) + .map_or_else( + || { + error!("Invalid timestamp: {}", entry.timestamp_ms); + chrono::NaiveDateTime::default() + }, + |dt| dt.naive_utc(), + ); + + match entry.instrument_type() { + InstrumentType::Spot => { + let spot_entry = NewEntry { + source: entry.source, + pair_id: entry.pair.to_string(), + publisher: CONFIG.publisher_name.clone(), + price: entry.price.into(), + timestamp, + }; + if let Err(e) = spot_tx.send(spot_entry).await { + error!("Failed to send spot entry: {}", e); + } + } + InstrumentType::Perp => { + let future_entry = NewFutureEntry { + pair_id: entry.pair.to_string(), + publisher: CONFIG.publisher_name.clone(), + source: entry.source, + price: entry.price.into(), + timestamp, + expiration_timestamp: None, + }; + if let Err(e) = future_tx.send(future_entry).await { + error!("Failed to send future entry: {}", e); + } + } + } + } + Err(e) => error!("Failed to deserialize price entry: {}", e), } } } - Err(e) => { - error!("Failed to deserialize payload: {:?}", e); - } - } - } else { - match serde_json::from_slice::>(&payload) { - Ok(entries) => { - info!("[SPOT] total of '{}' new entries available.", entries.len()); - if let Err(e) = insert_spot_entries(pool, entries).await { - error!("error while inserting entries : {:?}", e); - } - } - Err(e) => { - error!("Failed to deserialize payload: {:?}", e); - } + Err(e) => error!("Consumer error: {}", e), } } - Ok(()) -} - -#[tracing::instrument(skip(pool))] -pub async fn insert_spot_entries( - pool: &Pool, - new_entries: Vec, -) -> Result<(), InfraError> { - let conn = pool.get().await.map_err(adapt_infra_error)?; - let entries = conn - .interact(move |conn| Entry::create_many(conn, new_entries)) - .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error)?; - - for entry in &entries { - info!( - "new entry created {} - {}({}) - {}", - entry.publisher, entry.pair_id, entry.price, entry.source - ); - } Ok(()) } -#[tracing::instrument(skip(pool))] -pub async fn insert_future_entries( - pool: &Pool, - new_entries: Vec, -) -> Result<(), InfraError> { - let conn = pool.get().await.map_err(adapt_infra_error)?; - - // Double check that we don't have expiration_timestamp set to 0, - // if we do, we set them to NULL to be extra clear in the database - // those future entries are perp entries. 
- let new_entries = new_entries - .into_iter() - .map(|mut entry| { - if let Some(expiration_timestamp) = entry.expiration_timestamp { - if expiration_timestamp.and_utc().timestamp() == 0 { - entry.expiration_timestamp = None; +/// Runs a Kafka consumer for funding rate entries +#[tracing::instrument(skip_all)] +async fn run_funding_rate_consumer( + config: FauConfig, + group_id: String, + funding_rate_tx: mpsc::Sender, +) -> Result<(), Box> { + let mut consumer = FauConsumer::new(config, &group_id)?; + consumer.subscribe(FauconTopic::FUNDING_RATES_V1)?; + let mut stream = consumer.stream(); + + tracing::info!("🚀 Starting funding rate consumer"); + + while let Some(msg_result) = stream.next().await { + match msg_result { + Ok(msg) => { + let owned_message = msg.detach(); + if let Some(payload) = owned_message.payload() { + match FundingRateEntry::from_capnp(payload) { + Ok(entry) => { + let funding_rate_entry = NewFundingRate { + source: entry.source, + pair: entry.pair.to_string(), + annualized_rate: entry.annualized_rate, + timestamp: chrono::DateTime::from_timestamp_millis( + entry.timestamp_ms, + ) + .map_or_else( + || { + error!("Invalid timestamp: {}", entry.timestamp_ms); + chrono::NaiveDateTime::default() + }, + |dt| dt.naive_utc(), + ), + }; + if let Err(e) = funding_rate_tx.send(funding_rate_entry).await { + error!("Failed to send funding rate entry: {}", e); + } + } + Err(e) => error!("Failed to deserialize funding rate entry: {}", e), + } } } - entry - }) - .collect::>(); - - let len_perp_entries = new_entries - .iter() - .filter(|entry| entry.expiration_timestamp.is_none()) - .count(); - - info!("[PERP] {} new entries available", len_perp_entries); - info!( - "[FUTURE] {} new entries available", - new_entries.len() - len_perp_entries - ); - - let entries = conn - .interact(move |conn| FutureEntry::create_many(conn, new_entries)) - .await - .map_err(adapt_infra_error)? 
- .map_err(adapt_infra_error)?; - for entry in &entries { - info!( - "new future entry created {} - {}({}) - {}", - entry.publisher, entry.pair_id, entry.price, entry.source - ); + Err(e) => error!("Consumer error: {}", e), + } } + Ok(()) } diff --git a/pragma-node/Cargo.toml b/pragma-node/Cargo.toml index 25ea06a3..9fd3c0d1 100644 --- a/pragma-node/Cargo.toml +++ b/pragma-node/Cargo.toml @@ -1,14 +1,14 @@ [package] name = "pragma-node" version = "0.1.0" -edition = "2021" +edition.workspace = true [lints] workspace = true [dependencies] -pragma-common = { path = "../pragma-common" } -pragma-entities = { path = "../pragma-entities" } +pragma-common = { workspace = true } +pragma-entities = { workspace = true } async-trait = { workspace = true } aws-config = { workspace = true, features = ["behavior-version-latest"] } @@ -18,7 +18,6 @@ axum-extra = { workspace = true } axum-macros = { workspace = true } axum-tracing-opentelemetry = { workspace = true } bigdecimal = { workspace = true, features = ["serde"] } -cainome = { workspace = true, features = ["abigen-rs"] } chrono = { workspace = true, features = ["serde"] } dashmap = { workspace = true } deadpool-diesel = { workspace = true, features = ["postgres"] } @@ -32,16 +31,14 @@ dotenvy = { workspace = true } envy = { workspace = true } futures.workspace = true futures-util = { workspace = true } +google-secretmanager1 = { workspace = true } governor = { workspace = true } -indexmap = { workspace = true, features = ["serde"] } lazy_static = { workspace = true } moka = { workspace = true, features = ["future"] } nonzero_ext = { workspace = true } opentelemetry = { workspace = true } pragma-monitoring = { workspace = true } rdkafka = { workspace = true } -redis = { workspace = true, features = ["tokio-comp", "json"] } -reqwest.workspace = true serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true, features = ["arbitrary_precision"] } starknet = { workspace = true } @@ -50,16 +47,18 @@ strum = { workspace = true, features = ["derive"] } thiserror = { workspace = true } tokio = { workspace = true, features = ["sync", "macros", "rt-multi-thread"] } tokio-stream.workspace = true +tokio-util = { workspace = true } tower-http = { workspace = true, features = ["fs", "trace", "cors"] } tracing = { workspace = true } +url = { workspace = true } utoipa = { workspace = true } -utoipa-swagger-ui = { workspace = true, features = ["axum"] } +utoipa-swagger-ui = { workspace = true } utoipauto = { workspace = true } uuid = { workspace = true, features = ["fast-rng", "v4", "serde"] } [dev-dependencies] rstest = { workspace = true } tokio-tungstenite = { version = "0.20.1", features = ["connect", "native-tls"] } -url = "2.5.0" +bigdecimal = "0.3" ratatui = "0.24.0" crossterm = "0.27.0" diff --git a/pragma-node/examples/starkex.rs b/pragma-node/examples/starkex.rs index 0f68cace..d53878d1 100644 --- a/pragma-node/examples/starkex.rs +++ b/pragma-node/examples/starkex.rs @@ -2,13 +2,13 @@ use chrono::Utc; use crossterm::{ event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode}, execute, - terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen}, + terminal::{EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode}, }; use futures_util::{SinkExt as _, StreamExt as _}; use ratatui::{ + Terminal, prelude::*, widgets::{Block, Borders, List, ListItem, Paragraph}, - Terminal, }; use serde::{Deserialize, Serialize}; use starknet::core::utils::parse_cairo_short_string; @@ 
-18,9 +18,9 @@ use tokio_tungstenite::{connect_async, tungstenite::protocol::Message}; use url::Url; const TEST_PAIRS: &[&str] = &[ - "EUR/USD", + // "EUR/USD", // "ETH/USD", - // "SOL/USD", + "SOL/USD", // "AVAX/USD", // "MATIC/USD", // "ARB/USD", @@ -28,23 +28,28 @@ const TEST_PAIRS: &[&str] = &[ const TEST_MARK_PAIRS: &[&str] = &["BTC/USD"]; +#[derive(Debug)] +enum WebSocketMessage { + Ack(SubscriptionAck), + Update(SubscribeToEntryResponse), +} + #[derive(Serialize, Deserialize, Debug)] struct SubscribeMessage { msg_type: String, pairs: Vec, } -#[derive(Debug, Deserialize)] +#[derive(Debug, Clone, Deserialize)] #[allow(unused)] struct SignedPublisherPrice { oracle_asset_id: String, oracle_price: String, signing_key: String, - signature: String, timestamp: String, } -#[derive(Debug, Deserialize)] +#[derive(Debug, Clone, Deserialize)] #[allow(unused)] struct AssetOraclePrice { global_asset_id: String, @@ -53,7 +58,7 @@ struct AssetOraclePrice { signed_prices: Vec, } -#[derive(Debug, Deserialize)] +#[derive(Debug, Clone, Deserialize)] struct SubscribeToEntryResponse { oracle_prices: Vec, timestamp: i64, @@ -71,7 +76,7 @@ impl Environment { let ws_url = match env_type.as_str() { "prod" => "wss://ws.pragma.build/node/v1/data/subscribe", - "dev" => "wss://ws.dev.pragma.build/node/v1/data/subscribe", + "dev" => "wss://ws.devnet.pragma.build/node/v1/data/subscribe", "local" => "ws://0.0.0.0:3000/node/v1/data/subscribe", _ => panic!("Invalid environment: {env_type}. Use 'prod', 'dev', or 'local'",), } @@ -155,7 +160,15 @@ fn main() -> Result<(), Box> { match message { Ok(msg) => { if let Message::Text(text) = msg { - if tx.send(text).is_err() { + if let Ok(ack) = serde_json::from_str::(&text) { + if tx.send(Ok(WebSocketMessage::Ack(ack))).is_err() { + break; + } + } else if let Ok(response) = serde_json::from_str::(&text) { + if tx.send(Ok(WebSocketMessage::Update(response))).is_err() { + break; + } + } else if tx.send(Err("Failed to parse message".to_string())).is_err() { break; } } @@ -191,11 +204,15 @@ fn main() -> Result<(), Box> { } // Check for WebSocket messages - if let Ok(text) = rx.try_recv() { - if let Ok(ack) = serde_json::from_str::(&text) { - app.subscription_pairs = ack.pairs; - } else if let Ok(response) = serde_json::from_str::(&text) { - app.latest_update = Some(response); + if let Ok(msg) = rx.try_recv() { + match msg { + Ok(WebSocketMessage::Ack(ack)) => { + app.subscription_pairs = ack.pairs; + } + Ok(WebSocketMessage::Update(response)) => { + app.latest_update = Some(response); + } + Err(e) => eprintln!("Error: {e}"), } } @@ -232,21 +249,30 @@ fn parse_hex_asset_id(hex_id: &str) -> String { return hex_id.to_string(); } - let hex_str = &hex_id[2..]; - u128::from_str_radix(hex_str, 16) - .ok() - .and_then(|felt| parse_cairo_short_string(&felt.into()).ok()) - .unwrap_or_else(|| hex_id.to_string()) - .replace('/', "") + // Remove "0x" prefix and any trailing zeros + let hex_str = hex_id[2..].trim_end_matches('0'); + + // Convert hex to felt and then to string + if let Ok(felt) = u128::from_str_radix(hex_str, 16) { + if let Ok(s) = parse_cairo_short_string(&felt.into()) { + // The format is always ASSET-USD-8, so we can safely remove the -8 suffix + if s.ends_with("-8") { + return s[..s.len() - 2].to_string(); + } + return s; + } + } + hex_id.to_string() } /// Extracts and formats all received pairs from oracle prices. +/// Converts from the `StarkEx` encoded format back to human-readable pairs. 
 ///
 /// # Arguments
 /// * `oracle_prices` - Slice of `AssetOraclePrice` containing the received price updates
 ///
 /// # Returns
-/// A Vec containing all formatted asset pairs (e.g., "ETHUSD")
+/// A Vec containing all formatted asset pairs (e.g., "BTC-USD")
 fn get_received_pairs(oracle_prices: &[AssetOraclePrice]) -> Vec<String> {
     oracle_prices
         .iter()
@@ -255,18 +281,24 @@ fn get_received_pairs(oracle_prices: &[AssetOraclePrice]) -> Vec<String> {
 }

 /// Identifies which subscribed pairs are missing from the received pairs.
-/// Handles the format difference between subscribed pairs (ETH/USD) and received pairs (ETHUSD).
+/// Handles the format difference between subscribed pairs and received pairs.
 ///
 /// # Arguments
-/// * `subscribed` - Slice of subscribed pair strings (format: "ETH/USD")
-/// * `received` - Slice of received pair strings (format: "ETHUSD")
+/// * `subscribed` - Slice of subscribed pair strings (format: "BTC/USD")
+/// * `received` - Slice of received pair strings (format: "BTC-USD")
 ///
 /// # Returns
 /// A Vec containing all subscribed pairs that weren't received
 fn get_missing_pairs(subscribed: &[String], received: &[String]) -> Vec<String> {
     subscribed
         .iter()
-        .filter(|p| !received.contains(&p.replace('/', "")))
+        .filter(|p| {
+            let normalized_sub = p.replace('/', "-");
+            !received.iter().any(|r| {
+                let normalized_rec = r.replace('/', "-");
+                normalized_sub == normalized_rec
+            })
+        })
         .cloned()
         .collect()
 }
@@ -340,18 +372,7 @@ fn ui(f: &mut Frame<'_>, app: &App) {
     // Price updates list
     let mut items = vec![];
     for price in &update.oracle_prices {
-        let asset_display = if price.global_asset_id.starts_with("0x") {
-            let hex_str = &price.global_asset_id[2..];
-            u128::from_str_radix(hex_str, 16).map_or_else(
-                |_| price.global_asset_id.clone(),
-                |felt| {
-                    parse_cairo_short_string(&felt.into())
-                        .unwrap_or_else(|_| price.global_asset_id.clone())
-                },
-            )
-        } else {
-            price.global_asset_id.clone()
-        };
+        let asset_display = parse_hex_asset_id(&price.global_asset_id);

         items.push(ListItem::new(vec![
             Line::from(format!(
@@ -386,3 +407,78 @@ fn ui(f: &mut Frame<'_>, app: &App) {
     f.render_widget(prices_list, chunks[2]);
 }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_get_received_pairs() {
+        let oracle_prices = vec![
+            AssetOraclePrice {
+                global_asset_id: "0x534f4c2d5553442d38000000000000".to_string(), // SOL-USD-8
+                median_price: "100".to_string(),
+                signature: "sig".to_string(),
+                signed_prices: vec![],
+            },
+            AssetOraclePrice {
+                global_asset_id: "0x4254432d5553442d38000000000000".to_string(), // BTC-USD-8
+                median_price: "100".to_string(),
+                signature: "sig".to_string(),
+                signed_prices: vec![],
+            },
+        ];
+
+        let received = get_received_pairs(&oracle_prices);
+        assert_eq!(received, vec!["SOL-USD", "BTC-USD"]);
+    }
+
+    #[test]
+    fn test_get_missing_pairs() {
+        let subscribed = vec![
+            "SOL/USD".to_string(),
+            "BTC/USD".to_string(),
+            "ETH/USD".to_string(),
+        ];
+        let received = vec!["SOL-USD".to_string(), "BTC-USD".to_string()];
+
+        let missing = get_missing_pairs(&subscribed, &received);
+        assert_eq!(missing, vec!["ETH/USD"]);
+    }
+
+    #[test]
+    fn test_get_missing_pairs_with_mixed_separators() {
+        let subscribed = vec![
+            "SOL/USD".to_string(),
+            "BTC-USD".to_string(),
+            "ETH/USD".to_string(),
+        ];
+        let received = vec!["SOL-USD".to_string(), "BTC/USD".to_string()];
+
+        let missing = get_missing_pairs(&subscribed, &received);
+        assert_eq!(missing, vec!["ETH/USD"]);
+    }
+
+    #[test]
+    fn test_get_missing_pairs_all_present() {
+        let subscribed = vec!["SOL/USD".to_string(), "BTC/USD".to_string()];
+        let received = vec!["SOL-USD".to_string(), "BTC-USD".to_string()];
+
+        let missing = get_missing_pairs(&subscribed, &received);
+        assert!(missing.is_empty(), "Expected no missing pairs");
+    }
+
+    #[test]
+    fn test_parse_hex_asset_id() {
+        let test_cases = vec![
+            ("0x534f4c2d5553442d38000000000000", "SOL-USD"),
+            ("0x4254432d5553442d38000000000000", "BTC-USD"),
+            ("0x4554482d5553442d38000000000000", "ETH-USD"),
+        ];
+
+        for (input, expected) in test_cases {
+            let result = parse_hex_asset_id(input);
+            assert_eq!(result, expected, "Failed to parse {}", input);
+        }
+    }
+}
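For reference, here is what `parse_hex_asset_id` above is undoing: the StarkEx asset id is a zero-padded hex encoding of the ASCII short string `ASSET-USD-8`. This standalone sketch decodes the hex by hand instead of going through `Felt` and `parse_cairo_short_string`, but follows the same steps:

```rust
fn main() {
    let hex_id = "0x534f4c2d5553442d38000000000000";
    // Strip the 0x prefix and the zero padding.
    let hex_str = hex_id[2..].trim_end_matches('0');
    // Decode the remaining hex byte pairs as ASCII (a "Cairo short string").
    let bytes: Vec<u8> = (0..hex_str.len())
        .step_by(2)
        .map(|i| u8::from_str_radix(&hex_str[i..i + 2], 16).unwrap())
        .collect();
    let s = String::from_utf8(bytes).unwrap(); // "SOL-USD-8"
    // Drop the "-8" decimals suffix, as parse_hex_asset_id does.
    let pair = s.strip_suffix("-8").unwrap_or(&s);
    assert_eq!(pair, "SOL-USD");
    println!("{pair}");
}
```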
vec!["SOL/USD".to_string(), "BTC/USD".to_string()]; + let received = vec!["SOL-USD".to_string(), "BTC-USD".to_string()]; + + let missing = get_missing_pairs(&subscribed, &received); + assert!(missing.is_empty(), "Expected no missing pairs"); + } + + #[test] + fn test_parse_hex_asset_id() { + let test_cases = vec![ + ("0x534f4c2d5553442d38000000000000", "SOL-USD"), + ("0x4254432d5553442d38000000000000", "BTC-USD"), + ("0x4554482d5553442d38000000000000", "ETH-USD"), + ]; + + for (input, expected) in test_cases { + let result = parse_hex_asset_id(input); + assert_eq!(result, expected, "Failed to parse {}", input); + } + } +} diff --git a/pragma-node/src/caches.rs b/pragma-node/src/caches.rs index 83c5d462..d6f5334c 100644 --- a/pragma-node/src/caches.rs +++ b/pragma-node/src/caches.rs @@ -2,13 +2,13 @@ use std::collections::HashMap; use std::time::Duration; use moka::future::Cache; -use pragma_common::types::merkle_tree::MerkleTree; + +use pragma_common::starknet::StarknetNetwork; use pragma_entities::dto::Publisher; use crate::constants::caches::{ - MERKLE_FEED_TREE_CACHE_TIME_TO_IDLE_IN_SECONDS, MERKLE_FEED_TREE_CACHE_TIME_TO_LIVE_IN_SECONDS, - PUBLISHERS_CACHE_TIME_TO_IDLE_IN_SECONDS, PUBLISHERS_CACHE_TIME_TO_LIVE_IN_SECONDS, - PUBLISHERS_UDPATES_CACHE_TIME_TO_IDLE_IN_SECONDS, + DECIMALS_TIME_TO_LIVE_IN_SECONDS, PUBLISHERS_CACHE_TIME_TO_IDLE_IN_SECONDS, + PUBLISHERS_CACHE_TIME_TO_LIVE_IN_SECONDS, PUBLISHERS_UDPATES_CACHE_TIME_TO_IDLE_IN_SECONDS, PUBLISHERS_UDPATES_CACHE_TIME_TO_LIVE_IN_SECONDS, }; use crate::infra::repositories::onchain_repository::publisher::RawPublisherUpdates; @@ -19,7 +19,7 @@ use crate::infra::repositories::onchain_repository::publisher::RawPublisherUpdat #[derive(Clone, Debug)] pub struct CacheRegistry { onchain_publishers_updates: Cache>, - merkle_feed_tree: Cache, + onchain_decimals: Cache>, publishers: Cache, } @@ -32,7 +32,7 @@ impl Default for CacheRegistry { impl CacheRegistry { /// Initialize all of our caches empty. 
     pub fn new() -> Self {
-        let onchain_publishers_updates_cache = Cache::builder()
+        let onchain_publishers_updates = Cache::builder()
             .time_to_live(Duration::from_secs(
                 PUBLISHERS_UDPATES_CACHE_TIME_TO_LIVE_IN_SECONDS,
             )) // 30 minutes
             .time_to_idle(Duration::from_secs(
                 PUBLISHERS_UDPATES_CACHE_TIME_TO_IDLE_IN_SECONDS,
             )) // 5 minutes
             .build();

-        let merkle_feed_tree_cache = Cache::builder()
-            .time_to_live(Duration::from_secs(
-                MERKLE_FEED_TREE_CACHE_TIME_TO_LIVE_IN_SECONDS,
-            ))
-            .time_to_idle(Duration::from_secs(
-                MERKLE_FEED_TREE_CACHE_TIME_TO_IDLE_IN_SECONDS,
-            ))
+        let onchain_decimals = Cache::builder()
+            .time_to_live(Duration::from_secs(DECIMALS_TIME_TO_LIVE_IN_SECONDS))
             .build();

-        let publishers_cache = Cache::builder()
+        let publishers = Cache::builder()
             .time_to_live(Duration::from_secs(
                 PUBLISHERS_CACHE_TIME_TO_LIVE_IN_SECONDS,
             ))
             .build();

         Self {
-            onchain_publishers_updates: onchain_publishers_updates_cache,
-            merkle_feed_tree: merkle_feed_tree_cache,
-            publishers: publishers_cache,
+            onchain_publishers_updates,
+            onchain_decimals,
+            publishers,
         }
     }
@@ -72,8 +67,8 @@ impl CacheRegistry {
         &self.onchain_publishers_updates
     }

-    pub const fn merkle_feeds_tree(&self) -> &Cache {
-        &self.merkle_feed_tree
+    pub const fn onchain_decimals(&self) -> &Cache> {
+        &self.onchain_decimals
     }

     pub const fn publishers(&self) -> &Cache {
diff --git a/pragma-node/src/config.rs b/pragma-node/src/config.rs
index a88e0ae7..a362f9c7 100644
--- a/pragma-node/src/config.rs
+++ b/pragma-node/src/config.rs
@@ -1,7 +1,7 @@
 use serde::Deserialize;
 use tokio::sync::OnceCell;

-#[derive(Debug, Deserialize)]
+#[derive(Debug, Deserialize, Clone)]
 pub struct ServerConfig {
     host: String,
     port: u16,
@@ -16,7 +16,7 @@ impl Default for ServerConfig {
     }
 }

-#[derive(Debug, Deserialize)]
+#[derive(Debug, Deserialize, Clone)]
 pub struct KafkaConfig {
     pub topic: String,
 }
@@ -29,22 +29,7 @@ impl Default for KafkaConfig {
     }
 }

-#[derive(Debug, Deserialize)]
-pub struct RedisConfig {
-    redis_host: String,
-    redis_port: u16,
-}
-
-impl Default for RedisConfig {
-    fn default() -> Self {
-        Self {
-            redis_host: "0.0.0.0".to_string(),
-            redis_port: 6379,
-        }
-    }
-}
-
-#[derive(Default, Debug, Deserialize, PartialEq, Eq)]
+#[derive(Default, Debug, Deserialize, PartialEq, Eq, Clone)]
 #[serde(rename_all = "lowercase")]
 pub enum Mode {
     Dev,
@@ -52,17 +37,25 @@ pub enum Mode {
     Production,
 }

-#[derive(Default, Debug, Deserialize)]
+#[derive(Default, Debug, Deserialize, PartialEq, Eq, Clone)]
+#[serde(rename_all = "lowercase")]
+pub enum CloudEnv {
+    Aws,
+    #[default]
+    Gcp,
+}
+
+#[derive(Default, Debug, Deserialize, Clone)]
 pub struct ModeConfig {
     mode: Mode,
+    cloud_env: Option<CloudEnv>,
 }

-#[derive(Default, Debug, Deserialize)]
+#[derive(Default, Debug, Deserialize, Clone)]
 pub struct Config {
     mode: ModeConfig,
     server: ServerConfig,
     kafka: KafkaConfig,
-    redis: RedisConfig,
 }

 impl Config {
@@ -70,6 +63,10 @@ impl Config {
         self.mode.mode == Mode::Production
     }

+    pub fn cloud_env(&self) -> CloudEnv {
+        self.mode.cloud_env.clone().unwrap_or_default()
+    }
+
     pub fn server_host(&self) -> &str {
         &self.server.host
     }
@@ -81,14 +78,6 @@ impl Config {
     pub fn kafka_topic(&self) -> &str {
         &self.kafka.topic
     }
-
-    pub fn redis_host(&self) -> &str {
-        &self.redis.redis_host
-    }
-
-    pub const fn redis_port(&self) -> u16 {
-        self.redis.redis_port
-    }
 }

 pub static CONFIG: OnceCell<Config> = OnceCell::const_new();
@@ -96,13 +85,11 @@ pub static CONFIG: OnceCell<Config> = OnceCell::const_new();
 async fn init_config() -> Config {
     let server_config = envy::from_env::<ServerConfig>().unwrap_or_default();
     let kafka_config = envy::from_env::<KafkaConfig>().unwrap_or_default();
-    let redis_config = envy::from_env::<RedisConfig>().unwrap_or_default();
     let mode_config = envy::from_env::<ModeConfig>().unwrap_or_default();

     Config {
         server: server_config,
         kafka: kafka_config,
-        redis: redis_config,
         mode: mode_config,
     }
 }
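A hypothetical sketch of how the new `onchain_decimals` cache can be consumed: moka's `get_with` returns the cached value or runs the init future exactly once, so the RPC is only hit on a miss. The key and value types here are assumptions for illustration (the diff's exact `Cache` type parameters are not shown), and the RPC call is stubbed:

```rust
use std::time::Duration;
use moka::future::Cache;

async fn fetch_decimals_from_rpc(_network: &str, _pair_id: &str) -> u32 {
    8 // stand-in for the actual RPC query
}

#[tokio::main]
async fn main() {
    // One-week TTL, matching DECIMALS_TIME_TO_LIVE_IN_SECONDS below.
    let onchain_decimals: Cache<(String, String), u32> = Cache::builder()
        .time_to_live(Duration::from_secs(604_800))
        .build();

    let key = ("mainnet".to_string(), "BTC/USD".to_string());
    let decimals = onchain_decimals
        .get_with(key.clone(), fetch_decimals_from_rpc(&key.0, &key.1))
        .await;
    assert_eq!(decimals, 8);
}
```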
diff --git a/pragma-node/src/constants/caches.rs b/pragma-node/src/constants/caches.rs
index 93f5b03c..4c202b02 100644
--- a/pragma-node/src/constants/caches.rs
+++ b/pragma-node/src/constants/caches.rs
@@ -7,10 +7,13 @@
 pub const PUBLISHERS_UDPATES_CACHE_TIME_TO_LIVE_IN_SECONDS: u64 = 20 * 60; // 20 minutes
 pub const PUBLISHERS_UDPATES_CACHE_TIME_TO_IDLE_IN_SECONDS: u64 = 5 * 60; // 5 minutes

-/// Cache of the stored Merkle Tree for a certain block in Redis.
-/// Since this value never change we can cache it for faster iterations.
-pub const MERKLE_FEED_TREE_CACHE_TIME_TO_LIVE_IN_SECONDS: u64 = 6 * 60; // 6 minutes
-pub const MERKLE_FEED_TREE_CACHE_TIME_TO_IDLE_IN_SECONDS: u64 = 60; // 1 minutes
+/// Cache of the onchain decimals.
+///
+/// In pragma-node, all decimals are 18, but this is not the case for onchain entries.
+/// So we use this cache to fetch RPC results & store decimals for a specific `pair_id`
+/// for a given network.
+/// The values almost never change, so we use a one-week TTL.
+pub const DECIMALS_TIME_TO_LIVE_IN_SECONDS: u64 = 604_800; // 1 week

 /// Cache of the stored publishers in memory.
 /// This cache is used to retrieve the `Publisher` object from the database
diff --git a/pragma-node/src/constants/currencies.rs b/pragma-node/src/constants/currencies.rs
new file mode 100644
index 00000000..3c7f758b
--- /dev/null
+++ b/pragma-node/src/constants/currencies.rs
@@ -0,0 +1,4 @@
+/// We used to have a `Currencies` table with abstract currencies.
+/// We removed it - for now we just store them in this constant since we don't
+/// update this often at all.
+pub const ABSTRACT_CURRENCIES: [&str; 4] = ["USD", "EUR", "BTC", "USDPLUS"];
diff --git a/pragma-node/src/constants/mod.rs b/pragma-node/src/constants/mod.rs
index a06ede18..f8c4e941 100644
--- a/pragma-node/src/constants/mod.rs
+++ b/pragma-node/src/constants/mod.rs
@@ -1,3 +1,10 @@
 pub mod caches;
+pub mod currencies;
 pub mod others;
 pub mod starkex_ws;
+
+/// All offchain entries are quoted with 18 decimals.
+///
+/// This is not the case for on-chain entries! They still have individual decimals.
+/// We use the `get_onchain_decimals` function to query the RPC and know how many.
+pub const EIGHTEEN_DECIMALS: u32 = 18;
diff --git a/pragma-node/src/constants/starkex_ws.rs b/pragma-node/src/constants/starkex_ws.rs
index b85ac32d..e0c49f8c 100644
--- a/pragma-node/src/constants/starkex_ws.rs
+++ b/pragma-node/src/constants/starkex_ws.rs
@@ -4,29 +4,3 @@
 /// See:
 ///
 pub const PRAGMA_ORACLE_NAME_FOR_STARKEX: &str = "PRGM";
-
-/// Used for the subscription to the entry websocket.
-///
-/// Represents the maximum interval from now that we check for entries.
-/// If we don't have have any entries for that interval max, we stop searching.
-pub const MAX_INTERVAL_WITHOUT_ENTRIES: u64 = 100_000;
-
-/// Used for the subscription to the entry websocket.
-///
-/// Represents the initial interval in milliseconds that we check for entries.
-/// If there's no entries for that interval, we increase the interval by
-/// `INTERVAL_INCREMENT_IN_MS`.
-pub const INITAL_INTERVAL_IN_MS: u64 = 500;
-
-/// Used for the subscription to the entry websocket.
-/// -/// Represents the increment in milliseconds that we increase the interval by. -/// If we reach `MAX_INTERVAL_WITHOUT_ENTRIES`, we stop searching. -pub const INTERVAL_INCREMENT_IN_MS: u64 = 500; - -/// Used for the subscription to the entry websocket. -/// -/// Represents the minimum number of unique publishers that we need to have -/// for a `pair_id` in order to return the computed price. -/// TODO: should be lower for development mode (1) -pub const MINIMUM_NUMBER_OF_PUBLISHERS: usize = 1; diff --git a/pragma-node/src/errors.rs b/pragma-node/src/errors.rs index 8d07c0ee..750a4ceb 100644 --- a/pragma-node/src/errors.rs +++ b/pragma-node/src/errors.rs @@ -1,6 +1,6 @@ +use axum::Json; use axum::http::StatusCode; use axum::response::IntoResponse; -use axum::Json; use serde_json::json; use pragma_entities::EntryError; diff --git a/pragma-node/src/handlers/create_entry.rs b/pragma-node/src/handlers/create_entry.rs deleted file mode 100644 index 9227df7b..00000000 --- a/pragma-node/src/handlers/create_entry.rs +++ /dev/null @@ -1,136 +0,0 @@ -use axum::extract::{self, State}; -use axum::Json; -use pragma_entities::{EntryError, NewEntry}; -use serde::{Deserialize, Serialize}; -use starknet::core::types::Felt; -use utoipa::{ToResponse, ToSchema}; - -use crate::config::config; -use crate::utils::{convert_entry_to_db, publish_to_kafka, validate_publisher}; -use crate::AppState; -use pragma_common::signing::assert_request_signature_is_valid; -use pragma_common::types::entries::Entry; -use pragma_common::types::utils::felt_from_decimal; - -#[derive(Debug, Serialize, Deserialize, ToSchema)] -pub struct CreateEntryRequest { - #[schema(value_type = Vec)] - #[serde(deserialize_with = "felt_from_decimal")] - pub signature: Vec, - pub entries: Vec, -} - -impl AsRef<[Felt]> for CreateEntryRequest { - fn as_ref(&self) -> &[Felt] { - &self.signature - } -} - -impl AsRef<[Entry]> for CreateEntryRequest { - fn as_ref(&self) -> &[Entry] { - &self.entries - } -} - -#[derive(Debug, Serialize, Deserialize, ToSchema, ToResponse)] -pub struct CreateEntryResponse { - pub number_entries_created: usize, -} - -#[utoipa::path( - post, - path = "/node/v1/data/publish", - request_body = CreateEntryRequest, - responses( - (status = 200, description = "Entries published successfuly", body = CreateEntryResponse), - (status = 401, description = "Unauthorized Publisher", body = EntryError) - ) -)] -#[tracing::instrument(skip(state))] -pub async fn create_entries( - State(state): State, - extract::Json(new_entries): extract::Json, -) -> Result, EntryError> { - tracing::info!("Received new entries: {:?}", new_entries); - - if new_entries.entries.is_empty() { - return Ok(Json(CreateEntryResponse { - number_entries_created: 0, - })); - } - - let publisher_name = new_entries.entries[0].base.publisher.clone(); - let publishers_cache = state.caches.publishers(); - let (public_key, account_address) = - validate_publisher(&state.offchain_pool, &publisher_name, publishers_cache).await?; - - let signature = assert_request_signature_is_valid::( - &new_entries, - &account_address, - &public_key, - )?; - - let new_entries_db = new_entries - .entries - .iter() - .map(|entry| convert_entry_to_db(entry, &signature)) - .collect::, EntryError>>()?; - - let config = config().await; - publish_to_kafka( - new_entries_db, - config.kafka_topic().to_string(), - &publisher_name, - ) - .await?; - - Ok(Json(CreateEntryResponse { - number_entries_created: new_entries.entries.len(), - })) -} - -#[cfg(test)] -mod tests { - use 
pragma_common::types::entries::{build_publish_message, BaseEntry, Entry}; - - use super::*; - use rstest::rstest; - - #[rstest] - fn test_build_publish_message_empty() { - let entries: Vec = vec![]; - let typed_data = build_publish_message(&entries); - - assert_eq!(typed_data.primary_type, "Request"); - assert_eq!(typed_data.domain.name, "Pragma"); - assert_eq!(typed_data.domain.version, "1"); - // assert_eq!(typed_data.message.action, "Publish"); - // assert_eq!(typed_data.message.entries, entries); - } - - #[rstest] - #[ignore = "TODO: Compute hash with Pragma SDK"] - fn test_build_publish_message() { - let entries = vec![Entry { - base: BaseEntry { - timestamp: 0, - source: "source".to_string(), - publisher: "publisher".to_string(), - }, - pair_id: "pair_id".to_string(), - price: 0, - volume: 0, - }]; - let typed_data = build_publish_message(&entries); - - assert_eq!(typed_data.primary_type, "Request"); - assert_eq!(typed_data.domain.name, "Pragma"); - assert_eq!(typed_data.domain.version, "1"); - // assert_eq!(typed_data.message.action, "Publish"); - // assert_eq!(typed_data.message.entries, entries); - - let msg_hash = typed_data.encode(Felt::ZERO).unwrap().message_hash; - // Hash computed with the Pragma SDK (python) - assert_eq!(msg_hash, Felt::from_hex("").unwrap()); - } -} diff --git a/pragma-node/src/handlers/create_future_entry.rs b/pragma-node/src/handlers/create_future_entry.rs deleted file mode 100644 index 69673c08..00000000 --- a/pragma-node/src/handlers/create_future_entry.rs +++ /dev/null @@ -1,164 +0,0 @@ -use axum::extract::{self, State}; -use axum::Json; -use chrono::{DateTime, Utc}; -use pragma_common::timestamp::TimestampRangeError; -use pragma_entities::{EntryError, NewFutureEntry}; -use serde::{Deserialize, Serialize}; -use starknet::core::types::Felt; -use utoipa::{ToResponse, ToSchema}; - -use crate::config::config; -use crate::infra::kafka; -use crate::utils::validate_publisher; -use crate::AppState; -use pragma_common::signing::assert_request_signature_is_valid; -use pragma_common::types::entries::FutureEntry; -use pragma_common::types::utils::felt_from_decimal; - -#[derive(Debug, Serialize, Deserialize, ToSchema)] -pub struct CreateFutureEntryRequest { - #[schema(value_type = Vec)] - #[serde(deserialize_with = "felt_from_decimal")] - pub signature: Vec, - pub entries: Vec, -} - -impl AsRef<[Felt]> for CreateFutureEntryRequest { - fn as_ref(&self) -> &[Felt] { - &self.signature - } -} - -impl AsRef<[FutureEntry]> for CreateFutureEntryRequest { - fn as_ref(&self) -> &[FutureEntry] { - &self.entries - } -} - -#[derive(Debug, Serialize, Deserialize, ToSchema, ToResponse)] -pub struct CreateFutureEntryResponse { - number_entries_created: usize, -} - -#[utoipa::path( - post, - path = "/node/v1/data/publish_future", - request_body = CreateFutureEntryRequest, - responses( - (status = 200, description = "Entries published successfuly", body = CreateFutureEntryResponse), - (status = 401, description = "Unauthorized Publisher", body = EntryError) - ) -)] -#[tracing::instrument(skip(state))] -pub async fn create_future_entries( - State(state): State, - extract::Json(new_entries): extract::Json, -) -> Result, EntryError> { - tracing::info!("Received new future entries: {:?}", new_entries); - let config = config().await; - - if new_entries.entries.is_empty() { - return Ok(Json(CreateFutureEntryResponse { - number_entries_created: 0, - })); - } - - let publisher_name = new_entries.entries[0].base.publisher.clone(); - - let (account_address, public_key) = validate_publisher( 
- &state.offchain_pool, - &publisher_name, - state.caches.publishers(), - ) - .await?; - - let signature = assert_request_signature_is_valid::( - &new_entries, - &account_address, - &public_key, - )?; - - let new_entries_db = new_entries - .entries - .iter() - .map(|future_entry| { - #[allow(clippy::cast_possible_wrap)] - let dt = match DateTime::::from_timestamp(future_entry.base.timestamp as i64, 0) { - Some(dt) => dt.naive_utc(), - None => { - return Err(EntryError::InvalidTimestamp( - TimestampRangeError::ConversionError, - )) - } - }; - - // For expiration_timestamp, 0 is sent by publishers for perpetual entries. - // We set them to None in the database to easily filter them out. - let expiry_dt = if future_entry.expiration_timestamp == 0 { - None - } else { - #[allow(clippy::cast_possible_wrap)] - match DateTime::::from_timestamp_millis( - future_entry.expiration_timestamp as i64, - ) { - Some(dt) => Some(dt.naive_utc()), - None => { - return Err(EntryError::InvalidTimestamp( - TimestampRangeError::ConversionError, - )) - } - } - }; - - Ok(NewFutureEntry { - pair_id: future_entry.pair_id.clone(), - publisher: future_entry.base.publisher.clone(), - source: future_entry.base.source.clone(), - timestamp: dt, - expiration_timestamp: expiry_dt, - publisher_signature: format!("0x{signature}"), - price: future_entry.price.into(), - }) - }) - .collect::, EntryError>>()?; - - let data = - serde_json::to_vec(&new_entries_db).map_err(|e| EntryError::PublishData(e.to_string()))?; - - if let Err(e) = kafka::send_message(config.kafka_topic(), &data, &publisher_name).await { - tracing::error!("Error sending message to kafka: {:?}", e); - return Err(EntryError::PublishData(String::from( - "Error sending message to kafka", - ))); - }; - - Ok(Json(CreateFutureEntryResponse { - number_entries_created: new_entries.entries.len(), - })) -} - -#[cfg(test)] -mod tests { - use rstest::rstest; - - use pragma_common::types::entries::{build_publish_message, FutureEntry, PerpEntry}; - - #[rstest] - fn test_build_publish_message_empty() { - let entries: Vec = vec![]; - let typed_data = build_publish_message(&entries); - assert_eq!(typed_data.primary_type, "Request"); - assert_eq!(typed_data.domain.name, "Pragma"); - assert_eq!(typed_data.domain.version, "1"); - // assert_eq!(typed_data.message.action, "Publish"); - // assert_eq!(typed_data.message.entries, entries); - - let entries: Vec = vec![]; - let typed_data = build_publish_message(&entries); - assert_eq!(typed_data.primary_type, "Request"); - assert_eq!(typed_data.domain.name, "Pragma"); - assert_eq!(typed_data.domain.version, "1"); - // assert_eq!(typed_data.message.action, "Publish"); - // assert_eq!(typed_data.message.entries, entries); - } -} diff --git a/pragma-node/src/handlers/get_entry.rs b/pragma-node/src/handlers/get_entry.rs index 9627f881..6110eef2 100644 --- a/pragma-node/src/handlers/get_entry.rs +++ b/pragma-node/src/handlers/get_entry.rs @@ -1,66 +1,67 @@ -use axum::extract::{Query, State}; use axum::Json; +use axum::extract::{Query, State}; use chrono::{DateTime, NaiveDateTime, Utc}; - -use pragma_common::timestamp::TimestampRangeError; -use pragma_common::types::pair::Pair; -use pragma_common::types::{AggregationMode, DataType, Interval}; -use pragma_entities::EntryError; +use pragma_entities::models::entries::timestamp::TimestampRangeError; use serde::{Deserialize, Serialize}; use utoipa::{ToResponse, ToSchema}; -use crate::infra::repositories::entry_repository::{self, MedianEntry}; -use crate::utils::PathExtractor; -use crate::AppState; 
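For context on the file removed above: `create_future_entries` treated an `expiration_timestamp` of `0` as a perpetual entry and stored it as NULL so it could be filtered out in SQL; any other value was read as unix milliseconds. A minimal sketch of that convention, with an illustrative helper name and error type (not part of the codebase):

```rust
use chrono::{DateTime, NaiveDateTime, Utc};

/// Publishers send `0` for perpetual entries; those become `None` (NULL in
/// the database). Any other value is interpreted as unix milliseconds.
/// `expiry_from_millis` and the `String` error are illustrative only.
fn expiry_from_millis(ts_millis: u64) -> Result<Option<NaiveDateTime>, String> {
    if ts_millis == 0 {
        return Ok(None); // perpetual entry
    }
    DateTime::<Utc>::from_timestamp_millis(ts_millis as i64)
        .map(|dt| Some(dt.naive_utc()))
        .ok_or_else(|| format!("expiration timestamp {ts_millis} out of range"))
}

fn main() {
    assert_eq!(expiry_from_millis(0), Ok(None));
    assert!(expiry_from_millis(1_700_000_000_000).is_ok());
}
```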
+use pragma_common::Pair; +use pragma_common::{AggregationMode, InstrumentType, Interval}; +use pragma_entities::{EntryError, TimestampError}; +use crate::constants::EIGHTEEN_DECIMALS; +use crate::infra::repositories::entry_repository::{ + MedianEntry, get_last_updated_timestamp, routing, +}; +use crate::state::AppState; +use crate::utils::PathExtractor; use crate::utils::big_decimal_price_to_hex; use super::GetEntryParams; #[derive(Default, Clone, Debug)] -pub struct RoutingParams { +pub struct EntryParams { pub interval: Interval, pub timestamp: i64, pub aggregation_mode: AggregationMode, - pub data_type: DataType, + pub data_type: InstrumentType, pub expiry: String, + pub with_components: bool, } -impl TryFrom for RoutingParams { +impl TryFrom for EntryParams { type Error = EntryError; fn try_from(params: GetEntryParams) -> Result { let now = chrono::Utc::now().timestamp(); - let timestamp = params.timestamp.map_or(now, |timestamp| timestamp); + // Unwrap timestamp or use current time + let timestamp = params.timestamp.unwrap_or(now); + // Validate timestamp isn't in the future if timestamp > now { - return Err(EntryError::InvalidTimestamp( + return Err(EntryError::InvalidTimestamp(TimestampError::RangeError( TimestampRangeError::EndInFuture, - )); + ))); } - let interval = params - .interval - .map_or(Interval::TwoHours, |interval| interval); - - let aggregation_mode = params - .aggregation - .map_or(AggregationMode::Twap, |aggregation_mode| aggregation_mode); + // Unwrap parameters with their defaults + let interval = params.interval.unwrap_or_default(); + let aggregation_mode = params.aggregation.unwrap_or_default(); + let with_components = params.with_components.unwrap_or(false); + // Convert entry_type to DataType let data_type = params .entry_type - .map_or(DataType::SpotEntry, DataType::from); - - let expiry = if let Some(expiry) = params.expiry { - let expiry_dt = NaiveDateTime::parse_from_str(&expiry, "%Y-%m-%dT%H:%M:%S") - .map(|naive| DateTime::::from_naive_utc_and_offset(naive, Utc)); - match expiry_dt { - Ok(expiry_dt) => expiry_dt.format("%Y-%m-%d %H:%M:%S%:z").to_string(), - Err(_) => return Err(EntryError::InvalidExpiry), - } - } else { - String::default() + .map_or(InstrumentType::Spot, InstrumentType::from); + + // Parse and format expiry date if provided + let expiry = match params.expiry { + Some(expiry_str) => NaiveDateTime::parse_from_str(&expiry_str, "%Y-%m-%dT%H:%M:%S") + .map(|naive| DateTime::::from_naive_utc_and_offset(naive, Utc)) + .map(|dt| dt.format("%Y-%m-%d %H:%M:%S%:z").to_string()) + .map_err(|_| EntryError::InvalidExpiry)?, + None => String::default(), }; Ok(Self { @@ -69,30 +70,54 @@ impl TryFrom for RoutingParams { aggregation_mode, data_type, expiry, + with_components, }) } } - #[derive(Debug, Serialize, Deserialize, ToSchema, ToResponse)] pub struct GetEntryResponse { - num_sources_aggregated: usize, - pair_id: String, - price: String, - timestamp: u64, - decimals: u32, + pub num_sources_aggregated: usize, + pub pair_id: String, + pub price: String, + pub timestamp: u64, + pub decimals: u32, + pub components: Option>, } +/// Get the latest price entry for a trading pair #[utoipa::path( get, path = "/node/v1/data/{base}/{quote}", + tag = "Price Data", responses( - (status = 200, description = "Get median entry successfuly", body = [GetEntryResponse]) + (status = 200, description = "Successfully retrieved price entry", body = GetEntryResponse, + example = json!({ + "num_sources_aggregated": 5, + "pair_id": "BTC/USD", + "price": "0x1234567890abcdef", 
+ "timestamp": 1_647_820_800, + "decimals": 18, + "components": [ + { + "source": "BINANCE", + "price": "0x6cc61113f5871b1000", + "timestamp": 1_743_082_057 + }, + ] + }) + ), + (status = 400, description = "Invalid request parameters", body = EntryError), + (status = 404, description = "Price entry not found", body = EntryError), + (status = 500, description = "Internal server error", body = EntryError) ), params( - ("base" = String, Path, description = "Base Asset"), - ("quote" = String, Path, description = "Quote Asset"), - GetEntryParams, + ("base" = String, Path, description = "Base asset symbol (e.g. BTC)"), + ("quote" = String, Path, description = "Quote asset symbol (e.g. USD)"), + GetEntryParams ), + security( + ("api_key" = []) + ) )] #[tracing::instrument(skip(state))] pub async fn get_entry( @@ -100,21 +125,20 @@ pub async fn get_entry( PathExtractor(pair): PathExtractor<(String, String)>, Query(params): Query, ) -> Result, EntryError> { - let is_routing = params.routing.unwrap_or(false); + let is_routing = params.routing.unwrap_or(true); - let routing_params = RoutingParams::try_from(params)?; + let entry_params = EntryParams::try_from(params)?; let pair = Pair::from(pair); - let (entry, decimals) = - entry_repository::routing(&state.offchain_pool, is_routing, &pair, &routing_params) - .await - .map_err(|e| e.to_entry_error(&(pair.to_pair_id())))?; + let entry = routing(&state.offchain_pool, is_routing, &pair, &entry_params) + .await + .map_err(EntryError::from)?; - let last_updated_timestamp: NaiveDateTime = entry_repository::get_last_updated_timestamp( + let last_updated_timestamp: NaiveDateTime = get_last_updated_timestamp( &state.offchain_pool, pair.to_pair_id(), - routing_params.timestamp, + entry_params.timestamp, ) .await? .unwrap_or(entry.time); @@ -122,7 +146,6 @@ pub async fn get_entry( Ok(Json(adapt_entry_to_entry_response( pair.into(), &entry, - decimals, last_updated_timestamp, ))) } @@ -130,7 +153,6 @@ pub async fn get_entry( pub fn adapt_entry_to_entry_response( pair_id: String, entry: &MedianEntry, - decimals: u32, last_updated_timestamp: NaiveDateTime, ) -> GetEntryResponse { GetEntryResponse { @@ -138,6 +160,17 @@ pub fn adapt_entry_to_entry_response( timestamp: last_updated_timestamp.and_utc().timestamp_millis() as u64, num_sources_aggregated: entry.num_sources as usize, price: big_decimal_price_to_hex(&entry.median_price), - decimals, + decimals: EIGHTEEN_DECIMALS, + components: entry + .components + .as_ref() + .map(|prices| prices.iter().cloned().map(Into::into).collect()), } } + +#[derive(Serialize, Deserialize, Default, ToSchema, Clone, ToResponse, Debug)] +pub struct EntryComponent { + pub source: String, + pub price: String, + pub timestamp: u64, +} diff --git a/pragma-node/src/handlers/get_expiries.rs b/pragma-node/src/handlers/get_expiries.rs deleted file mode 100644 index 370da747..00000000 --- a/pragma-node/src/handlers/get_expiries.rs +++ /dev/null @@ -1,35 +0,0 @@ -use axum::extract::State; -use axum::Json; -use chrono::NaiveDateTime; - -use pragma_common::types::pair::Pair; -use pragma_entities::EntryError; - -use crate::infra::repositories::entry_repository; -use crate::utils::PathExtractor; -use crate::AppState; - -#[utoipa::path( - get, - path = "/node/v1/data/{base}/{quote}/future_expiries", - responses( - (status = 200, description = "Get available future expiries for a pair", body = [Vec]) - ), - params( - ("base" = String, Path, description = "Base Asset"), - ("quote" = String, Path, description = "Quote Asset"), - ), -)] 
-#[tracing::instrument(skip(state))] -pub async fn get_expiries( - State(state): State, - PathExtractor(pair): PathExtractor<(String, String)>, -) -> Result>, EntryError> { - let pair = Pair::from(pair); - - let req_result = entry_repository::get_expiries_list(&state.offchain_pool, pair.to_pair_id()) - .await - .map_err(|e| e.to_entry_error(&(pair.into())))?; - - Ok(Json(req_result)) -} diff --git a/pragma-node/src/handlers/get_ohlc.rs b/pragma-node/src/handlers/get_ohlc.rs index e6127606..bc71d175 100644 --- a/pragma-node/src/handlers/get_ohlc.rs +++ b/pragma-node/src/handlers/get_ohlc.rs @@ -1,36 +1,98 @@ -use axum::extract::{Query, State}; use axum::Json; -use pragma_common::timestamp::TimestampRangeError; -use pragma_common::types::pair::Pair; +use axum::extract::{Query, State}; +use pragma_common::Pair; +use pragma_entities::models::entries::timestamp::TimestampRangeError; use serde::{Deserialize, Serialize}; use utoipa::{ToResponse, ToSchema}; use crate::handlers::Interval; use crate::infra::repositories::entry_repository::{self, OHLCEntry}; +use crate::state::AppState; use crate::utils::PathExtractor; -use crate::AppState; -use pragma_entities::EntryError; +use pragma_entities::{EntryError, TimestampError}; use super::GetEntryParams; +/// Response containing OHLC (candlestick) data for a trading pair #[derive(Debug, Serialize, Deserialize, ToSchema, ToResponse)] +#[schema(example = json!({ + "pair_id": "BTC/USD", + "data": [ + { + "time": "2025-03-10T07:30:00", + "open": "82069269773700000000000", + "low": "82023393045000000000000", + "high": "82289627995410000000000", + "close": "82208749021850000000000" + } + ] +}))] pub struct GetOHLCResponse { - pair_id: String, - data: Vec, + /// Trading pair identifier (e.g., "BTC/USD") + pub pair_id: String, + + /// Array of OHLC entries ordered by timestamp + pub data: Vec, } #[utoipa::path( - get, - path = "/node/v1/aggregation/candlestick/{base}/{quote}", - responses( - (status = 200, description = "Get OHLC data successfuly", body = [GetOHLCResponse]) + get, + path = "/node/v1/aggregation/candlestick/{base}/{quote}", + tag = "Market Data", + responses( + (status = 200, + description = "Successfully retrieved OHLC data", + body = GetOHLCResponse, + example = json!({ + "pair_id": "BTC/USD", + "data": [ + { + "time": "2025-03-10T07:30:00", + "open": "82069269773700000000000", + "low": "82023393045000000000000", + "high": "82289627995410000000000", + "close": "82208749021850000000000" + } + ] + }) + ), + (status = 400, + description = "Invalid parameters", + body = EntryError, + example = json!({ + "happened_at": "2025-03-10T08:27:29.324879945Z", + "message": "Invalid timestamp: Timestamp range error: End timestamp is in the future", + "resource": "EntryModel" + }) ), - params( - ("base" = String, Path, description = "Base Asset"), - ("quote" = String, Path, description = "Quote Asset"), - GetEntryParams, + (status = 404, + description = "No data found", + body = EntryError, + example = json!({ + "happened_at": "2025-03-10T08:27:29.324879945Z", + "message": "Entry not found", + "resource": "EntryModel" + }) ), - )] + (status = 500, + description = "Internal server error", + body = EntryError, + example = json!({ + "happened_at": "2025-03-10T08:27:29.324879945Z", + "message": "Database error: connection failed", + "resource": "EntryModel" + }) + ) + ), + params( + ("base" = String, Path, description = "Base asset symbol"), + ("quote" = String, Path, description = "Quote asset symbol"), + GetEntryParams, + ), + security( + ("api_key" = []) + ) 
+)] #[tracing::instrument(skip(state))] pub async fn get_ohlc( State(state): State, @@ -49,15 +111,19 @@ pub async fn get_ohlc( // Validate given timestamp if timestamp > now { - return Err(EntryError::InvalidTimestamp( + return Err(EntryError::InvalidTimestamp(TimestampError::RangeError( TimestampRangeError::EndInFuture, - )); + ))); } - let entries = - entry_repository::get_ohlc(&state.offchain_pool, pair.to_pair_id(), interval, timestamp) - .await - .map_err(|db_error| db_error.to_entry_error(&pair.to_pair_id()))?; + let entries = entry_repository::get_spot_ohlc( + &state.offchain_pool, + pair.to_pair_id(), + interval, + timestamp, + ) + .await + .map_err(EntryError::from)?; Ok(Json(adapt_entry_to_entry_response(pair.into(), &entries))) } diff --git a/pragma-node/src/handlers/get_volatility.rs b/pragma-node/src/handlers/get_volatility.rs deleted file mode 100644 index f654dff9..00000000 --- a/pragma-node/src/handlers/get_volatility.rs +++ /dev/null @@ -1,90 +0,0 @@ -use axum::extract::{Query, State}; -use axum::Json; -use pragma_common::types::pair::Pair; -use serde::{Deserialize, Serialize}; -use utoipa::{IntoParams, ToResponse, ToSchema}; - -use crate::infra::repositories::entry_repository::{self, MedianEntry}; -use crate::utils::PathExtractor; -use crate::AppState; -use pragma_entities::{EntryError, VolatilityError}; - -use crate::utils::compute_volatility; - -/// Volatility query -#[derive(Deserialize, IntoParams, Debug)] -pub struct VolatilityQuery { - /// Initial timestamp, combined with `end`, it helps define the period over which the mean is computed - start: u64, - /// Final timestamp - end: u64, -} - -#[derive(Debug, Serialize, Deserialize, ToResponse, ToSchema)] -pub struct GetVolatilityResponse { - pair_id: String, - volatility: f64, - decimals: u32, -} - -#[utoipa::path( - get, - path = "/node/v1/volatility/{quote}/{base}", - responses( - (status = 200, description = "Get realized volatility successfuly", body = [GetVolatilityResponse]) - ), - params( - ("quote" = String, Path, description = "Quote Asset"), - ("base" = String, Path, description = "Base Asset"), - VolatilityQuery - ), - )] -#[tracing::instrument(skip(state))] -pub async fn get_volatility( - State(state): State, - PathExtractor(pair): PathExtractor<(String, String)>, - Query(volatility_query): Query, -) -> Result, EntryError> { - let pair = Pair::from(pair); - - if volatility_query.start > volatility_query.end { - return Err(EntryError::VolatilityError( - VolatilityError::InvalidTimestampsRange(volatility_query.start, volatility_query.end), - )); - } - - // Fetch entries between start and end timestamps - let entries = entry_repository::get_median_entries_1_min_between( - &state.offchain_pool, - pair.to_pair_id(), - volatility_query.start, - volatility_query.end, - ) - .await?; - - if entries.is_empty() { - return Err(EntryError::UnknownPairId(pair.to_pair_id())); - } - - let decimals = entry_repository::get_decimals(&state.offchain_pool, &pair).await?; - - Ok(Json(adapt_entry_to_entry_response( - pair.into(), - &entries, - decimals, - ))) -} - -fn adapt_entry_to_entry_response( - pair_id: String, - entries: &[MedianEntry], - decimals: u32, -) -> GetVolatilityResponse { - let volatility = compute_volatility(entries); - - GetVolatilityResponse { - pair_id, - volatility, - decimals, - } -} diff --git a/pragma-node/src/handlers/merkle_feeds/get_merkle_proof.rs b/pragma-node/src/handlers/merkle_feeds/get_merkle_proof.rs deleted file mode 100644 index 6d834d9f..00000000 --- 
a/pragma-node/src/handlers/merkle_feeds/get_merkle_proof.rs +++ /dev/null @@ -1,70 +0,0 @@ -// https://docs.rs/redis/0.26.1/redis/#async - -use axum::extract::{Query, State}; -use axum::Json; -use pragma_common::types::block_id::{BlockId, BlockTag}; -use pragma_common::types::merkle_tree::MerkleProof; -use pragma_common::types::Network; -use pragma_entities::models::merkle_feed_error::MerkleFeedError; -use serde::{Deserialize, Serialize}; -use starknet::core::types::Felt; -use utoipa::{IntoParams, ToResponse, ToSchema}; - -use crate::infra::redis; -use crate::utils::PathExtractor; -use crate::AppState; -use pragma_common::types::hex_hash::HexHash; - -#[derive(Default, Deserialize, IntoParams, ToSchema, Debug)] -pub struct GetMerkleProofQuery { - pub network: Option, - pub block_id: Option, -} - -#[derive(Debug, Serialize, Deserialize, ToResponse, ToSchema)] -pub struct GetMerkleProofResponse(pub MerkleProof); - -#[utoipa::path( - get, - path = "/node/v1/merkle_feeds/proof/{option_hash}", - responses( - (status = 200, description = "Get the merkle proof", body = [GetMerkleProofResponse]) - ), - params( - ("option_hash" = String, Path, description = "Hexadecimal hash of the option"), - GetMerkleProofQuery - ), -)] -#[tracing::instrument(skip(state))] -pub async fn get_merkle_feeds_proof( - State(state): State, - PathExtractor(option_hex_hash): PathExtractor, - Query(params): Query, -) -> Result, MerkleFeedError> { - if state.redis_client.is_none() { - return Err(MerkleFeedError::RedisConnection); - } - - let option_hex_hash = option_hex_hash.0; - let network = params.network.unwrap_or_default(); - let block_id = params.block_id.unwrap_or(BlockId::Tag(BlockTag::Latest)); - - let merkle_tree = redis::get_merkle_tree( - state.redis_client.unwrap(), - network, - block_id, - state.caches.merkle_feeds_tree().clone(), - ) - .await - .map_err(MerkleFeedError::from)?; - - let option_felt_hash = Felt::from_hex(&option_hex_hash) - .map_err(|_| MerkleFeedError::InvalidOptionHash(option_hex_hash.clone()))?; - - let merkle_proof = merkle_tree - .get_proof(&option_felt_hash) - .ok_or(MerkleFeedError::MerkleProof(option_hex_hash))?; - - let hexadecimals_proof = MerkleProof::from(merkle_proof); - Ok(Json(GetMerkleProofResponse(hexadecimals_proof))) -} diff --git a/pragma-node/src/handlers/merkle_feeds/get_option.rs b/pragma-node/src/handlers/merkle_feeds/get_option.rs deleted file mode 100644 index 0b3dafa5..00000000 --- a/pragma-node/src/handlers/merkle_feeds/get_option.rs +++ /dev/null @@ -1,67 +0,0 @@ -// https://docs.rs/redis/0.26.1/redis/#async - -use axum::extract::{Query, State}; -use axum::Json; -use pragma_common::types::block_id::{BlockId, BlockTag}; -use pragma_common::types::options::OptionData; -use pragma_common::types::Network; -use pragma_entities::models::merkle_feed_error::MerkleFeedError; -use serde::{Deserialize, Serialize}; -use utoipa::{IntoParams, ToResponse, ToSchema}; - -use crate::infra::redis; -use crate::utils::PathExtractor; -use crate::AppState; - -#[derive(Default, Deserialize, IntoParams, ToSchema, Debug)] -pub struct GetOptionQuery { - pub network: Option, - #[serde(rename = "block_id")] - pub block_id: Option, -} - -#[derive(Debug, Serialize, Deserialize, ToResponse, ToSchema)] -pub struct GetOptionResponse { - #[serde(flatten)] - pub option_data: OptionData, - pub hash: String, -} - -#[utoipa::path( - get, - path = "/node/v1/merkle_feeds/options/{instrument}", - responses( - (status = 200, description = "Get the option", body = [GetOptionResponse]) - ), - params( - 
("instrument" = String, Path, description = "Name of the instrument"), - GetOptionQuery - ), -)] -#[tracing::instrument(skip(state))] -pub async fn get_merkle_feeds_option( - State(state): State, - PathExtractor(instrument): PathExtractor, - Query(params): Query, -) -> Result, MerkleFeedError> { - if state.redis_client.is_none() { - return Err(MerkleFeedError::RedisConnection); - } - - let network = params.network.unwrap_or_default(); - let block_id = params.block_id.unwrap_or(BlockId::Tag(BlockTag::Latest)); - - let option_data = - redis::get_option_data(state.redis_client.unwrap(), network, block_id, instrument) - .await - .map_err(MerkleFeedError::from)?; - - let option_data_hash = option_data - .pedersen_hash_as_hex_string() - .map_err(|_| MerkleFeedError::InvalidOptionHash(format!("{option_data:?}")))?; - - Ok(Json(GetOptionResponse { - hash: option_data_hash, - option_data, - })) -} diff --git a/pragma-node/src/handlers/merkle_feeds/mod.rs b/pragma-node/src/handlers/merkle_feeds/mod.rs deleted file mode 100644 index a177e1fb..00000000 --- a/pragma-node/src/handlers/merkle_feeds/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod get_merkle_proof; -pub mod get_option; diff --git a/pragma-node/src/handlers/mod.rs b/pragma-node/src/handlers/mod.rs index a1a4da60..3a593409 100644 --- a/pragma-node/src/handlers/mod.rs +++ b/pragma-node/src/handlers/mod.rs @@ -1,33 +1,17 @@ -pub mod create_entry; -pub mod create_future_entry; pub mod get_entry; -pub mod get_expiries; pub mod get_ohlc; -pub mod get_volatility; -pub mod merkle_feeds; pub mod onchain; -pub mod optimistic_oracle; -pub mod publish_entry_ws; -pub mod stream_entry; -pub mod subscribe_to_entry; -pub mod subscribe_to_price; +pub mod stream; +pub mod websocket; -pub use create_entry::create_entries; -pub use create_future_entry::create_future_entries; pub use get_entry::get_entry; -pub use get_expiries::get_expiries; pub use get_ohlc::get_ohlc; -pub use get_volatility::get_volatility; -pub use publish_entry_ws::publish_entry; -pub use subscribe_to_entry::subscribe_to_entry; -pub use subscribe_to_price::subscribe_to_price; +use pragma_entities::UnixTimestamp; use serde::Deserialize; use utoipa::{IntoParams, ToSchema}; -use pragma_common::types::{AggregationMode, DataType, Interval}; - -use pragma_common::types::timestamp::UnixTimestamp; +use pragma_common::{AggregationMode, InstrumentType, Interval}; #[derive(Default, Debug, Deserialize, ToSchema, Clone, Copy)] pub enum EntryType { @@ -40,27 +24,99 @@ pub enum EntryType { Future, } -impl From for DataType { +impl From for InstrumentType { fn from(value: EntryType) -> Self { match value { - EntryType::Spot => Self::SpotEntry, - EntryType::Future => Self::FutureEntry, - EntryType::Perp => Self::PerpEntry, + EntryType::Spot => Self::Spot, + EntryType::Future | EntryType::Perp => Self::Perp, } } } +/// Parameters for retrieving price entries #[derive(Debug, Deserialize, IntoParams, ToSchema)] pub struct GetEntryParams { - /// The unix timestamp in seconds. This endpoint will return the first update whose - /// timestamp is <= the provided value. - #[schema(value_type = i64)] + /// The unix timestamp in seconds to retrieve historical price data. + /// This endpoint will return the first update whose timestamp is <= the provided value. + /// + /// If not provided, returns the latest available price. 
+ /// + /// # Examples + /// - `1_647_820_800`: Returns price data from March 21, 2022 00:00:00 UTC + /// - `null`: Returns the most recent price update + /// + /// NOTE: This only works for `median` aggregation + #[schema(value_type = i64, example = 1_647_820_800)] pub timestamp: Option, + + /// Time interval for aggregated price data. Different intervals affect how price data is + /// aggregated and can be used to get OHLC (Open/High/Low/Close) data at various timeframes. + /// + /// # Available intervals + /// - `100ms`: 100 milliseconds - High frequency trading + /// - `1s`: 1 second - Real-time trading + /// - `5s`: 5 seconds - Short-term price movements + /// - `1min`: 1 minute - Intraday trading + /// - `15min`: 15 minutes - Medium-term analysis + /// - `1h`: 1 hour - Daily trading patterns + /// - `2h`: 2 hours (default) - Extended market analysis + /// - `1d`: 1 day - Long-term trends + /// - `1w`: 1 week - Strategic market overview + #[schema(example = "1min")] pub interval: Option, + + /// Enable price routing through intermediate pairs. + /// When true, if a direct price for the requested pair is not available, + /// the system will attempt to calculate it using intermediate pairs. + /// + /// # Example + /// For BTC/EUR when routing is enabled: + /// - If direct BTC/EUR price is unavailable + /// - System might route through BTC/USD and EUR/USD + /// + /// Default: true + #[schema(example = true)] + #[serde(default)] + #[serde(deserialize_with = "deserialize_bool_from_string")] pub routing: Option, + + /// Method used to aggregate prices from multiple sources. + /// + /// # Available modes + /// - `median`: Middle value (default, more manipulation resistant) + /// - `mean`: Average of all values + /// - `twap`: Time-Weighted Average Price + #[schema(example = "median")] pub aggregation: Option, + + /// Type of market entry to retrieve + /// + /// # Available types + /// - `spot`: Spot market prices (default) + /// - `perp`: Perpetual futures prices + /// - `future`: Fixed-expiry futures prices + #[schema(example = "spot")] pub entry_type: Option, + + /// Expiry date for future contracts in ISO 8601 format. + /// Only applicable when `entry_type` is "future". + /// + /// # Example + /// - `"2024-12-31"`: December 31, 2024 expiry + /// - `null`: Not applicable for spot/perp markets + #[schema(example = "2024-12-31")] pub expiry: Option, + + /// Include source components in the response. + /// When true, the response will include price data from individual sources. 
+ /// + /// # Example + /// - `true`: Include source breakdown in response + /// - `false`: Return aggregated data only (default) + #[schema(example = false)] + #[serde(default)] + #[serde(deserialize_with = "deserialize_bool_from_string")] + pub with_components: Option, } impl Default for GetEntryParams { @@ -72,6 +128,24 @@ impl Default for GetEntryParams { aggregation: Some(AggregationMode::default()), entry_type: Some(EntryType::default()), expiry: None, + with_components: Some(false), } } } + +fn deserialize_bool_from_string<'de, D>(deserializer: D) -> Result, D::Error> +where + D: serde::de::Deserializer<'de>, +{ + // First, try to deserialize as an Option + let opt_str = Option::::deserialize(deserializer)?; + + match opt_str.as_deref() { + Some("true") => Ok(Some(true)), + Some("false") => Ok(Some(false)), + Some(s) => Err(serde::de::Error::custom(format!( + "Invalid boolean value: {s}" + ))), + None => Ok(None), + } +} diff --git a/pragma-node/src/handlers/onchain/get_checkpoints.rs b/pragma-node/src/handlers/onchain/get_checkpoints.rs index b69e8f4a..6eb2823e 100644 --- a/pragma-node/src/handlers/onchain/get_checkpoints.rs +++ b/pragma-node/src/handlers/onchain/get_checkpoints.rs @@ -1,30 +1,30 @@ -use axum::extract::{Query, State}; use axum::Json; +use axum::extract::{Query, State}; -use pragma_common::types::pair::Pair; -use pragma_common::types::Network; +use pragma_common::Pair; +use pragma_common::starknet::StarknetNetwork; use pragma_entities::CheckpointError; use serde::{Deserialize, Serialize}; use utoipa::{IntoParams, ToResponse, ToSchema}; -use crate::infra::repositories::entry_repository::get_decimals; use crate::infra::repositories::onchain_repository::checkpoint::get_checkpoints; +use crate::infra::repositories::onchain_repository::get_onchain_decimals; +use crate::state::AppState; use crate::utils::PathExtractor; -use crate::AppState; pub const DEFAULT_LIMIT: u64 = 100; pub const MAX_LIMIT: u64 = 1000; #[derive(Debug, Deserialize, IntoParams, ToSchema)] pub struct GetOnchainCheckpointsParams { - pub network: Network, + pub network: StarknetNetwork, pub limit: Option, } impl Default for GetOnchainCheckpointsParams { fn default() -> Self { Self { - network: Network::default(), + network: StarknetNetwork::default(), limit: Some(DEFAULT_LIMIT), } } @@ -66,14 +66,19 @@ pub async fn get_onchain_checkpoints( return Err(CheckpointError::InvalidLimit(limit)); } - let decimals = get_decimals(&state.offchain_pool, &pair) - .await - .map_err(CheckpointError::from)?; + let decimals = get_onchain_decimals( + state.caches.onchain_decimals(), + &state.rpc_clients, + params.network, + &pair, + ) + .await + .map_err(CheckpointError::from)?; let checkpoints = get_checkpoints( &state.onchain_pool, params.network, - pair.into(), + pair.clone().into(), decimals, limit, ) @@ -81,7 +86,7 @@ pub async fn get_onchain_checkpoints( .map_err(CheckpointError::from)?; if checkpoints.is_empty() { - return Err(CheckpointError::NotFound); + return Err(CheckpointError::CheckpointNotFound(pair.to_pair_id())); } Ok(Json(GetOnchainCheckpointsResponse(checkpoints))) } diff --git a/pragma-node/src/handlers/onchain/get_entry.rs b/pragma-node/src/handlers/onchain/get_entry.rs index 6eaac038..52ecdd72 100644 --- a/pragma-node/src/handlers/onchain/get_entry.rs +++ b/pragma-node/src/handlers/onchain/get_entry.rs @@ -1,23 +1,22 @@ use std::collections::HashMap; -use axum::extract::{Query, State}; use axum::Json; +use axum::extract::{Query, State}; use bigdecimal::BigDecimal; -use 
pragma_common::types::pair::Pair; -use pragma_common::types::{AggregationMode, Interval, Network}; +use pragma_common::{AggregationMode, Interval, Pair, starknet::StarknetNetwork}; use pragma_entities::EntryError; use serde::{Deserialize, Serialize}; use utoipa::{IntoParams, ToResponse, ToSchema}; use crate::infra::repositories::onchain_repository::entry::{ - get_last_updated_timestamp, get_variations, routing, OnchainRoutingArguments, + OnchainEntryArguments, get_last_updated_timestamp, get_variations, routing, }; -use crate::utils::{big_decimal_price_to_hex, PathExtractor}; -use crate::AppState; +use crate::state::AppState; +use crate::utils::{PathExtractor, big_decimal_price_to_hex}; #[derive(Debug, Default, Deserialize, IntoParams, ToSchema)] pub struct GetOnchainEntryParams { - pub network: Network, + pub network: StarknetNetwork, pub aggregation: Option, pub routing: Option, pub timestamp: Option, @@ -72,7 +71,7 @@ pub async fn get_onchain_entry( let now = chrono::Utc::now().timestamp(); let timestamp = params.timestamp.map_or(now, |timestamp| timestamp); - let routing_arguments = OnchainRoutingArguments { + let routing_arguments = OnchainEntryArguments { pair_id: pair.to_pair_id(), network: params.network, timestamp: (timestamp as u64), @@ -80,24 +79,29 @@ pub async fn get_onchain_entry( is_routing: params.routing.unwrap_or(false), }; - let raw_data = routing(&state.onchain_pool, &state.offchain_pool, routing_arguments) - .await - .map_err(|db_error| db_error.to_entry_error(&pair.to_pair_id()))?; + let raw_data = routing( + &state.onchain_pool, + routing_arguments, + &state.rpc_clients, + state.caches.onchain_decimals(), + ) + .await + .map_err(EntryError::from)?; let entry = raw_data .first() - .ok_or_else(|| EntryError::NotFound(pair.to_pair_id()))?; + .ok_or_else(|| EntryError::EntryNotFound(pair.to_pair_id()))?; let last_updated_timestamp = get_last_updated_timestamp(&state.onchain_pool, params.network, entry.pair_used.clone()) .await - .map_err(|db_error| db_error.to_entry_error(&pair.to_pair_id()))?; + .map_err(EntryError::from)?; let variations = if with_variations { Some( get_variations(&state.onchain_pool, params.network, pair.to_pair_id()) .await - .map_err(|db_error| db_error.to_entry_error(&pair.to_pair_id()))?, + .map_err(EntryError::from)?, ) } else { None diff --git a/pragma-node/src/handlers/onchain/get_history.rs b/pragma-node/src/handlers/onchain/get_history.rs index 7b79cdbc..5624e414 100644 --- a/pragma-node/src/handlers/onchain/get_history.rs +++ b/pragma-node/src/handlers/onchain/get_history.rs @@ -1,21 +1,21 @@ -use axum::extract::{Query, State}; use axum::Json; -use pragma_common::types::pair::Pair; -use pragma_common::types::{Interval, Network}; -use pragma_entities::{EntryError, InfraError}; +use axum::extract::{Query, State}; +use pragma_common::starknet::StarknetNetwork; +use pragma_common::{Interval, Pair}; +use pragma_entities::models::entries::timestamp::TimestampRange; +use pragma_entities::{EntryError, InfraError, TimestampError}; use serde::{Deserialize, Serialize}; use utoipa::{IntoParams, ToResponse, ToSchema}; use crate::infra::repositories::onchain_repository::history::{ - get_historical_entries_and_decimals, retry_with_routing, HistoricalEntryRaw, + HistoricalEntryRaw, get_historical_entries_and_decimals, retry_with_routing, }; -use crate::utils::{big_decimal_price_to_hex, PathExtractor}; -use crate::AppState; -use pragma_common::types::timestamp::TimestampRange; +use crate::state::AppState; +use crate::utils::{PathExtractor, 
big_decimal_price_to_hex}; #[derive(Debug, Deserialize, IntoParams, ToSchema)] pub struct GetOnchainHistoryParams { - pub network: Network, + pub network: StarknetNetwork, pub timestamp: TimestampRange, pub chunk_interval: Option, pub routing: Option, @@ -57,18 +57,20 @@ pub async fn get_onchain_history( let timestamp_range = params .timestamp .assert_time_is_valid() - .map_err(EntryError::InvalidTimestamp)?; + .map_err(|e| EntryError::InvalidTimestamp(TimestampError::RangeError(e)))?; + let chunk_interval = params.chunk_interval.unwrap_or_default(); let with_routing = params.routing.unwrap_or(false); // We first try to get the historical entries for the selected pair let query_result = get_historical_entries_and_decimals( &state.onchain_pool, - &state.offchain_pool, network, &pair, ×tamp_range, chunk_interval, + state.caches.onchain_decimals(), + &state.rpc_clients, ) .await; @@ -80,20 +82,21 @@ pub async fn get_onchain_history( Err(_) if with_routing => { retry_with_routing( &state.onchain_pool, - &state.offchain_pool, network, &pair, ×tamp_range, chunk_interval, + state.caches.onchain_decimals(), + &state.rpc_clients, ) .await? } Err(e) => { // We early returns an empty array if no history is found - if matches!(e, InfraError::NotFound) { + if matches!(e, InfraError::RoutingError(_)) { return Ok(Json(GetOnchainHistoryResponse(vec![]))); } - return Err(e.to_entry_error(&pair.to_pair_id())); + return Err(e.into()); } }; diff --git a/pragma-node/src/handlers/onchain/get_publishers.rs b/pragma-node/src/handlers/onchain/get_publishers.rs index bd26410d..7cfd62ff 100644 --- a/pragma-node/src/handlers/onchain/get_publishers.rs +++ b/pragma-node/src/handlers/onchain/get_publishers.rs @@ -1,21 +1,20 @@ -use axum::extract::{Query, State}; use axum::Json; +use axum::extract::{Query, State}; -use pragma_common::types::{DataType, Network}; +use pragma_common::{InstrumentType, starknet::StarknetNetwork}; use pragma_entities::EntryError; use serde::{Deserialize, Serialize}; use utoipa::{IntoParams, ToResponse, ToSchema}; -use crate::infra::repositories::entry_repository::get_all_currencies_decimals; use crate::infra::repositories::onchain_repository::publisher::{ get_publishers, get_publishers_with_components, }; -use crate::AppState; +use crate::state::AppState; #[derive(Debug, Default, Deserialize, IntoParams, ToSchema)] pub struct GetOnchainPublishersParams { - pub network: Network, - pub data_type: DataType, + pub network: StarknetNetwork, + pub data_type: InstrumentType, } #[derive(Debug, Serialize, Deserialize, ToSchema)] @@ -32,7 +31,7 @@ pub struct PublisherEntry { pub struct Publisher { pub publisher: String, pub website_url: String, - pub last_updated_timestamp: u64, + pub last_updated_timestamp: Option, pub r#type: u32, pub nb_feeds: u32, pub daily_updates: u32, @@ -62,17 +61,14 @@ pub async fn get_onchain_publishers( .await .map_err(EntryError::from)?; - let currencies_decimals = get_all_currencies_decimals(&state.offchain_pool) - .await - .map_err(EntryError::from)?; - let publishers_with_components = get_publishers_with_components( &state.onchain_pool, params.network, params.data_type, - currencies_decimals, publishers, - state.caches.onchain_publishers_updates().clone(), + state.caches.onchain_publishers_updates(), + state.caches.onchain_decimals(), + &state.rpc_clients, ) .await .map_err(EntryError::from)?; diff --git a/pragma-node/src/handlers/onchain/subscribe_to_ohlc.rs b/pragma-node/src/handlers/onchain/subscribe_to_ohlc.rs index 983f7a15..c3438908 100644 --- 
a/pragma-node/src/handlers/onchain/subscribe_to_ohlc.rs +++ b/pragma-node/src/handlers/onchain/subscribe_to_ohlc.rs @@ -1,23 +1,20 @@ use std::net::SocketAddr; -use std::num::NonZeroU32; use std::sync::Arc; +use axum::extract::ws::{WebSocket, WebSocketUpgrade}; use axum::extract::{ConnectInfo, State}; use axum::response::IntoResponse; -use futures_util::SinkExt; use pragma_entities::InfraError; use serde::{Deserialize, Serialize}; - -use pragma_common::types::{Interval, Network}; use utoipa::{ToResponse, ToSchema}; +use pragma_common::{Interval, starknet::StarknetNetwork}; + use crate::infra::repositories::entry_repository::OHLCEntry; use crate::infra::repositories::onchain_repository; -use crate::utils::is_onchain_existing_pair; -use crate::utils::{ChannelHandler, Subscriber, SubscriptionType}; -use crate::{metrics, AppState}; - -use axum::extract::ws::{WebSocket, WebSocketUpgrade}; +use crate::state::AppState; +use crate::utils::ChannelHandler; +use crate::utils::{Subscriber, SubscriptionType}; #[derive(Debug, Default, Serialize, Deserialize, ToSchema, ToResponse)] pub struct GetOnchainOHLCResponse { @@ -34,9 +31,6 @@ pub async fn subscribe_to_onchain_ohlc( ws.on_upgrade(move |socket| create_new_subscriber(socket, state, client_addr)) } -/// Interval in milliseconds that the channel will update the client with the latest prices. -const CHANNEL_UPDATE_INTERVAL_IN_MS: u64 = 30000; // 30 seconds - #[tracing::instrument( skip(socket, app_state), fields( @@ -45,27 +39,25 @@ const CHANNEL_UPDATE_INTERVAL_IN_MS: u64 = 30000; // 30 seconds ) )] async fn create_new_subscriber(socket: WebSocket, app_state: AppState, client_addr: SocketAddr) { - let (mut subscriber, _) = match Subscriber::::new( - "subscribe_to_ohlc".into(), + const CHANNEL_UPDATE_INTERVAL_IN_MS: u64 = 30000; // 30 seconds + let mut subscriber = match Subscriber::::new( + "subscribe_to_onchain_ohlc".into(), socket, client_addr.ip(), Arc::new(app_state), None, CHANNEL_UPDATE_INTERVAL_IN_MS, - ) - .await - { + None, + ) { Ok(subscriber) => subscriber, Err(e) => { - tracing::error!("Failed to register subscriber: {}", e); + tracing::error!("Failed to register subscriber: {:?}", e); return; } }; - // Main event loop for the subscriber let handler = WsOHLCHandler; - let status = subscriber.listen(handler).await; - if let Err(e) = status { + if let Err(e) = subscriber.listen(handler).await { tracing::error!( "[{}] Error occurred while listening to the subscriber: {:?}", subscriber.id, @@ -94,18 +86,19 @@ impl ChannelHandler for WsOH ) -> Result<(), InfraError> { match subscription.msg_type { SubscriptionType::Subscribe => { - let pair_exists = is_onchain_existing_pair( + let pair_exists = crate::utils::is_onchain_existing_pair( &subscriber.app_state.onchain_pool, &subscription.pair, subscription.network, ) .await; if !pair_exists { - let error_msg = "Pair does not exist in the onchain database."; - subscriber.send_err(error_msg).await; + subscriber + .send_err("Pair does not exist in the onchain database.") + .await; return Ok(()); } - let mut state = subscriber.state.lock().await; + let mut state = subscriber.state.write().await; *state = SubscriptionState { subscribed_pair: Some(subscription.pair.clone()), network: subscription.network, @@ -115,12 +108,11 @@ impl ChannelHandler for WsOH }; } SubscriptionType::Unsubscribe => { - let mut state = subscriber.state.lock().await; + let mut state = subscriber.state.write().await; *state = SubscriptionState::default(); } - }; + } self.send_ack_message(subscriber, subscription).await?; - // 
Trigger the first update manually self.periodic_interval(subscriber).await?; Ok(()) } @@ -136,7 +128,7 @@ impl ChannelHandler for WsOH &mut self, subscriber: &mut Subscriber, ) -> Result<(), InfraError> { - let mut state = subscriber.state.lock().await; + let mut state = subscriber.state.write().await; if state.subscribed_pair.is_none() { return Ok(()); } @@ -164,18 +156,8 @@ impl ChannelHandler for WsOH return Err(e); } - match serde_json::to_string(&ohlc_data_res.unwrap()) { - Ok(json_response) => { - self.check_rate_limit(subscriber, &json_response).await?; - - if subscriber.send_msg(json_response).await.is_err() { - subscriber.send_err("Could not send prices.").await; - return Err(InfraError::InternalServerError); - } - } - Err(_) => { - subscriber.send_err("Could not serialize prices.").await; - } + if subscriber.send_msg(ohlc_data_res.unwrap()).await.is_err() { + return Err(InfraError::InternalServerError); } Ok(()) @@ -188,57 +170,18 @@ impl WsOHLCHandler { subscriber: &mut Subscriber, subscription: SubscriptionRequest, ) -> Result<(), InfraError> { - if let Ok(ack_message) = serde_json::to_string(&SubscriptionAck { + let ack_message = SubscriptionAck { msg_type: subscription.msg_type, pair: subscription.pair, network: subscription.network, interval: subscription.interval, - }) { - if subscriber.send_msg(ack_message).await.is_err() { - let error_msg = "Message received but could not send ack message."; - subscriber.send_err(error_msg).await; - } - } else { - let error_msg = "Could not serialize ack message."; - subscriber.send_err(error_msg).await; - } - Ok(()) - } - - #[tracing::instrument( - skip(self, subscriber, message), - fields( - subscriber_id = %subscriber.id, - ip = %subscriber.ip_address, - msg_len = message.len() - ) - )] - - async fn check_rate_limit( - &self, - subscriber: &mut Subscriber, - message: &str, - ) -> Result<(), InfraError> { - let ip_addr = subscriber.ip_address; - // Close the connection if rate limit is exceeded. - if subscriber.rate_limiter.check_key_n( - &ip_addr, - NonZeroU32::new(message.len().try_into()?).ok_or(InfraError::InternalServerError)?, - ) != Ok(Ok(())) - { - tracing::warn!( - subscriber_id = %subscriber.id, - ip = %ip_addr, - "Rate limit exceeded. 
Closing connection.", - ); - - subscriber.record_metric(metrics::Interaction::RateLimit, metrics::Status::Error); + }; - subscriber.send_err("Rate limit exceeded.").await; - subscriber.sender.close().await?; - subscriber.closed = true; + if subscriber.send_msg(ack_message).await.is_err() { + subscriber + .send_err("Message received but could not send ack message.") + .await; } - Ok(()) } } @@ -246,7 +189,7 @@ impl WsOHLCHandler { #[derive(Debug, Clone, Deserialize, Serialize, Default)] struct SubscriptionState { subscribed_pair: Option, - network: Network, + network: StarknetNetwork, interval: Interval, is_first_update: bool, candles_to_get: u64, @@ -256,7 +199,7 @@ struct SubscriptionState { struct SubscriptionRequest { msg_type: SubscriptionType, pair: String, - network: Network, + network: StarknetNetwork, interval: Interval, candles_to_get: Option, } @@ -265,6 +208,6 @@ struct SubscriptionRequest { struct SubscriptionAck { msg_type: SubscriptionType, pair: String, - network: Network, + network: StarknetNetwork, interval: Interval, } diff --git a/pragma-node/src/handlers/optimistic_oracle/get_assertion_details.rs b/pragma-node/src/handlers/optimistic_oracle/get_assertion_details.rs deleted file mode 100644 index 4e3988fd..00000000 --- a/pragma-node/src/handlers/optimistic_oracle/get_assertion_details.rs +++ /dev/null @@ -1,29 +0,0 @@ -use crate::infra::repositories::oo_repository::assertions; -use crate::AppState; -use axum::extract::{Path, State}; -use axum::Json; -use pragma_entities::models::optimistic_oracle_error::OptimisticOracleError; - -use crate::handlers::optimistic_oracle::types::AssertionDetails; - -#[utoipa::path( - get, - path = "node/v1/optimistic/assertions/{assertion_id}", - responses( - (status = 200, description = "Get assertion details successfully", body = AssertionDetails) - ), - params( - ("assertion_id" = String, Path, description = "Unique identifier of the assertion"), - ), -)] -#[tracing::instrument] -pub async fn get_assertion_details( - State(state): State, - Path(assertion_id): Path, -) -> Result, OptimisticOracleError> { - let assertion_details = assertions::get_assertion_details(&state.onchain_pool, &assertion_id) - .await - .map_err(OptimisticOracleError::from)?; - - Ok(Json(assertion_details)) -} diff --git a/pragma-node/src/handlers/optimistic_oracle/get_assertions.rs b/pragma-node/src/handlers/optimistic_oracle/get_assertions.rs deleted file mode 100644 index 1d9c232e..00000000 --- a/pragma-node/src/handlers/optimistic_oracle/get_assertions.rs +++ /dev/null @@ -1,47 +0,0 @@ -use crate::handlers::optimistic_oracle::types::{GetAssertionsParams, GetAssertionsResponse}; -use crate::infra::repositories::oo_repository::assertions; -use crate::AppState; -use axum::extract::{Query, State}; -use axum::Json; -use pragma_entities::models::optimistic_oracle_error::OptimisticOracleError; - -pub const DEFAULT_LIMIT: u32 = 100; - -#[utoipa::path( - get, - path = "node/v1/optimistic/assertions", - responses( - (status = 200, description = "Get assertions successfully", body = GetAssertionsResponse) - ), - params( - ("status" = Option, Query, description = "Filter by assertion status"), - ("page" = Option, Query, description = "Page number for pagination"), - ("limit" = Option, Query, description = "Number of items per page"), - ), -)] -#[tracing::instrument] -pub async fn get_assertions( - State(state): State, - Query(params): Query, -) -> Result, OptimisticOracleError> { - let page = params.page.unwrap_or(1); - let page_size = params.limit.unwrap_or(DEFAULT_LIMIT); 
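A note on the pagination arithmetic in these removed optimistic-oracle handlers: `total_pages` is the ceiling of `total_count / page_size`, but `total_count` was derived from the length of the returned page (flagged `// TO VERIFY` below), so a full page underestimates the true total. A minimal sketch of the arithmetic itself:

```rust
/// Ceiling division as used by the removed handlers to compute `total_pages`.
/// Note: they fed it the *page* length rather than the full row count, so
/// with a full page the result undercounts the real number of pages.
fn total_pages(total_count: u32, page_size: u32) -> u32 {
    total_count.div_ceil(page_size)
}

fn main() {
    assert_eq!(total_pages(0, 100), 0);
    assert_eq!(total_pages(100, 100), 1);
    assert_eq!(total_pages(101, 100), 2);
}
```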
- - let assertions = - assertions::get_assertions(&state.onchain_pool, params.status, page, page_size) - .await - .map_err(OptimisticOracleError::from)?; - - let total_count = assertions.len(); - let total_pages = (total_count as u32).div_ceil(page_size); - - let response = GetAssertionsResponse { - assertions, - #[allow(clippy::cast_possible_wrap)] - total_count: total_count as i64, - current_page: page, - total_pages, - }; - - Ok(Json(response)) -} diff --git a/pragma-node/src/handlers/optimistic_oracle/get_disputed_assertions.rs b/pragma-node/src/handlers/optimistic_oracle/get_disputed_assertions.rs deleted file mode 100644 index 1cb92995..00000000 --- a/pragma-node/src/handlers/optimistic_oracle/get_disputed_assertions.rs +++ /dev/null @@ -1,47 +0,0 @@ -use crate::handlers::optimistic_oracle::types::{ - GetDisputedAssertionsParams, GetDisputedAssertionsResponse, -}; -use crate::infra::repositories::oo_repository::assertions; -use crate::AppState; -use axum::extract::{Query, State}; -use axum::Json; -use pragma_entities::models::optimistic_oracle_error::OptimisticOracleError; - -pub const DEFAULT_LIMIT: u32 = 100; - -#[utoipa::path( - get, - path = "node/v1/optimistic/disputed-assertions", - responses( - (status = 200, description = "Get disputed assertions successfully", body = GetDisputedAssertionsResponse) - ), - params( - ("page" = Option, Query, description = "Page number for pagination"), - ("limit" = Option, Query, description = "Number of items per page"), - ), -)] -#[tracing::instrument] -pub async fn get_disputed_assertions( - State(state): State, - Query(params): Query, -) -> Result, OptimisticOracleError> { - let page = params.page.unwrap_or(1); - let page_size = params.limit.unwrap_or(DEFAULT_LIMIT); - - let disputed_assertions = - assertions::get_disputed_assertions(&state.onchain_pool, page, page_size) - .await - .map_err(OptimisticOracleError::from)?; - - let total_count = disputed_assertions.len(); - let total_pages = (total_count as u32).div_ceil(page_size); - - let response = GetDisputedAssertionsResponse { - disputed_assertions, - total_count, - current_page: page, - total_pages, - }; - - Ok(Json(response)) -} diff --git a/pragma-node/src/handlers/optimistic_oracle/get_resolved_assertions.rs b/pragma-node/src/handlers/optimistic_oracle/get_resolved_assertions.rs deleted file mode 100644 index 98d9c0c0..00000000 --- a/pragma-node/src/handlers/optimistic_oracle/get_resolved_assertions.rs +++ /dev/null @@ -1,48 +0,0 @@ -use axum::extract::{Query, State}; -use axum::Json; - -use crate::handlers::optimistic_oracle::types::{ - GetResolvedAssertionsParams, GetResolvedAssertionsResponse, -}; -use crate::infra::repositories::oo_repository::assertions; -use crate::AppState; -use pragma_entities::models::optimistic_oracle_error::OptimisticOracleError; - -pub const DEFAULT_LIMIT: u32 = 100; - -#[utoipa::path( - get, - path = "node/v1/optimistic/resolved-assertions", - responses( - (status = 200, description = "Get resolved assertions successfully", body = GetResolvedAssertionsResponse) - ), - params( - ("page" = Option, Query, description = "Page number for pagination"), - ("limit" = Option, Query, description = "Number of items per page"), - ), -)] -#[tracing::instrument] -pub async fn get_resolved_assertions( - State(state): State, - Query(params): Query, -) -> Result, OptimisticOracleError> { - let page = params.page.unwrap_or(1); - let page_size = params.limit.unwrap_or(DEFAULT_LIMIT); - - let resolved_assertions = - assertions::get_resolved_assertions(&state.onchain_pool, 
page, page_size) - .await - .map_err(OptimisticOracleError::from)?; - - let total_count = resolved_assertions.len(); // TO VERIFY - let total_pages = (total_count as u32).div_ceil(page_size); - - let response = GetResolvedAssertionsResponse { - resolved_assertions, - total_count, - current_page: page, - total_pages, - }; - - Ok(Json(response)) -} diff --git a/pragma-node/src/handlers/optimistic_oracle/mod.rs b/pragma-node/src/handlers/optimistic_oracle/mod.rs deleted file mode 100644 index 5d9414c1..00000000 --- a/pragma-node/src/handlers/optimistic_oracle/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub mod get_assertion_details; -pub mod get_assertions; -pub mod get_disputed_assertions; -pub mod get_resolved_assertions; -pub mod types; diff --git a/pragma-node/src/handlers/optimistic_oracle/types.rs b/pragma-node/src/handlers/optimistic_oracle/types.rs deleted file mode 100644 index 61777673..00000000 --- a/pragma-node/src/handlers/optimistic_oracle/types.rs +++ /dev/null @@ -1,147 +0,0 @@ -use bigdecimal::BigDecimal; -use chrono::NaiveDateTime; -use serde::{Deserialize, Serialize}; -use std::fmt; -use strum::Display; -use utoipa::ToSchema; - -#[derive(Debug, Serialize, Display, ToSchema)] -pub enum Status { - Active, - Disputed, - Settled, -} - -#[derive(Debug, Serialize, ToSchema)] -pub enum SettlementResolution { - True, - False, - Undefined, -} - -impl From> for SettlementResolution { - fn from(res: Option) -> Self { - match res { - Some(true) => Self::True, - Some(false) => Self::False, - None => Self::Undefined, - } - } -} -impl From for SettlementResolution { - fn from(res: bool) -> Self { - if res { - Self::True - } else { - Self::False - } - } -} - -#[derive(Debug, Deserialize, ToSchema)] -pub struct GetAssertionsParams { - pub status: Option, - pub page: Option, - pub limit: Option, -} - -#[derive(Debug, Serialize, ToSchema)] -pub struct Assertion { - pub assertion_id: String, - pub claim: String, - #[schema(value_type = String)] - pub bond: BigDecimal, - pub expiration_time: NaiveDateTime, - pub identifier: String, - pub status: Status, - pub timestamp: NaiveDateTime, - pub currency: String, -} - -impl fmt::Display for Assertion { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "Assertion {{ assertion_id: {}, claim: {}, bond: {}, expiration_time: {}, identifier: {}, status: {}, timestamp: {}, currency: {} }}", - self.assertion_id, - self.claim, - self.bond, - self.expiration_time, - self.identifier, - self.status, - self.timestamp, - self.currency - ) - } -} -#[derive(Debug, Serialize, ToSchema)] -pub struct ResolvedAssertion { - pub assertion: Assertion, - pub settled_address: String, - pub settlement_resolution: SettlementResolution, - pub settled_at: NaiveDateTime, - pub settle_caller: String, - pub disputed: bool, - pub settlement_tx: String, - pub dispute_id: String, -} - -#[derive(Debug, Serialize, ToSchema)] -pub struct DisputedAssertion { - pub assertion: Assertion, - pub disputer: String, - pub disputed_at: NaiveDateTime, - pub dispute_id: String, - pub disputed_tx: String, -} - -#[derive(Debug, Serialize, ToSchema)] -pub struct GetAssertionsResponse { - pub assertions: Vec, - pub total_count: i64, - pub current_page: u32, - pub total_pages: u32, -} - -#[derive(Debug, Serialize, ToSchema)] -pub struct AssertionDetails { - pub assertion: Assertion, - pub domain_id: String, - pub asserter: String, - pub disputer: String, - pub disputed: bool, - pub dispute_id: String, - pub callback_recipient: String, - pub caller: String, - pub settled: bool, - 
pub settle_caller: String, - pub settlement_resolution: SettlementResolution, -} - -#[derive(Debug, Deserialize, ToSchema)] -pub struct GetDisputedAssertionsParams { - pub page: Option, - pub limit: Option, -} - -#[derive(Debug, Serialize, ToSchema)] -pub struct GetDisputedAssertionsResponse { - pub disputed_assertions: Vec, - pub total_count: usize, - pub current_page: u32, - pub total_pages: u32, -} - -#[derive(Debug, Deserialize, ToSchema)] -pub struct GetResolvedAssertionsParams { - pub page: Option, - pub limit: Option, -} - -#[derive(Debug, Serialize, ToSchema)] -pub struct GetResolvedAssertionsResponse { - pub resolved_assertions: Vec, - pub total_count: usize, - pub current_page: u32, - pub total_pages: u32, -} diff --git a/pragma-node/src/handlers/publish_entry_ws.rs b/pragma-node/src/handlers/publish_entry_ws.rs deleted file mode 100644 index 7703aad0..00000000 --- a/pragma-node/src/handlers/publish_entry_ws.rs +++ /dev/null @@ -1,417 +0,0 @@ -use serde::{Deserialize, Serialize}; -use std::net::SocketAddr; -use std::sync::Arc; -use std::time::{Duration, SystemTime}; -use utoipa::ToSchema; - -use crate::handlers::create_entry::CreateEntryResponse; -use crate::utils::{convert_entry_to_db, publish_to_kafka, validate_publisher}; -use crate::utils::{ChannelHandler, Subscriber, WebSocketError}; -use crate::AppState; -use pragma_common::signing::assert_login_is_valid; -use pragma_common::types::auth::{build_login_message, LoginMessage}; -use pragma_common::types::entries::Entry; - -use pragma_entities::EntryError; -use starknet_crypto::{Felt, Signature}; - -use axum::extract::ws::{WebSocket, WebSocketUpgrade}; -use axum::extract::{ConnectInfo, State}; -use axum::http::StatusCode; -use axum::response::IntoResponse; - -// Session expiry time in minutes -const SESSION_EXPIRY_DURATION: Duration = Duration::from_secs(5 * 60); - -#[derive(Debug)] -pub struct PublisherSession { - login_time: SystemTime, - ip_address: std::net::IpAddr, -} - -impl PublisherSession { - fn new(ip_address: std::net::IpAddr) -> Self { - Self { - login_time: SystemTime::now(), - ip_address, - } - } - - /// Checks if the session has expired - /// In that case the publisher should login again - fn is_expired(&self) -> bool { - SystemTime::now() - .duration_since(self.login_time) - .map(|duration| duration > SESSION_EXPIRY_DURATION) - .unwrap_or(true) - } - - /// Checks if the IP address matches the one stored in the session - /// This is used to check if the publisher is sending entries from the same IP address he logged in from - fn validate_ip(&self, ip: &std::net::IpAddr) -> bool { - &self.ip_address == ip - } -} - -#[derive(Debug, Serialize, Deserialize, ToSchema)] -pub struct PublishEntryRequest { - pub entries: Vec, -} - -#[derive(Debug, Deserialize)] -#[serde(tag = "msg_type")] -enum ClientMessage { - #[serde(rename = "publish")] - Publish(PublishEntryRequest), - #[serde(rename = "login")] - Login(LoginMessage), -} - -#[derive(Debug, Default)] -pub struct PublishEntryState { - publisher_name: Option, - is_logged_in: bool, -} - -#[tracing::instrument(skip(state, ws), fields(endpoint_name = "publish_entry"))] -pub async fn publish_entry( - ws: WebSocketUpgrade, - State(state): State, - ConnectInfo(client_addr): ConnectInfo, -) -> impl IntoResponse { - if state.pragma_signer.is_none() { - return (StatusCode::LOCKED, "Locked: Pragma signer not found").into_response(); - } - - ws.on_upgrade(move |socket| create_new_subscriber(socket, state, client_addr)) -} - -/// Interval in milliseconds that the channel will update 
-
-/// Interval in milliseconds that the channel will update the client with the latest prices.
-const CHANNEL_UPDATE_INTERVAL_IN_MS: u64 = 500;
-
-#[tracing::instrument(
-    skip(socket, app_state),
-    fields(
-        subscriber_id,
-        client_ip = %client_addr.ip()
-    )
-)]
-async fn create_new_subscriber(socket: WebSocket, app_state: AppState, client_addr: SocketAddr) {
-    let (mut subscriber, _) = match Subscriber::<PublishEntryState>::new(
-        "publish_entry".into(),
-        socket,
-        client_addr.ip(),
-        Arc::new(app_state),
-        Some(PublishEntryState {
-            publisher_name: None,
-            is_logged_in: false,
-        }),
-        CHANNEL_UPDATE_INTERVAL_IN_MS,
-    )
-    .await
-    {
-        Ok(subscriber) => subscriber,
-        Err(e) => {
-            tracing::error!("Failed to register subscriber: {}", e);
-            return;
-        }
-    };
-
-    // Main event loop for the subscriber
-    let handler = PublishEntryHandler;
-    let status = subscriber.listen(handler).await;
-
-    // Clean up session on disconnect
-    let publisher_name = &subscriber.state.lock().await.publisher_name;
-    if let Some(publisher_name) = publisher_name {
-        subscriber
-            .app_state
-            .publisher_sessions
-            .remove(publisher_name);
-    }
-
-    if let Err(e) = status {
-        tracing::error!(
-            "[{}] Error occurred while listening to the subscriber: {:?}",
-            subscriber.id,
-            e
-        );
-    }
-}
-
-pub struct PublishEntryHandler;
-
-#[derive(Debug, Serialize)]
-struct PublishResponse {
-    status: String,
-    message: String,
-    data: Option<CreateEntryResponse>,
-}
-
-#[derive(Debug, Serialize)]
-struct LoginResponse {
-    status: String,
-    message: String,
-}
-
-#[async_trait::async_trait]
-impl ChannelHandler for PublishEntryHandler {
-    #[allow(clippy::too_many_lines)]
-    async fn handle_client_msg(
-        &mut self,
-        subscriber: &mut Subscriber<PublishEntryState>,
-        client_message: ClientMessage,
-    ) -> Result<(), WebSocketError> {
-        let app_state = subscriber.app_state.clone();
-        match client_message {
-            ClientMessage::Login(login_message) => {
-                // Check if this publisher already has an active session
-                if let Some(mut session) = app_state
-                    .publisher_sessions
-                    .get_mut(&login_message.publisher_name)
-                {
-                    if session.is_expired() {
-                        // Remove expired session
-                        subscriber
-                            .app_state
-                            .publisher_sessions
-                            .remove(&login_message.publisher_name);
-                    } else {
-                        // Reset the session login time
-                        session.login_time = SystemTime::now();
-                    }
-                }
-
-                let result = process_login(subscriber, login_message.clone()).await;
-                let has_login_failed = result.is_err();
-                let response = match result {
-                    Ok(()) => {
-                        // Store the new session with IP address
-                        subscriber.app_state.publisher_sessions.insert(
-                            login_message.publisher_name.clone(),
-                            PublisherSession::new(subscriber.ip_address),
-                        );
-                        // Update subscriber state
-                        {
-                            let mut state = subscriber.state.lock().await;
-                            *state = PublishEntryState {
-                                publisher_name: Some(login_message.publisher_name),
-                                is_logged_in: true,
-                            };
-                        }
-                        LoginResponse {
-                            status: "success".to_string(),
-                            message: "Login successful".to_string(),
-                        }
-                    }
-                    Err(e) => LoginResponse {
-                        status: "error".to_string(),
-                        message: e.to_string(),
-                    },
-                };
-                subscriber
-                    .send_msg(serde_json::to_string(&response).unwrap())
-                    .await
-                    .map_err(|_| WebSocketError::ChannelClose)?;
-
-                // If login was unsuccessful we just close the channel
-                if has_login_failed {
-                    return Err(WebSocketError::ChannelClose);
-                }
-            }
-            ClientMessage::Publish(new_entries) => {
-                // Check login state, session expiry and IP match
-                let should_send_error = {
-                    let state = subscriber.state.lock().await;
-
-                    if !state.is_logged_in {
-                        Some(PublishResponse {
-                            status: "error".to_string(),
-                            message: "Not logged in".to_string(),
-                            data: None,
-                        })
-                    } else if let Some(publisher_name) = &state.publisher_name {
-                        if let Some(session) =
-                            subscriber.app_state.publisher_sessions.get(publisher_name)
-                        {
-                            if session.is_expired() {
-                                subscriber
-                                    .app_state
-                                    .publisher_sessions
-                                    .remove(publisher_name);
-                                Some(PublishResponse {
-                                    status: "error".to_string(),
-                                    message: "Session expired, please login again".to_string(),
-                                    data: None,
-                                })
-                            } else if !session.validate_ip(&subscriber.ip_address) {
-                                Some(PublishResponse {
-                                    status: "error".to_string(),
-                                    message: "Invalid IP address for this publisher session"
-                                        .to_string(),
-                                    data: None,
-                                })
-                            } else {
-                                None
-                            }
-                        } else {
-                            None
-                        }
-                    } else {
-                        None
-                    }
-                };
-
-                if let Some(error_response) = should_send_error {
-                    subscriber
-                        .send_msg(serde_json::to_string(&error_response).unwrap())
-                        .await
-                        .map_err(|_| WebSocketError::ChannelClose)?;
-                    if error_response.message.contains("expired") {
-                        return Err(WebSocketError::ChannelClose);
-                    }
-                    return Ok(());
-                }
-
-                // Process entries without signature verification
-                let result = process_entries_without_verification(subscriber, new_entries).await;
-                let response = match result {
-                    Ok(response) => PublishResponse {
-                        status: "success".to_string(),
-                        message: "Entries published successfully".to_string(),
-                        data: Some(response),
-                    },
-                    Err(e) => PublishResponse {
-                        status: "error".to_string(),
-                        message: e.to_string(),
-                        data: None,
-                    },
-                };
-                subscriber
-                    .send_msg(serde_json::to_string(&response).unwrap())
-                    .await
-                    .map_err(|_| WebSocketError::ChannelClose)?;
-            }
-        }
-
-        Ok(())
-    }
-
-    async fn periodic_interval(
-        &mut self,
-        subscriber: &mut Subscriber<PublishEntryState>,
-    ) -> Result<(), WebSocketError> {
-        // Check session expiry periodically
-        let should_close = {
-            let state = subscriber.state.lock().await;
-            if let Some(publisher_name) = &state.publisher_name {
-                if let Some(session) = subscriber.app_state.publisher_sessions.get(publisher_name) {
-                    if session.is_expired() {
-                        subscriber
-                            .app_state
-                            .publisher_sessions
-                            .remove(publisher_name);
-                        true
-                    } else {
-                        false
-                    }
-                } else {
-                    false
-                }
-            } else {
-                false
-            }
-        };
-
-        if should_close {
-            let response = PublishResponse {
-                status: "error".to_string(),
-                message: "Session expired, please login again".to_string(),
-                data: None,
-            };
-            subscriber
-                .send_msg(serde_json::to_string(&response).unwrap())
-                .await
-                .map_err(|_| WebSocketError::ChannelClose)?;
-            return Err(WebSocketError::ChannelClose);
-        }
-        Ok(())
-    }
-}
-
-#[tracing::instrument(skip(subscriber))]
-async fn process_entries_without_verification(
-    subscriber: &Subscriber<PublishEntryState>,
-    new_entries: PublishEntryRequest,
-) -> Result<CreateEntryResponse, EntryError> {
-    tracing::info!("Received new entries via WebSocket: {:?}", new_entries);
-
-    if new_entries.entries.is_empty() {
-        return Ok(CreateEntryResponse {
-            number_entries_created: 0,
-        });
-    }
-
-    let publisher_name = &subscriber.state.lock().await.publisher_name;
-    let publisher_name = publisher_name
-        .as_ref()
-        .ok_or_else(|| EntryError::NotFound("No publisher name in session state".to_string()))?;
-
-    let new_entries_db = new_entries
-        .entries
-        .iter()
-        .map(|entry| {
-            convert_entry_to_db(
-                entry,
-                &Signature {
-                    r: Felt::ZERO,
-                    s: Felt::ZERO,
-                },
-            )
-        })
-        .collect::<Result<Vec<_>, EntryError>>()?;
-
-    let config = crate::config::config().await;
-    publish_to_kafka(
-        new_entries_db,
-        config.kafka_topic().to_string(),
-        publisher_name,
-    )
-    .await?;
-
-    Ok(CreateEntryResponse {
-        number_entries_created: new_entries.entries.len(),
-    })
-}
-
-#[tracing::instrument(skip(subscriber))]
-async fn process_login(
-    subscriber: &Subscriber<PublishEntryState>,
-    login_message: LoginMessage,
-) -> Result<(), EntryError> {
-    let publisher_name = login_message.publisher_name;
-    let state = subscriber.app_state.clone();
-
-    // Check if the expiration timestamp is valid
-    let current_time = chrono::Utc::now().timestamp() as u64;
-    if login_message.expiration_timestamp <= current_time {
-        return Err(EntryError::InvalidLoginMessage(
-            "Login message has expired".to_string(),
-        ));
-    }
-
-    let message = build_login_message(&publisher_name, login_message.expiration_timestamp);
-
-    let signature = &Signature {
-        r: login_message.signature[0],
-        s: login_message.signature[1],
-    };
-
-    let publishers_cache = state.caches.publishers();
-    let (public_key, account_address) =
-        validate_publisher(&state.offchain_pool, &publisher_name, publishers_cache).await?;
-
-    assert_login_is_valid(message, signature, &account_address, &public_key)
-        .map_err(EntryError::SignerError)?;
-    Ok(())
-}
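
A rough publisher-side counterpart of the check this deleted function performed, for illustration only (assumptions: `hash_login_message` is a hypothetical stand-in for however `build_login_message`'s output is hashed to a single `Felt`, and the five-minute horizon merely mirrors `SESSION_EXPIRY_DURATION` above):

    use starknet::signers::SigningKey;
    use starknet_crypto::Felt;

    fn build_login_payload(publisher_name: &str, secret: Felt) -> serde_json::Value {
        // Expire the login shortly in the future, like the server-side session does.
        let expiration_timestamp = chrono::Utc::now().timestamp() as u64 + 5 * 60;
        // Hypothetical helper: hash the canonical login message into one Felt.
        let message_hash: Felt = hash_login_message(publisher_name, expiration_timestamp);
        let signature = SigningKey::from_secret_scalar(secret)
            .sign(&message_hash)
            .expect("stark signing failed");
        serde_json::json!({
            "msg_type": "login",
            "publisher_name": publisher_name,
            "expiration_timestamp": expiration_timestamp,
            "signature": [signature.r.to_string(), signature.s.to_string()],
        })
    }
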
diff --git a/pragma-node/src/handlers/stream/common.rs b/pragma-node/src/handlers/stream/common.rs
new file mode 100644
index 00000000..fc76bb2a
--- /dev/null
+++ b/pragma-node/src/handlers/stream/common.rs
@@ -0,0 +1,138 @@
+use std::pin::Pin;
+
+use axum::response::sse::Event;
+use pragma_common::{AggregationMode, Pair};
+use pragma_entities::EntryError;
+
+use crate::{
+    handlers::get_entry::{EntryParams, GetEntryResponse, adapt_entry_to_entry_response},
+    infra::repositories::entry_repository,
+    state::AppState,
+};
+
+pub const DEFAULT_HISTORICAL_PRICES: usize = 50;
+
+pub type BoxedFuture = Pin<Box<dyn std::future::Future<Output = Event> + Send>>;
+pub type BoxedStreamItem = Box<dyn Fn() -> BoxedFuture + Send>;
+
+pub async fn get_historical_entries(
+    state: &AppState,
+    pair: &Pair,
+    entry_params: &EntryParams,
+    count: usize,
+) -> Result<Vec<GetEntryResponse>, EntryError> {
+    let interval = entry_params.interval;
+    // Get current timestamp
+    let end_timestamp = chrono::Utc::now().timestamp() as u64;
+    // Get the timestamp from `count` intervals ago
+    let start_timestamp = end_timestamp.saturating_sub(count as u64 * interval.to_seconds() as u64);
+
+    // Get entries based on aggregation mode
+    let entries = match entry_params.aggregation_mode {
+        AggregationMode::Median => entry_repository::get_median_prices_between(
+            &state.offchain_pool,
+            pair.to_pair_id(),
+            entry_params.clone(),
+            start_timestamp,
+            end_timestamp,
+        )
+        .await
+        .map_err(EntryError::from)?,
+        AggregationMode::Twap => unreachable!(),
+    };
+
+    let responses: Vec<GetEntryResponse> = entries
+        .into_iter()
+        .take(count)
+        .map(|entry| adapt_entry_to_entry_response(pair.to_pair_id(), &entry, entry.time))
+        .collect();
+
+    Ok(responses)
+}
+
+pub async fn get_latest_entry(
+    state: &AppState,
+    pair: &Pair,
+    is_routing: bool,
+    entry_params: &EntryParams,
+) -> Result<GetEntryResponse, EntryError> {
+    // We have to update the timestamp to now every tick
+    let mut new_routing = entry_params.clone();
+    new_routing.timestamp = chrono::Utc::now().timestamp();
+
+    let entry = entry_repository::routing(&state.offchain_pool, is_routing, pair, &new_routing)
+        .await
+        .map_err(EntryError::from)?;
+
+    let last_updated_timestamp = entry_repository::get_last_updated_timestamp(
+        &state.offchain_pool,
+        pair.to_pair_id(),
+        new_routing.timestamp,
+    )
+    .await?
+    .unwrap_or(entry.time);
+
+    Ok(adapt_entry_to_entry_response(
+        pair.to_pair_id(),
+        &entry,
+        last_updated_timestamp,
+    ))
+}
+
+pub async fn get_historical_entries_multi_pair(
+    state: &AppState,
+    pairs: &[Pair],
+    entry_params: &EntryParams,
+    count: usize,
+) -> Result<Vec<Vec<GetEntryResponse>>, EntryError> {
+    let mut all_entries = Vec::with_capacity(pairs.len());
+
+    for pair in pairs {
+        match get_historical_entries(state, pair, entry_params, count).await {
+            Ok(entries) => all_entries.push(entries),
+            Err(e) => {
+                tracing::error!(
+                    "❌ Failed to get historical entries for pair {}: {}",
+                    pair.to_pair_id(),
+                    e
+                );
+            }
+        }
+    }
+
+    // Return an error only if we couldn't get any entries
+    if all_entries.is_empty() {
+        return Err(EntryError::HistoryNotFound);
+    }
+
+    Ok(all_entries)
+}
+
+pub async fn get_latest_entries_multi_pair(
+    state: &AppState,
+    pairs: &[Pair],
+    is_routing: bool,
+    entry_params: &EntryParams,
+) -> Result<Vec<GetEntryResponse>, EntryError> {
+    let mut latest_entries = Vec::with_capacity(pairs.len());
+
+    for pair in pairs {
+        match get_latest_entry(state, pair, is_routing, entry_params).await {
+            Ok(entry) => latest_entries.push(entry),
+            Err(e) => {
+                tracing::error!(
+                    "❌ Failed to get latest entry for pair {}: {}",
+                    pair.to_pair_id(),
+                    e
+                );
+            }
+        }
+    }
+
+    // Return an error only if we couldn't get any entries
+    if latest_entries.is_empty() {
+        return Err(EntryError::HistoryNotFound);
+    }
+
+    Ok(latest_entries)
+}
diff --git a/pragma-node/src/handlers/stream/mod.rs b/pragma-node/src/handlers/stream/mod.rs
new file mode 100644
index 00000000..864db7c9
--- /dev/null
+++ b/pragma-node/src/handlers/stream/mod.rs
@@ -0,0 +1,4 @@
+pub mod common;
+pub mod stream_multi;
+
+pub use common::*;
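
A quick sanity check of the lookback-window arithmetic in get_historical_entries above (a sketch; the one-second interval is just an example value):

    // 50 entries at a 1-second interval => a 50-second lookback window.
    let count: u64 = 50; // DEFAULT_HISTORICAL_PRICES
    let interval_secs: u64 = 1;
    let end_timestamp = chrono::Utc::now().timestamp() as u64;
    let start_timestamp = end_timestamp.saturating_sub(count * interval_secs);
    assert_eq!(end_timestamp - start_timestamp, 50);
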
diff --git a/pragma-node/src/handlers/stream_entry.rs b/pragma-node/src/handlers/stream/stream_multi.rs
similarity index 55%
rename from pragma-node/src/handlers/stream_entry.rs
rename to pragma-node/src/handlers/stream/stream_multi.rs
index 3afa8387..fe038a62 100644
--- a/pragma-node/src/handlers/stream_entry.rs
+++ b/pragma-node/src/handlers/stream/stream_multi.rs
@@ -1,58 +1,84 @@
-use std::{convert::Infallible, pin::Pin, time::Duration};
+use std::{convert::Infallible, time::Duration};
 
 use axum::{
-    extract::{Query, State},
+    extract::State,
     response::sse::{Event, Sse},
 };
-use axum_extra::{headers, TypedHeader};
-use futures::{
-    stream::{self, Stream},
-    Future,
-};
+use axum_extra::extract::Query;
+use axum_extra::{TypedHeader, headers};
+use futures::stream::{self, Stream};
 use serde::Deserialize;
 use tokio_stream::StreamExt;
 use utoipa::{IntoParams, ToSchema};
 
-use pragma_common::types::{pair::Pair, AggregationMode};
-use pragma_entities::EntryError;
-
-use super::{
-    get_entry::{adapt_entry_to_entry_response, GetEntryResponse, RoutingParams},
-    GetEntryParams,
+use pragma_common::{AggregationMode, Interval, Pair};
+
+use crate::{
+    handlers::{
+        GetEntryParams,
+        get_entry::EntryParams,
+        stream::{
+            BoxedFuture, BoxedStreamItem, DEFAULT_HISTORICAL_PRICES,
+            get_historical_entries_multi_pair, get_latest_entries_multi_pair,
+        },
+    },
+    state::AppState,
 };
-use crate::{infra::repositories::entry_repository, utils::PathExtractor, AppState};
-
-const DEFAULT_HISTORICAL_PRICES: usize = 50;
-
-type BoxedFuture = Pin<Box<dyn Future<Output = Event> + Send>>;
-type BoxedStreamItem = Box<dyn Fn() -> BoxedFuture + Send>;
-
 #[derive(Debug, Deserialize, IntoParams, ToSchema)]
-pub struct StreamEntryParams {
+pub struct StreamEntryMultipairParams {
+    /// Base parameters for entry requests including interval, aggregation mode, and routing options
     #[serde(flatten)]
     pub get_entry_params: GetEntryParams,
+    /// List of trading pairs to stream prices for (e.g. `["ETH/USD", "BTC/USD"]`)
+    #[serde(rename = "pairs[]")]
+    #[param(example = json!(["ETH/USD", "BTC/USD"]))]
+    pub pairs: Vec<String>,
+    /// Number of historical price entries to fetch on initial connection (default: 50)
+    #[param(example = 100)]
     pub historical_prices: Option<usize>,
 }
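
A hedged illustration of a matching request (hypothetical host; `pairs[]` repeats once per pair, which is why the diff swaps `axum::extract::Query` for `axum_extra::extract::Query` — the latter can collect repeated keys into a `Vec`; the remaining query keys come from the flattened `GetEntryParams`):

    // "/" inside a pair id must be URL-encoded as %2F.
    let url = "https://api.example.com/node/v1/data/multi/stream\
               ?pairs[]=ETH%2FUSD&pairs[]=BTC%2FUSD&historical_prices=50";
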
 
+#[utoipa::path(
+    get,
+    path = "/node/v1/data/multi/stream",
+    params(
+        StreamEntryMultipairParams
+    ),
+    responses(
+        (status = 200, description = "Server-sent events stream of price entries for multiple pairs", content_type = "text/event-stream")
+    ),
+    tag = "Stream"
+)]
 #[allow(clippy::too_many_lines)]
-pub async fn stream_entry(
+pub async fn stream_entry_multi_pair(
     State(state): State<AppState>,
-    PathExtractor(pair): PathExtractor<(String, String)>,
-    Query(params): Query<StreamEntryParams>,
+    Query(params): Query<StreamEntryMultipairParams>,
     TypedHeader(user_agent): TypedHeader<headers::UserAgent>,
 ) -> Sse<impl Stream<Item = Result<Event, Infallible>>> {
-    let pair = Pair::from(pair);
+    let pairs = params
+        .pairs
+        .iter()
+        .map(|pair| Pair::from(pair.clone()))
+        .collect::<Vec<_>>();
+
     let is_routing = params.get_entry_params.routing.unwrap_or(false);
-    let interval = params.get_entry_params.interval.unwrap_or_default();
+    let interval = params
+        .get_entry_params
+        .interval
+        .unwrap_or(Interval::OneHundredMillisecond);
     let historical_prices = params
         .historical_prices
         .unwrap_or(DEFAULT_HISTORICAL_PRICES);
 
     tracing::info!(
-        "`{}` connected to price feed {} with {} historical prices",
+        "`{}` connected to price feeds {} with {} historical prices",
         user_agent.as_str(),
-        pair.to_pair_id(),
+        pairs
+            .iter()
+            .map(Pair::to_pair_id)
+            .collect::<Vec<_>>()
+            .join(", "),
         historical_prices
     );
@@ -60,7 +86,7 @@ pub async fn stream_entry(
         || params.get_entry_params.timestamp.is_some()
         || matches!(
             params.get_entry_params.aggregation,
-            Some(AggregationMode::Twap | AggregationMode::Mean)
+            Some(AggregationMode::Twap)
         )
     {
         let mut sent_error = false;
         Box::new(move || {
@@ -80,13 +106,13 @@ pub async fn stream_entry(
             })
         })
     } else {
-        match RoutingParams::try_from(params.get_entry_params) {
+        match EntryParams::try_from(params.get_entry_params) {
             Ok(get_entry_params) => {
                 let mut first_batch = true;
 
                 Box::new(move || {
                     let state = state.clone();
-                    let pair = pair.clone();
+                    let pairs = pairs.clone();
                     let params = get_entry_params.clone();
                     let is_first = first_batch;
@@ -94,8 +120,8 @@ pub async fn stream_entry(
 
                     Box::pin(async move {
                         if is_first {
-                            // For the first batch, get historical prices
-                            match get_historical_entries(&state, &pair, &params, historical_prices).await {
+                            // For the first batch, get historical prices for all pairs
+                            match get_historical_entries_multi_pair(&state, &pairs, &params, historical_prices).await {
                                 Ok(entries) => Event::default()
                                     .json_data(&entries)
                                     .unwrap_or_else(|e| Event::default().json_data(serde_json::json!({
@@ -109,20 +135,22 @@ pub async fn stream_entry(
                                         .unwrap_or_else(|_| Event::default().data(r#"{"error": "Error serializing error message"}"#)),
                                 }
                         } else {
-                            // For subsequent updates, get latest price
-                            match get_latest_entry(&state, &pair, is_routing, &params).await {
-                                Ok(entry_response) => Event::default()
-                                    .json_data(&entry_response)
+                            // For subsequent updates, get latest prices for all pairs
+                            match get_latest_entries_multi_pair(&state, &pairs, is_routing, &params)
+                                .await
+                            {
+                                Ok(entry_responses) => Event::default()
+                                    .json_data(&entry_responses)
                                     .unwrap_or_else(|e| {
                                         Event::default()
                                             .json_data(serde_json::json!({
-                                                "error": format!("Error serializing entry: {e}")
+                                                "error": format!("Error serializing entries: {e}")
                                             }))
                                             .unwrap()
                                     }),
                                 Err(e) => Event::default()
                                     .json_data(serde_json::json!({
-                                        "error": format!("Error fetching entry: {e}")
+                                        "error": format!("Error fetching entries: {e}")
                                     }))
                                     .unwrap_or_else(|_| {
                                         Event::default()
@@ -155,7 +183,7 @@ pub async fn stream_entry(
     let stream = stream::repeat_with(generator)
         .then(|future| future)
         .map(Ok)
-        .throttle(Duration::from_secs(interval.to_seconds() as u64));
+        .throttle(interval.into());
 
     Sse::new(stream).keep_alive(
         axum::response::sse::KeepAlive::new()
@@ -163,77 +191,3 @@ pub async fn stream_entry(
             .text("keep-alive-text"),
     )
 }
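
The new `.throttle(interval.into())` leans on an `Interval`-to-`Duration` conversion in pragma_common; this is the reason the sub-second `OneHundredMillisecond` default now works, where the old seconds-based `Duration::from_secs(interval.to_seconds() as u64)` would have truncated to zero. A minimal sketch of the shape such an impl could take (an assumption, not the crate's actual code):

    use std::time::Duration;

    // Hypothetical mirror of the conversion the handler relies on.
    impl From<Interval> for Duration {
        fn from(interval: Interval) -> Self {
            match interval {
                // Sub-second variants need millisecond precision...
                Interval::OneHundredMillisecond => Duration::from_millis(100),
                // ...while the rest can fall back to whole seconds.
                other => Duration::from_secs(other.to_seconds() as u64),
            }
        }
    }
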
-
-async fn get_historical_entries(
-    state: &AppState,
-    pair: &Pair,
-    routing_params: &RoutingParams,
-    count: usize,
-) -> Result<Vec<GetEntryResponse>, EntryError> {
-    let interval = routing_params.interval;
-    // Get current timestamp
-    let end_timestamp = chrono::Utc::now().timestamp() as u64;
-    // Get timestamp from count minutes ago
-    let start_timestamp = end_timestamp.saturating_sub(count as u64 * interval.to_seconds() as u64);
-
-    // Get entries based on aggregation mode
-    let (entries, decimals) = match routing_params.aggregation_mode {
-        AggregationMode::Median => {
-            let entries = entry_repository::get_median_prices_between(
-                &state.offchain_pool,
-                pair.to_pair_id(),
-                routing_params.clone(),
-                start_timestamp,
-                end_timestamp,
-            )
-            .await
-            .map_err(|e| e.to_entry_error(&pair.to_pair_id()))?;
-
-            let decimals = entry_repository::get_decimals(&state.offchain_pool, pair)
-                .await
-                .map_err(|e| e.to_entry_error(&pair.to_pair_id()))?;
-
-            (entries, decimals)
-        }
-        AggregationMode::Mean | AggregationMode::Twap => unreachable!(),
-    };
-
-    let responses: Vec<GetEntryResponse> = entries
-        .into_iter()
-        .take(count)
-        .map(|entry| adapt_entry_to_entry_response(pair.to_pair_id(), &entry, decimals, entry.time))
-        .collect();
-
-    Ok(responses)
-}
-
-async fn get_latest_entry(
-    state: &AppState,
-    pair: &Pair,
-    is_routing: bool,
-    routing_params: &RoutingParams,
-) -> Result<GetEntryResponse, EntryError> {
-    // We have to update the timestamp to now every tick
-    let mut new_routing = routing_params.clone();
-    new_routing.timestamp = chrono::Utc::now().timestamp();
-
-    let (entry, decimals) =
-        entry_repository::routing(&state.offchain_pool, is_routing, pair, &new_routing)
-            .await
-            .map_err(|e| e.to_entry_error(&(pair.to_pair_id())))?;
-
-    let last_updated_timestamp = entry_repository::get_last_updated_timestamp(
-        &state.offchain_pool,
-        pair.to_pair_id(),
-        new_routing.timestamp,
-    )
-    .await?
-    .unwrap_or(entry.time);
-
-    Ok(adapt_entry_to_entry_response(
-        pair.to_pair_id(),
-        &entry,
-        decimals,
-        last_updated_timestamp,
-    ))
-}
diff --git a/pragma-node/src/handlers/subscribe_to_entry.rs b/pragma-node/src/handlers/subscribe_to_entry.rs
deleted file mode 100644
index a11069fd..00000000
--- a/pragma-node/src/handlers/subscribe_to_entry.rs
+++ /dev/null
@@ -1,362 +0,0 @@
-use std::collections::HashSet;
-use std::net::SocketAddr;
-use std::sync::Arc;
-
-use axum::extract::ws::{WebSocket, WebSocketUpgrade};
-use axum::extract::{ConnectInfo, State};
-use axum::http::StatusCode;
-use axum::response::IntoResponse;
-use bigdecimal::BigDecimal;
-use serde::{Deserialize, Serialize};
-
-use pragma_common::types::DataType;
-use pragma_entities::EntryError;
-use utoipa::{ToResponse, ToSchema};
-
-use crate::constants::starkex_ws::PRAGMA_ORACLE_NAME_FOR_STARKEX;
-use crate::infra::repositories::entry_repository::MedianEntryWithComponents;
-use crate::utils::only_existing_pairs;
-use crate::utils::pricer::{IndexPricer, MarkPricer, Pricer};
-use crate::utils::{ChannelHandler, Subscriber, SubscriptionType};
-use crate::AppState;
-use pragma_common::signing::sign_data;
-use pragma_common::signing::starkex::StarkexPrice;
-use pragma_common::types::timestamp::UnixTimestamp;
-
-#[derive(Debug, Clone, Default, Serialize, Deserialize, ToSchema)]
-pub struct SignedPublisherPrice {
-    pub oracle_asset_id: String,
-    pub oracle_price: String,
-    pub signing_key: String,
-    pub timestamp: String,
-}
-
-#[derive(Debug, Default, Serialize, Deserialize, ToSchema)]
-pub struct AssetOraclePrice {
-    pub global_asset_id: String,
-    pub median_price: String,
-    pub signature: String,
-    pub signed_prices: Vec<SignedPublisherPrice>,
-}
-
-#[derive(Debug, Default, Serialize, Deserialize, ToResponse, ToSchema)]
-pub struct SubscribeToEntryResponse {
-    pub oracle_prices: Vec<AssetOraclePrice>,
-    #[schema(value_type = i64)]
-    pub timestamp: UnixTimestamp,
-}
-
-#[tracing::instrument(skip(state, ws), fields(endpoint_name = "subscribe_to_entry"))]
-pub async fn subscribe_to_entry(
-    ws: WebSocketUpgrade,
-    State(state): State<AppState>,
-    ConnectInfo(client_addr): ConnectInfo<SocketAddr>,
-) -> impl IntoResponse {
-    if state.pragma_signer.is_none() {
-        return (StatusCode::LOCKED, "Locked: Pragma signer not found").into_response();
-    }
-    ws.on_upgrade(move |socket| create_new_subscriber(socket, state, client_addr))
-}
-
-/// Interval in milliseconds that the channel will update the client with the latest prices.
-const CHANNEL_UPDATE_INTERVAL_IN_MS: u64 = 500;
-
-#[tracing::instrument(
-    skip(socket, app_state),
-    fields(
-        subscriber_id,
-        client_ip = %client_addr.ip()
-    )
-)]
-async fn create_new_subscriber(socket: WebSocket, app_state: AppState, client_addr: SocketAddr) {
-    let (mut subscriber, _) = match Subscriber::<SubscriptionState>::new(
-        "subscribe_to_entry".into(),
-        socket,
-        client_addr.ip(),
-        Arc::new(app_state),
-        None,
-        CHANNEL_UPDATE_INTERVAL_IN_MS,
-    )
-    .await
-    {
-        Ok(subscriber) => subscriber,
-        Err(e) => {
-            tracing::error!("Failed to register subscriber: {}", e);
-            return;
-        }
-    };
-
-    // Main event loop for the subscriber
-    let handler = WsEntriesHandler;
-    let status = subscriber.listen(handler).await;
-    if let Err(e) = status {
-        tracing::error!(
-            "[{}] Error occurred while listening to the subscriber: {:?}",
-            subscriber.id,
-            e
-        );
-    }
-}
-
-struct WsEntriesHandler;
-
-#[async_trait::async_trait]
-impl ChannelHandler for WsEntriesHandler {
-    #[tracing::instrument(
-        skip(self, subscriber),
-        fields(
-            subscriber_id = %subscriber.id,
-            msg_type = ?request.msg_type,
-            pairs = ?request.pairs
-        )
-    )]
-    async fn handle_client_msg(
-        &mut self,
-        subscriber: &mut Subscriber<SubscriptionState>,
-        request: SubscriptionRequest,
-    ) -> Result<(), EntryError> {
-        let (existing_spot_pairs, existing_perp_pairs) =
-            only_existing_pairs(&subscriber.app_state.offchain_pool, request.pairs).await;
-        let mut state = subscriber.state.lock().await;
-        match request.msg_type {
-            SubscriptionType::Subscribe => {
-                state.add_spot_pairs(existing_spot_pairs);
-                state.add_perp_pairs(existing_perp_pairs);
-            }
-            SubscriptionType::Unsubscribe => {
-                state.remove_spot_pairs(&existing_spot_pairs);
-                state.remove_perp_pairs(&existing_perp_pairs);
-            }
-        };
-        let subscribed_pairs = state.get_fmt_subscribed_pairs();
-        drop(state);
-        // We send an ack message to the client with the subscribed pairs (so
-        // the client knows which pairs are successfully subscribed).
-        if let Ok(ack_message) = serde_json::to_string(&SubscriptionAck {
-            msg_type: request.msg_type,
-            pairs: subscribed_pairs,
-        }) {
-            if subscriber.send_msg(ack_message).await.is_err() {
-                let error_msg = "Message received but could not send ack message.";
-                subscriber.send_err(error_msg).await;
-            }
-        } else {
-            let error_msg = "Could not serialize ack message.";
-            subscriber.send_err(error_msg).await;
-        }
-        Ok(())
-    }
-
-    #[tracing::instrument(
-        skip(self, subscriber),
-        fields(
-            subscriber_id = %subscriber.id
-        )
-    )]
-    async fn periodic_interval(
-        &mut self,
-        subscriber: &mut Subscriber<SubscriptionState>,
-    ) -> Result<(), EntryError> {
-        let subscription = subscriber.state.lock().await;
-        if subscription.is_empty() {
-            return Ok(());
-        }
-        let response = match self
-            .get_subscribed_pairs_medians(&subscriber.app_state, &subscription)
-            .await
-        {
-            Ok(response) => response,
-            Err(e) => {
-                drop(subscription);
-                subscriber.send_err(&e.to_string()).await;
-                return Err(e);
-            }
-        };
-        drop(subscription);
-        if let Ok(json_response) = serde_json::to_string(&response) {
-            if subscriber.send_msg(json_response).await.is_err() {
-                subscriber.send_err("Could not send prices.").await;
-            }
-        } else {
-            subscriber.send_err("Could not serialize prices.").await;
-        }
-        Ok(())
-    }
-}
-
-impl WsEntriesHandler {
-    /// Get the current median entries for the subscribed pairs and sign them as Pragma.
-    #[tracing::instrument(
-        skip(self, state, subscription),
-        fields(
-            spot_pairs = ?subscription.get_subscribed_spot_pairs().len(),
-            perp_pairs = ?subscription.get_subscribed_perp_pairs().len()
-        )
-    )]
-    async fn get_subscribed_pairs_medians(
-        &self,
-        state: &AppState,
-        subscription: &SubscriptionState,
-    ) -> Result<SubscribeToEntryResponse, EntryError> {
-        let median_entries = self.get_all_entries(state, subscription).await?;
-
-        let mut response: SubscribeToEntryResponse = Default::default();
-        let now = chrono::Utc::now().timestamp();
-
-        let pragma_signer = state
-            .pragma_signer
-            .as_ref()
-            // Should not happen, as the endpoint is disabled if the signer is not found.
-            .ok_or(EntryError::InternalServerError)?;
-
-        for entry in median_entries {
-            let pair_id = entry.pair_id.clone();
-            // Scale price from 8 decimals to 18 decimals for StarkEx
-            // TODO: dont hardcode the decimals, deduce it from the currency decimals
-            let price_with_18_decimals =
-                entry.median_price.clone() * BigDecimal::from(10_u64.pow(10));
-
-            let starkex_price = StarkexPrice {
-                oracle_name: PRAGMA_ORACLE_NAME_FOR_STARKEX.to_string(),
-                pair_id: pair_id.clone(),
-                timestamp: now as u64,
-                price: price_with_18_decimals.clone(),
-            };
-            let signature =
-                sign_data(pragma_signer, &starkex_price).map_err(|_| EntryError::InvalidSigner)?;
-
-            // Create AssetOraclePrice with the original entry (it will be scaled in the TryFrom implementation)
-            let mut oracle_price: AssetOraclePrice = entry
-                .try_into()
-                .map_err(|_| EntryError::InternalServerError)?;
-            oracle_price.signature = signature;
-            response.oracle_prices.push(oracle_price);
-        }
-        response.timestamp = now;
-        Ok(response)
-    }
-
-    /// Get index & mark prices for the subscribed pairs.
-    #[tracing::instrument(skip(self, state, subscription))]
-    async fn get_all_entries(
-        &self,
-        state: &AppState,
-        subscription: &SubscriptionState,
-    ) -> Result<Vec<MedianEntryWithComponents>, EntryError> {
-        let index_pricer = IndexPricer::new(
-            subscription.get_subscribed_spot_pairs(),
-            DataType::SpotEntry,
-        );
-
-        let (usd_pairs, non_usd_pairs): (Vec<String>, Vec<String>) = subscription
-            .get_subscribed_perp_pairs()
-            .into_iter()
-            .partition(|pair| {
-                tracing::debug!("Checking pair for USD: {}", pair);
-                pair.ends_with("USD")
-            });
-        tracing::debug!(
-            "USD pairs: {:?}, non-USD pairs: {:?}",
-            usd_pairs,
-            non_usd_pairs
-        );
-        let mark_pricer_usd = IndexPricer::new(usd_pairs, DataType::PerpEntry);
-        let mark_pricer_non_usd = MarkPricer::new(non_usd_pairs, DataType::PerpEntry);
-
-        // Compute entries concurrently
-        let (index_entries, usd_mark_entries, non_usd_mark_entries) = tokio::join!(
-            index_pricer.compute(&state.offchain_pool),
-            mark_pricer_usd.compute(&state.offchain_pool),
-            mark_pricer_non_usd.compute(&state.offchain_pool)
-        );
-
-        let mut median_entries = vec![];
-        median_entries.extend(index_entries.unwrap_or_default());
-
-        // Add :MARK suffix to mark prices
-        let mut usd_mark_entries = usd_mark_entries.unwrap_or_default();
-        for entry in &mut usd_mark_entries {
-            entry.pair_id = format!("{}:MARK", entry.pair_id);
-        }
-        median_entries.extend(usd_mark_entries);
-
-        let mut non_usd_mark_entries = non_usd_mark_entries.unwrap_or_default();
-        for entry in &mut non_usd_mark_entries {
-            entry.pair_id = format!("{}:MARK", entry.pair_id);
-        }
-        median_entries.extend(non_usd_mark_entries);
-
-        Ok(median_entries)
-    }
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-struct SubscriptionRequest {
-    msg_type: SubscriptionType,
-    pairs: Vec<String>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-struct SubscriptionAck {
-    msg_type: SubscriptionType,
-    pairs: Vec<String>,
-}
-
-#[derive(Debug, Default, Serialize, Deserialize)]
-struct SubscriptionState {
-    spot_pairs: HashSet<String>,
-    perp_pairs: HashSet<String>,
-}
-
-impl SubscriptionState {
-    fn is_empty(&self) -> bool {
-        self.spot_pairs.is_empty() && self.perp_pairs.is_empty()
-    }
-
-    fn add_spot_pairs(&mut self, pairs: Vec<String>) {
-        self.spot_pairs.extend(pairs);
-    }
-
-    fn add_perp_pairs(&mut self, pairs: Vec<String>) {
-        self.perp_pairs.extend(pairs);
-    }
-
-    fn remove_spot_pairs(&mut self, pairs: &[String]) {
-        for pair in pairs {
-            self.spot_pairs.remove(pair);
-        }
-    }
-
-    fn remove_perp_pairs(&mut self, pairs: &[String]) {
-        for pair in pairs {
-            self.perp_pairs.remove(pair);
-        }
-    }
-
-    /// Get the subscribed spot pairs.
-    fn get_subscribed_spot_pairs(&self) -> Vec<String> {
-        self.spot_pairs.iter().cloned().collect()
-    }
-
-    /// Get the subscribed perps pairs (without suffix).
-    fn get_subscribed_perp_pairs(&self) -> Vec<String> {
-        self.perp_pairs.iter().cloned().collect()
-    }
-
-    /// Get the subscribed perps pairs with the MARK suffix.
-    fn get_fmt_subscribed_perp_pairs(&self) -> Vec<String> {
-        self.perp_pairs
-            .iter()
-            .map(|pair| format!("{pair}:MARK"))
-            .collect()
-    }
-
-    /// Get all the currently subscribed pairs.
-    /// (Spot and Perp pairs with the suffix)
-    fn get_fmt_subscribed_pairs(&self) -> Vec<String> {
-        let mut spot_pairs = self.get_subscribed_spot_pairs();
-        let perp_pairs = self.get_fmt_subscribed_perp_pairs();
-        spot_pairs.extend(perp_pairs);
-        spot_pairs
-    }
-}
diff --git a/pragma-node/src/handlers/websocket/mod.rs b/pragma-node/src/handlers/websocket/mod.rs
new file mode 100644
index 00000000..bf237351
--- /dev/null
+++ b/pragma-node/src/handlers/websocket/mod.rs
@@ -0,0 +1,154 @@
+use std::collections::{HashMap, HashSet};
+
+use pragma_common::{AggregationMode, InstrumentType, Interval, pair::Pair};
+use pragma_entities::EntryError;
+use serde::{Deserialize, Serialize};
+
+use crate::{
+    infra::repositories::entry_repository::{self, MedianEntry},
+    state::AppState,
+    utils::SubscriptionType,
+};
+
+use super::get_entry::EntryParams;
+
+pub mod subscribe_to_entry;
+pub mod subscribe_to_price;
+pub use subscribe_to_entry::subscribe_to_entry;
+pub use subscribe_to_price::subscribe_to_price;
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct SubscriptionRequest {
+    pub msg_type: SubscriptionType,
+    pub pairs: Vec<String>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct SubscriptionAck {
+    pub msg_type: SubscriptionType,
+    pub pairs: Vec<String>,
+}
+
+#[derive(Debug, Default, Serialize, Deserialize)]
+pub struct SubscriptionState {
+    pub spot_pairs: HashSet<String>,
+    pub perp_pairs: HashSet<String>,
+}
+
+impl SubscriptionState {
+    pub fn is_empty(&self) -> bool {
+        self.spot_pairs.is_empty() && self.perp_pairs.is_empty()
+    }
+
+    pub fn add_spot_pairs(&mut self, pairs: Vec<String>) {
+        self.spot_pairs.extend(pairs);
+    }
+
+    pub fn add_perp_pairs(&mut self, pairs: Vec<String>) {
+        self.perp_pairs.extend(pairs);
+    }
+
+    pub fn remove_spot_pairs(&mut self, pairs: &[String]) {
+        for pair in pairs {
+            self.spot_pairs.remove(pair);
+        }
+    }
+
+    pub fn remove_perp_pairs(&mut self, pairs: &[String]) {
+        for pair in pairs {
+            self.perp_pairs.remove(pair);
+        }
+    }
+
+    /// Get the subscribed spot pairs.
+    pub fn get_subscribed_spot_pairs(&self) -> Vec<String> {
+        self.spot_pairs.iter().cloned().collect()
+    }
+
+    /// Get the subscribed perp pairs (without suffix).
+    pub fn get_subscribed_perp_pairs(&self) -> Vec<String> {
+        self.perp_pairs.iter().cloned().collect()
+    }
+
+    /// Get the subscribed perp pairs with the MARK suffix.
+    pub fn get_fmt_subscribed_perp_pairs(&self) -> Vec<String> {
+        self.perp_pairs
+            .iter()
+            .map(|pair| format!("{pair}:MARK"))
+            .collect()
+    }
+
+    /// Get all the currently subscribed pairs.
+    /// (Spot and Perp pairs with the suffix)
+    pub fn get_fmt_subscribed_pairs(&self) -> Vec<String> {
+        let mut spot_pairs = self.get_subscribed_spot_pairs();
+        let perp_pairs = self.get_fmt_subscribed_perp_pairs();
+        spot_pairs.extend(perp_pairs);
+        spot_pairs
+    }
+}
+
+pub async fn get_latest_entry(
+    state: &AppState,
+    pair: &Pair,
+    is_routing: bool,
+    entry_params: &EntryParams,
+) -> Result<MedianEntry, EntryError> {
+    // We have to update the timestamp to now every tick
+    let mut new_routing = entry_params.clone();
+    new_routing.timestamp = chrono::Utc::now().timestamp();
+
+    let entry = entry_repository::routing(&state.offchain_pool, is_routing, pair, &new_routing)
+        .await
+        .map_err(EntryError::from)?;
+
+    Ok(entry)
+}
+
+pub async fn get_latest_entries_multi_pair(
+    state: &AppState,
+    pairs: &[Pair],
+    is_routing: bool,
+    entry_params: &EntryParams,
+) -> Result<HashMap<String, MedianEntry>, EntryError> {
+    let mut latest_entries = HashMap::new();
+
+    for pair in pairs {
+        match get_latest_entry(state, pair, is_routing, entry_params).await {
+            Ok(entry) => {
+                // Add :MARK suffix to the key if it's a perp pair
+                let key = if entry_params.data_type == InstrumentType::Perp {
+                    format!("{}:MARK", pair.to_pair_id())
+                } else {
+                    pair.to_pair_id()
+                };
+                latest_entries.insert(key, entry);
+            }
+            Err(e) => {
+                tracing::error!("❌ Failed to process message: {}", e);
-            }
+        }
+    }
+
+    // Return an error only if we couldn't get any entries
+    if latest_entries.is_empty() {
+        return Err(EntryError::HistoryNotFound);
+    }
+
+    Ok(latest_entries)
+}
+
+pub fn get_params_for_websocket(is_perp: bool) -> EntryParams {
+    EntryParams {
+        interval: Interval::OneHundredMillisecond,
+        timestamp: chrono::Utc::now().timestamp_millis(),
+        aggregation_mode: AggregationMode::Median,
+        data_type: if is_perp {
+            InstrumentType::Perp
+        } else {
+            InstrumentType::Spot
+        },
+        expiry: String::default(),
+        with_components: true,
+    }
+}
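
To make the keying rule above concrete: one mixed subscription yields a single map with plain ids for spot prices and `:MARK`-suffixed ids for perp mark prices (a sketch; the pairs are only examples):

    // Spot params key entries as "BTC/USD"; perp params as "BTC/USD:MARK".
    let spot_params = get_params_for_websocket(false); // InstrumentType::Spot
    let perp_params = get_params_for_websocket(true);  // InstrumentType::Perp
    assert!(matches!(spot_params.data_type, InstrumentType::Spot));
    assert!(matches!(perp_params.data_type, InstrumentType::Perp));
    // Resulting map shape (illustrative):
    // { "BTC/USD": MedianEntry { .. }, "BTC/USD:MARK": MedianEntry { .. } }
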
diff --git a/pragma-node/src/handlers/websocket/subscribe_to_entry.rs b/pragma-node/src/handlers/websocket/subscribe_to_entry.rs
new file mode 100644
index 00000000..d91aa8e8
--- /dev/null
+++ b/pragma-node/src/handlers/websocket/subscribe_to_entry.rs
@@ -0,0 +1,383 @@
+use std::collections::HashMap;
+use std::net::SocketAddr;
+use std::str::FromStr as _;
+use std::sync::Arc;
+
+use axum::extract::ws::{WebSocket, WebSocketUpgrade};
+use axum::extract::{ConnectInfo, State};
+use axum::http::StatusCode;
+use axum::response::IntoResponse;
+use pragma_common::Pair;
+use pragma_common::starknet::ConversionError;
+use pragma_entities::models::entries::timestamp::UnixTimestamp;
+use serde::{Deserialize, Serialize};
+use starknet::signers::SigningKey;
+use utoipa::{ToResponse, ToSchema};
+
+use pragma_entities::EntryError;
+
+use crate::constants::starkex_ws::PRAGMA_ORACLE_NAME_FOR_STARKEX;
+use crate::handlers::websocket::SubscriptionState;
+use crate::infra::repositories::entry_repository::MedianEntry;
+use crate::state::AppState;
+use crate::utils::signing::sign_data;
+use crate::utils::starkex::StarkexPrice;
+use crate::utils::{ChannelHandler, Subscriber, SubscriptionType};
+use crate::utils::{hex_string_to_bigdecimal, only_existing_pairs};
+
+use super::{
+    SubscriptionAck, SubscriptionRequest, get_latest_entries_multi_pair, get_params_for_websocket,
+};
+
+/// Response format for `StarkEx` price subscriptions
+#[derive(Debug, Clone, Default, Serialize, Deserialize, ToSchema)]
+pub struct SignedPublisherPrice {
+    /// StarkEx-specific asset identifier in hex format.
+    /// Format: the pair id (without separator) hex-encoded and zero-padded,
+    /// followed by the hex-encoded oracle name (`PRAGMA`) and a zero byte.
+    #[schema(example = "0x534f4c55534400000000000000000000505241474d4100")]
+    pub oracle_asset_id: String,
+
+    /// Price in `StarkEx` 18-decimal format
+    #[schema(example = "128065038090000000000")]
+    pub oracle_price: String,
+
+    /// Public key of the price signer (Pragma's `StarkEx` key)
+    #[schema(example = "0x624EBFB99865079BD58CFCFB925B6F5CE940D6F6E41E118B8A72B7163FB435C")]
+    pub signing_key: String,
+
+    /// Unix timestamp as string
+    #[schema(example = "1741594457")]
+    pub timestamp: String,
+}
+
+/// Price data structure for `StarkEx` oracle integration
+#[derive(Debug, Default, Serialize, Deserialize, ToSchema)]
+pub struct AssetOraclePrice {
+    /// Global asset identifier in `StarkEx` hex format.
+    /// Format: `BASE-QUOTE-DECIMALS` hex-encoded and right-padded with zeros.
+    #[schema(example = "0x534f4c2d5553442d38000000000000")]
+    pub global_asset_id: String,
+
+    /// Median price in `StarkEx` 18-decimal format
+    #[schema(example = "128065038090000007168")]
+    pub median_price: String,
+
+    /// Pragma's signature of the price data in `StarkEx` format
+    #[schema(
+        example = "0x02ba39e956bb5b29a0fab31d61c7678228f79dddee2998b4ff3de5c7a6ae1e770636712af81b0506749555e1439004b4ce905419d2ba946b9bd06eb87de7a167"
+    )]
+    pub signature: String,
+
+    /// Individual signed prices from publishers
+    pub signed_prices: Vec<SignedPublisherPrice>,
+}
+
+/// WebSocket response message for `StarkEx` price updates
+#[derive(Debug, Default, Serialize, Deserialize, ToResponse, ToSchema)]
+#[schema(example = json!({
+    "oracle_prices": [{
+        "global_asset_id": "0x534f4c2d5553442d38000000000000",
+        "median_price": "128065038090000007168",
+        "signature": "0x02ba39e956bb5b29a0fab31d61c7678228f79dddee2998b4ff3de5c7a6ae1e770636712af81b0506749555e1439004b4ce905419d2ba946b9bd06eb87de7a167",
+        "signed_prices": [{
+            "oracle_asset_id": "0x534f4c55534400000000000000000000505241474d4100",
+            "oracle_price": "128065038090000000000",
+            "signing_key": "0x624EBFB99865079BD58CFCFB925B6F5CE940D6F6E41E118B8A72B7163FB435C",
+            "timestamp": "1741594457"
+        }]
+    }],
+    "timestamp": 1_741_594_458
+}))]
+pub struct SubscribeToEntryResponse {
+    /// Array of price data for subscribed assets
+    pub oracle_prices: Vec<AssetOraclePrice>,
+
+    /// Unix timestamp of the update
+    #[schema(value_type = i64, example = 1_741_594_458)]
+    pub timestamp: UnixTimestamp,
+}
+
+#[utoipa::path(
+    get,
+    path = "/node/v1/data/subscribe",
+    tag = "StarkEx Oracle",
+    responses(
+        (status = 101, description = "WebSocket connection upgraded successfully"),
+        (status = 403, description = "Forbidden - Rate limit exceeded", body = EntryError),
+        (status = 500, description = "Internal server error", body = EntryError,
+            example = json!({"error": "Locked: Pragma signer not found"}))
+    ),
+)]
+#[tracing::instrument(skip(state, ws), fields(endpoint_name = "subscribe_to_entry"))]
+pub async fn subscribe_to_entry(
+    ws: WebSocketUpgrade,
+    State(state): State<AppState>,
+    ConnectInfo(client_addr): ConnectInfo<SocketAddr>,
+) -> impl IntoResponse {
+    if state.pragma_signer.is_none() {
+        return (StatusCode::LOCKED, "Locked: Pragma signer not found").into_response();
+    }
+    ws.on_upgrade(move |socket| create_new_subscriber(socket, state, client_addr))
+}
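
The two asset-id formats documented above can be sanity-checked with nothing but hex encoding; the sketch below reproduces the schema examples exactly (my helper, not the crate's `StarkexPrice` implementation):

    // Sketch: decode the `Format:` doc comments above by construction.
    fn to_padded_hex(s: &str, bytes: usize) -> String {
        let mut hex: String = s.bytes().map(|b| format!("{b:02x}")).collect();
        hex.push_str(&"0".repeat(bytes * 2 - hex.len()));
        hex
    }

    fn main() {
        // global_asset_id: "BASE-QUOTE-DECIMALS", right-padded to 15 bytes.
        assert_eq!(
            format!("0x{}", to_padded_hex("SOL-USD-8", 15)),
            "0x534f4c2d5553442d38000000000000"
        );
        // oracle_asset_id: pair without separator padded to 16 bytes,
        // then the oracle name ("PRAGMA") and a trailing zero byte.
        assert_eq!(
            format!("0x{}{}00", to_padded_hex("SOLUSD", 16), to_padded_hex("PRAGMA", 6)),
            "0x534f4c55534400000000000000000000505241474d4100"
        );
    }
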
+
+#[tracing::instrument(
+    skip(socket, app_state),
+    fields(
+        subscriber_id,
+        client_ip = %client_addr.ip()
+    )
+)]
+async fn create_new_subscriber(socket: WebSocket, app_state: AppState, client_addr: SocketAddr) {
+    /// Interval in milliseconds that the channel will update the client with the latest prices.
+    const CHANNEL_UPDATE_INTERVAL_IN_MS: u64 = 500;
+
+    let mut subscriber = match Subscriber::<SubscriptionState>::new(
+        "subscribe_to_entry".into(),
+        socket,
+        client_addr.ip(),
+        Arc::new(app_state),
+        None,
+        CHANNEL_UPDATE_INTERVAL_IN_MS,
+        None,
+    ) {
+        Ok(subscriber) => subscriber,
+        Err(e) => {
+            tracing::error!("Failed to register subscriber: {}", e);
+            return;
+        }
+    };
+
+    // Main event loop for the subscriber
+    let handler = WsEntriesHandler;
+    let status = subscriber.listen(handler).await;
+    if let Err(e) = status {
+        tracing::error!(
+            "[{}] Error occurred while listening to the subscriber: {:?}",
+            subscriber.id,
+            e
+        );
+    }
+}
+
+struct WsEntriesHandler;
+
+#[async_trait::async_trait]
+impl ChannelHandler for WsEntriesHandler {
+    #[tracing::instrument(
+        skip(self, subscriber),
+        fields(
+            subscriber_id = %subscriber.id,
+            msg_type = ?request.msg_type,
+            pairs = ?request.pairs
+        )
+    )]
+    async fn handle_client_msg(
+        &mut self,
+        subscriber: &mut Subscriber<SubscriptionState>,
+        request: SubscriptionRequest,
+    ) -> Result<(), EntryError> {
+        let (existing_spot_pairs, existing_perp_pairs) =
+            only_existing_pairs(&subscriber.app_state.offchain_pool, request.pairs).await;
+        let mut state = subscriber.state.write().await;
+        match request.msg_type {
+            SubscriptionType::Subscribe => {
+                state.add_spot_pairs(existing_spot_pairs);
+                state.add_perp_pairs(existing_perp_pairs);
+            }
+            SubscriptionType::Unsubscribe => {
+                state.remove_spot_pairs(&existing_spot_pairs);
+                state.remove_perp_pairs(&existing_perp_pairs);
+            }
+        }
+        let subscribed_pairs = state.get_fmt_subscribed_pairs();
+        drop(state);
+        // We send an ack message to the client with the subscribed pairs (so
+        // the client knows which pairs are successfully subscribed).
+        let ack = SubscriptionAck {
+            msg_type: request.msg_type,
+            pairs: subscribed_pairs,
+        };
+        if let Err(e) = subscriber.send_msg(ack).await {
+            let error_msg = format!("Message received but could not send ack message: {e}");
+            subscriber.send_err(&error_msg).await;
+        }
+        Ok(())
+    }
+
+    #[tracing::instrument(
+        skip(self, subscriber),
+        fields(
+            subscriber_id = %subscriber.id
+        )
+    )]
+    async fn periodic_interval(
+        &mut self,
+        subscriber: &mut Subscriber<SubscriptionState>,
+    ) -> Result<(), EntryError> {
+        let subscription = subscriber.state.read().await;
+
+        if subscription.is_empty() {
+            return Ok(());
+        }
+        let response = match self
+            .get_subscribed_pairs_medians(&subscriber.app_state, &subscription)
+            .await
+        {
+            Ok(response) => response,
+            Err(e) => {
+                drop(subscription);
+                subscriber.send_err(&e.to_string()).await;
+                return Err(e);
+            }
+        };
+        drop(subscription);
+        if let Err(e) = subscriber.send_msg(response).await {
+            subscriber
+                .send_err(&format!("Could not send prices: {e}"))
+                .await;
+        }
+        Ok(())
+    }
+}
+
+impl WsEntriesHandler {
+    /// Get the current median entries for the subscribed pairs and sign them as Pragma.
+    #[tracing::instrument(
+        skip(self, state, subscription),
+        fields(
+            spot_pairs = ?subscription.get_subscribed_spot_pairs().len(),
+            perp_pairs = ?subscription.get_subscribed_perp_pairs().len()
+        )
+    )]
+    async fn get_subscribed_pairs_medians(
+        &self,
+        state: &AppState,
+        subscription: &SubscriptionState,
+    ) -> Result<SubscribeToEntryResponse, EntryError> {
+        let spot_pairs: Vec<Pair> = subscription
+            .get_subscribed_spot_pairs()
+            .iter()
+            .map(|s| Pair::from_str(s).unwrap())
+            .collect();
+        let perp_pairs: Vec<Pair> = subscription
+            .get_subscribed_perp_pairs()
+            .iter()
+            .map(|s| Pair::from_str(s).unwrap())
+            .collect();
+        let number_of_spot_pairs = spot_pairs.len();
+        let number_of_perp_pairs = perp_pairs.len();
+
+        let mut all_entries = if number_of_spot_pairs == 0 {
+            HashMap::new()
+        } else {
+            let params = get_params_for_websocket(false);
+            let entries = get_latest_entries_multi_pair(state, &spot_pairs, false, &params)
+                .await
+                .map_err(|e| {
+                    EntryError::DatabaseError(format!("Failed to fetch spot data: {e}"))
+                })?;
+
+            // Check if we got entries for all requested spot pairs
+            if entries.len() < number_of_spot_pairs {
+                tracing::error!(
+                    "Missing spot prices for some pairs. Found {} of {} requested pairs.",
+                    entries.len(),
+                    number_of_spot_pairs
+                );
+            }
+
+            entries
+        };
+
+        if number_of_perp_pairs != 0 {
+            let params = get_params_for_websocket(true);
+            let perp_entries = get_latest_entries_multi_pair(state, &perp_pairs, false, &params)
+                .await
+                .map_err(|e| {
+                    EntryError::DatabaseError(format!("Failed to fetch perp data: {e}"))
+                })?;
+
+            // Check if we got entries for all requested perp pairs
+            if perp_entries.len() < number_of_perp_pairs {
+                tracing::error!(
+                    "Missing perp prices for some pairs. Found {} of {} requested pairs.",
+                    perp_entries.len(),
+                    number_of_perp_pairs
+                );
+            }
+
+            // Merge the results
+            all_entries.extend(perp_entries);
+        }
+
+        let mut response: SubscribeToEntryResponse = Default::default();
+        let now = chrono::Utc::now().timestamp_millis();
+
+        let pragma_signer = state
+            .pragma_signer
+            .as_ref()
+            // Should not happen, as the endpoint is disabled if the signer is not found.
+            .ok_or(EntryError::InternalServerError(
+                "No Signer for Pragma".into(),
+            ))?;
+
+        for (pair_id, entry) in all_entries {
+            let starkex_price = StarkexPrice {
+                oracle_name: PRAGMA_ORACLE_NAME_FOR_STARKEX.to_string(),
+                pair_id: pair_id.clone(),
+                timestamp: now as u64,
+                price: entry.median_price.clone(),
+            };
+            let signature =
+                sign_data(pragma_signer, &starkex_price).map_err(|_| EntryError::InvalidSigner)?;
+
+            let mut oracle_price: AssetOraclePrice =
+                AssetOraclePrice::try_from((pair_id, entry, pragma_signer.clone())) // TODO: remove clone
+                    .map_err(|_| {
+                        EntryError::InternalServerError("Could not create Oracle price".into())
+                    })?;
+            oracle_price.signature = signature;
+            response.oracle_prices.push(oracle_price);
+        }
+        response.timestamp = now;
+        Ok(response)
+    }
+}
+
+impl TryFrom<(String, MedianEntry, SigningKey)> for AssetOraclePrice {
+    type Error = ConversionError;
+
+    fn try_from(value: (String, MedianEntry, SigningKey)) -> Result<Self, Self::Error> {
+        let (pair_id, entry, signing_key) = value;
+
+        // Computes IDs
+        let global_asset_id = StarkexPrice::get_global_asset_id(&pair_id)?;
+        let oracle_asset_id =
+            StarkexPrice::get_oracle_asset_id(PRAGMA_ORACLE_NAME_FOR_STARKEX, &pair_id)?;
+
+        let signed_prices_result: Result<Vec<SignedPublisherPrice>, ConversionError> = entry
+            .components
+            .unwrap_or_default()
+            .into_iter()
+            .map(|comp| {
+                let timestamp = comp.timestamp.and_utc().timestamp_millis() as u64;
+                let price = hex_string_to_bigdecimal(&comp.price)
+                    .map_err(|_| ConversionError::StringPriceConversion)?;
+                Ok(SignedPublisherPrice {
+                    oracle_asset_id: format!("0x{oracle_asset_id}"),
+                    oracle_price: price.to_string(),
+                    signing_key: signing_key.verifying_key().scalar().to_hex_string(),
+                    timestamp: timestamp.to_string(),
+                })
+            })
+            .collect();
+        let signed_prices = signed_prices_result?;
+
+        Ok(Self {
+            global_asset_id: format!("0x{global_asset_id}"),
+            median_price: entry.median_price.to_string(),
+            signature: String::new(),
+            signed_prices,
+        })
+    }
+}
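
One design note on the conversion above: `try_from` intentionally returns the price with `signature: String::new()`, keeping the pure data mapping separate from signing; every call site must attach Pragma's signature afterwards, as the handler loop does (condensed; `signer` and `signature` stand in for the loop's locals):

    let mut oracle_price = AssetOraclePrice::try_from((pair_id, entry, signer.clone()))?;
    debug_assert!(oracle_price.signature.is_empty());
    oracle_price.signature = signature; // produced via sign_data(signer, &starkex_price)
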
diff --git a/pragma-node/src/handlers/subscribe_to_price.rs b/pragma-node/src/handlers/websocket/subscribe_to_price.rs
similarity index 56%
rename from pragma-node/src/handlers/subscribe_to_price.rs
rename to pragma-node/src/handlers/websocket/subscribe_to_price.rs
index f20adaa9..e69406ce 100644
--- a/pragma-node/src/handlers/subscribe_to_price.rs
+++ b/pragma-node/src/handlers/websocket/subscribe_to_price.rs
@@ -1,22 +1,25 @@
-use std::collections::HashSet;
+use std::collections::HashMap;
 use std::net::SocketAddr;
+use std::str::FromStr as _;
 use std::sync::Arc;
 
 use axum::extract::ws::{WebSocket, WebSocketUpgrade};
 use axum::extract::{ConnectInfo, State};
 use axum::response::IntoResponse;
+use pragma_common::Pair;
+use pragma_entities::models::entries::timestamp::UnixTimestamp;
 use serde::{Deserialize, Serialize};
 
-use pragma_common::types::DataType;
+use crate::state::AppState;
+use crate::utils::only_existing_pairs;
+use crate::utils::ws::{ChannelHandler, Subscriber, SubscriptionType};
 use pragma_entities::EntryError;
 use utoipa::{ToResponse, ToSchema};
 
-use crate::infra::repositories::entry_repository::MedianEntryWithComponents;
-use crate::utils::only_existing_pairs;
-use crate::utils::pricer::{IndexPricer, Pricer};
-use crate::utils::ws::{ChannelHandler, Subscriber, SubscriptionType};
-use crate::AppState;
-use pragma_common::types::timestamp::UnixTimestamp;
+use super::{
+    SubscriptionAck, SubscriptionRequest, SubscriptionState, get_latest_entries_multi_pair,
+    get_params_for_websocket,
+};
 
 #[derive(Debug, Default, Serialize, Deserialize, ToResponse, ToSchema)]
 pub struct AssetOraclePrice {
@@ -52,16 +55,15 @@ const CHANNEL_UPDATE_INTERVAL_IN_MS: u64 = 500;
     )
 )]
 async fn create_new_subscriber(socket: WebSocket, app_state: AppState, client_addr: SocketAddr) {
-    let (mut subscriber, _) = match Subscriber::<SubscriptionState>::new(
+    let mut subscriber = match Subscriber::<SubscriptionState>::new(
         "subscribe_to_price".into(),
         socket,
         client_addr.ip(),
         Arc::new(app_state),
         None,
         CHANNEL_UPDATE_INTERVAL_IN_MS,
-    )
-    .await
-    {
+        None,
+    ) {
         Ok(subscriber) => subscriber,
         Err(e) => {
             tracing::error!("Failed to register subscriber: {}", e);
@@ -98,32 +100,30 @@ impl ChannelHandler for WsEn
         subscriber: &mut Subscriber<SubscriptionState>,
         request: SubscriptionRequest,
     ) -> Result<(), EntryError> {
-        let (existing_spot_pairs, _existing_perp_pairs) =
+        let (existing_spot_pairs, existing_perp_pairs) =
             only_existing_pairs(&subscriber.app_state.offchain_pool, request.pairs).await;
-        let mut state = subscriber.state.lock().await;
+        let mut state = subscriber.state.write().await;
         match request.msg_type {
             SubscriptionType::Subscribe => {
                 state.add_spot_pairs(existing_spot_pairs);
+                state.add_perp_pairs(existing_perp_pairs);
             }
             SubscriptionType::Unsubscribe => {
                 state.remove_spot_pairs(&existing_spot_pairs);
+                state.remove_perp_pairs(&existing_perp_pairs);
            }
-        };
-        let subscribed_pairs = state.get_subscribed_spot_pairs();
+        }
+        let subscribed_pairs = state.get_fmt_subscribed_pairs();
         drop(state);
         // We send an ack message to the client with the subscribed pairs (so
         // the client knows which pairs are successfully subscribed).
-        if let Ok(ack_message) = serde_json::to_string(&SubscriptionAck {
+        let ack = SubscriptionAck {
             msg_type: request.msg_type,
             pairs: subscribed_pairs,
-        }) {
-            if subscriber.send_msg(ack_message).await.is_err() {
-                let error_msg = "Message received but could not send ack message.";
-                subscriber.send_err(error_msg).await;
-            }
-        } else {
-            let error_msg = "Could not serialize ack message.";
-            subscriber.send_err(error_msg).await;
+        };
+        if let Err(e) = subscriber.send_msg(ack).await {
+            let error_msg = format!("Message received but could not send ack message: {e}");
+            subscriber.send_err(&error_msg).await;
         }
         Ok(())
     }
@@ -138,7 +138,7 @@ impl ChannelHandler for WsEn
         &mut self,
         subscriber: &mut Subscriber<SubscriptionState>,
     ) -> Result<(), EntryError> {
-        let subscription = subscriber.state.lock().await;
+        let subscription = subscriber.state.read().await;
         if subscription.is_empty() {
             return Ok(());
         }
@@ -154,12 +154,10 @@ impl ChannelHandler for WsEn
             }
         };
         drop(subscription);
-        if let Ok(json_response) = serde_json::to_string(&response) {
-            if subscriber.send_msg(json_response).await.is_err() {
-                subscriber.send_err("Could not send prices.").await;
-            }
-        } else {
-            subscriber.send_err("Could not serialize prices.").await;
+        if let Err(e) = subscriber.send_msg(response).await {
+            subscriber
+                .send_err(&format!("Could not send prices: {e}"))
+                .await;
         }
         Ok(())
     }
@@ -170,7 +168,8 @@ impl WsEntriesHandler {
     #[tracing::instrument(
         skip(self, state, subscription),
         fields(
-            subscribed_pairs = ?subscription.get_subscribed_spot_pairs().len()
+            spot_pairs_count = ?subscription.get_subscribed_spot_pairs().len(),
+            perp_pairs_count = ?subscription.get_subscribed_perp_pairs().len()
         )
     )]
     async fn get_subscribed_pairs_medians(
@@ -178,77 +177,75 @@ impl WsEntriesHandler {
         state: &AppState,
         subscription: &SubscriptionState,
     ) -> Result<SubscribeToPriceResponse, EntryError> {
-        let median_entries = self.get_all_entries(state, subscription).await?;
+        // safe to unwrap, cannot fail
+        let spot_pairs: Vec<Pair> = subscription
+            .get_subscribed_spot_pairs()
+            .iter()
+            .map(|s| Pair::from_str(s).unwrap())
+            .collect();
+        let perp_pairs: Vec<Pair> = subscription
+            .get_subscribed_perp_pairs()
+            .iter()
+            .map(|s| Pair::from_str(s).unwrap())
+            .collect();
+        let number_of_spot_pairs = spot_pairs.len();
+        let number_of_perp_pairs = perp_pairs.len();
 
-        let now = chrono::Utc::now().timestamp();
+        // Get spot prices
+        let mut median_entries = if number_of_spot_pairs == 0 {
+            HashMap::new()
+        } else {
+            let params = get_params_for_websocket(false);
+            let entries = get_latest_entries_multi_pair(state, &spot_pairs, false, &params)
+                .await
+                .map_err(|e| {
+                    EntryError::DatabaseError(format!("Failed to fetch spot price data: {e}"))
+                })?;
+            // Check if we got entries for all requested spot pairs
+            if entries.len() < number_of_spot_pairs {
+                tracing::debug!(
+                    "Missing spot prices for some pairs. Found {} of {} requested pairs.",
+                    entries.len(),
+                    number_of_spot_pairs
+                );
+            }
+            entries
+        };
 
+        // Get perp prices and extend the HashMap
+        if number_of_perp_pairs != 0 {
+            let params = get_params_for_websocket(true);
+            let perp_entries = get_latest_entries_multi_pair(state, &perp_pairs, false, &params)
+                .await
+                .map_err(|e| {
+                    EntryError::DatabaseError(format!("Failed to fetch perp price data: {e}"))
+                })?;
+            // Check if we got entries for all requested perp pairs
+            if perp_entries.len() < number_of_perp_pairs {
+                tracing::debug!(
+                    "Missing perp prices for some pairs. Found {} of {} requested pairs.",
+                    perp_entries.len(),
+                    number_of_perp_pairs
+                );
+            }
+
+            // Add perp entries to the result
+            median_entries.extend(perp_entries);
+        }
+
+        // Convert HashMap entries to the expected response format
         let oracle_prices = median_entries
             .into_iter()
-            .map(|entry| AssetOraclePrice {
-                num_sources_aggregated: entry.components.len(),
-                pair_id: entry.pair_id,
+            .map(|(pair_id, entry)| AssetOraclePrice {
+                num_sources_aggregated: entry.num_sources as usize,
+                pair_id,
                 price: entry.median_price.to_string(),
             })
             .collect();
 
         Ok(SubscribeToPriceResponse {
-            timestamp: now,
+            timestamp: chrono::Utc::now().timestamp_millis(),
             oracle_prices,
         })
     }
-
-    /// Get index & mark prices for the subscribed pairs.
-    #[tracing::instrument(skip(self, state, subscription))]
-    async fn get_all_entries(
-        &self,
-        state: &AppState,
-        subscription: &SubscriptionState,
-    ) -> Result<Vec<MedianEntryWithComponents>, EntryError> {
-        let index_pricer = IndexPricer::new(
-            subscription.get_subscribed_spot_pairs(),
-            DataType::SpotEntry,
-        );
-
-        let median_entries = index_pricer.compute(&state.offchain_pool).await?;
-
-        Ok(median_entries)
-    }
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-struct SubscriptionRequest {
-    msg_type: SubscriptionType,
-    pairs: Vec<String>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-struct SubscriptionAck {
-    msg_type: SubscriptionType,
-    pairs: Vec<String>,
-}
-
-#[derive(Debug, Default, Serialize, Deserialize)]
-struct SubscriptionState {
-    spot_pairs: HashSet<String>,
-}
-
-impl SubscriptionState {
-    fn is_empty(&self) -> bool {
-        self.spot_pairs.is_empty()
-    }
-
-    fn add_spot_pairs(&mut self, pairs: Vec<String>) {
-        self.spot_pairs.extend(pairs);
-    }
-
-    fn remove_spot_pairs(&mut self, pairs: &[String]) {
-        for pair in pairs {
-            self.spot_pairs.remove(pair);
-        }
-    }
-
-    /// Get the subscribed spot pairs.
-    fn get_subscribed_spot_pairs(&self) -> Vec<String> {
-        self.spot_pairs.iter().cloned().collect()
-    }
 }
diff --git a/pragma-node/src/utils/aws.rs b/pragma-node/src/infra/cloud/aws.rs
similarity index 86%
rename from pragma-node/src/utils/aws.rs
rename to pragma-node/src/infra/cloud/aws.rs
index e6eac81e..4f803b38 100644
--- a/pragma-node/src/utils/aws.rs
+++ b/pragma-node/src/infra/cloud/aws.rs
@@ -5,35 +5,35 @@ const AWS_PRAGMA_PRIVATE_KEY_SECRET: &str = "pragma-secret-key";
 const AWS_JSON_STARK_PRIVATE_KEY_FIELD: &str = "STARK_PRIVATE_KEY";
 
 #[derive(Debug)]
-pub enum AwsError {
+enum AwsError {
     NoSecretFound,
     DeserializationError,
 }
 
-pub struct PragmaSignerBuilder {
+pub(super) struct PragmaSignerBuilder {
     is_production: bool,
 }
 
 impl PragmaSignerBuilder {
-    pub const fn new() -> Self {
+    pub(super) const fn new() -> Self {
         Self {
             is_production: false,
         }
     }
 
     #[must_use]
-    pub const fn production_mode(mut self) -> Self {
+    pub(super) const fn production_mode(mut self) -> Self {
         self.is_production = true;
         self
     }
 
     #[must_use]
-    pub const fn non_production_mode(mut self) -> Self {
+    pub(super) const fn non_production_mode(mut self) -> Self {
         self.is_production = false;
         self
     }
 
-    pub async fn build(self) -> Option<SigningKey> {
+    pub(super) async fn build(self) -> Option<SigningKey> {
         if self.is_production {
             build_pragma_signer_from_aws().await
         } else {
@@ -48,7 +48,7 @@ impl Default for PragmaSignerBuilder {
     }
 }
 
-pub async fn build_pragma_signer_from_aws() -> Option<SigningKey> {
+async fn build_pragma_signer_from_aws() -> Option<SigningKey> {
     let aws_client = get_aws_client().await;
     let secret_json_response = get_aws_secret(&aws_client, AWS_PRAGMA_PRIVATE_KEY_SECRET)
         .await
diff --git a/pragma-node/src/infra/cloud/gcp.rs b/pragma-node/src/infra/cloud/gcp.rs
new file mode 100644
index 00000000..fb1c1625
--- /dev/null
+++ b/pragma-node/src/infra/cloud/gcp.rs
@@ -0,0 +1,256 @@
+use google_secretmanager1::hyper_rustls::HttpsConnector;
+use google_secretmanager1::{SecretManager, hyper_rustls, hyper_util, yup_oauth2};
+use hyper_util::client::legacy::connect::HttpConnector;
+use hyper_util::rt::TokioExecutor;
+use starknet::{core::types::Felt, signers::SigningKey};
+use tracing::{debug, error, info};
+
+const GCP_PRAGMA_PRIVATE_KEY_SECRET: &str = "pragma-secret-key";
+const GCP_JSON_STARK_PRIVATE_KEY_FIELD: &str = "STARK_PRIVATE_KEY";
+
+type GcpManager = SecretManager<HttpsConnector<HttpConnector>>;
+
+#[derive(Debug)]
+enum GcpError {
+    NoSecretFound,
+    DeserializationError,
+    #[allow(unused)]
+    ConnectionError(String),
+}
+
+pub(super) struct PragmaSignerBuilder {
+    is_production: bool,
+}
+
+impl PragmaSignerBuilder {
+    pub(super) const fn new() -> Self {
+        Self {
+            is_production: false,
+        }
+    }
+
+    #[must_use]
+    pub(super) const fn production_mode(mut self) -> Self {
+        self.is_production = true;
+        self
+    }
+
+    #[must_use]
+    pub(super) const fn non_production_mode(mut self) -> Self {
+        self.is_production = false;
+        self
+    }
+
+    pub(super) async fn build(self) -> Option<SigningKey> {
+        if self.is_production {
+            build_pragma_signer_from_gcp().await
+        } else {
+            Some(SigningKey::from_random())
+        }
+    }
+}
+
+impl Default for PragmaSignerBuilder {
+    fn default() -> Self {
+        Self::new()
+    }
+}
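
Both cloud builders share the same shape, so a startup call site might look like the sketch below (from the parent `cloud` module, since the builder is `pub(super)`; how `is_production` is decided is app-specific and assumed here):

    // Non-production: build() yields a random throwaway SigningKey
    // instead of hitting the secret manager.
    let builder = PragmaSignerBuilder::new();
    let builder = if is_production {
        builder.production_mode()
    } else {
        builder.non_production_mode()
    };
    let pragma_signer: Option<starknet::signers::SigningKey> = builder.build().await;
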
get_gcp_secret(&gcp_client, GCP_PRAGMA_PRIVATE_KEY_SECRET).await {
+            Ok(secret) => {
+                debug!("Successfully retrieved secret from GCP");
+                secret
+            }
+            Err(e) => {
+                error!("Failed to get GCP secret: {:?}", e);
+                return None;
+            }
+        };
+
+    let pragma_secret_key = match get_pragma_secret_key(secret_json_response) {
+        Ok(key) => {
+            debug!("Successfully extracted pragma secret key from JSON");
+            key
+        }
+        Err(e) => {
+            error!("Failed to extract pragma secret key: {:?}", e);
+            return None;
+        }
+    };
+
+    let pragma_secret_key = match Felt::from_hex(&pragma_secret_key) {
+        Ok(felt) => {
+            debug!("Successfully converted secret key to Felt");
+            felt
+        }
+        Err(e) => {
+            error!("Failed to convert secret key to Felt: {:?}", e);
+            return None;
+        }
+    };
+
+    info!("Successfully built pragma signer from GCP");
+    Some(SigningKey::from_secret_scalar(pragma_secret_key))
+}
+
+#[allow(clippy::cognitive_complexity)]
+async fn get_gcp_client() -> Result<GcpManager, GcpError> {
+    debug!("Attempting to create GCP client");
+    // Check if service account credentials are provided
+    let auth = if let Ok(service_account_json) = std::env::var("GOOGLE_APPLICATION_CREDENTIALS") {
+        debug!(
+            "Found GOOGLE_APPLICATION_CREDENTIALS environment variable: {}",
+            service_account_json
+        );
+        // Use service account credentials from file
+        let service_account_key = match yup_oauth2::read_service_account_key(&service_account_json)
+            .await
+        {
+            Ok(key) => {
+                debug!("Successfully read service account key file");
+                key
+            }
+            Err(e) => {
+                error!(
+                    "Failed to read service account file at {}: {}",
+                    service_account_json, e
+                );
+                error!(
+                    "Service account file should contain: type, project_id, private_key_id, private_key, client_email, client_id, auth_uri, token_uri, auth_provider_x509_cert_url, client_x509_cert_url"
+                );
+                return Err(GcpError::ConnectionError(format!(
+                    "Failed to read service account: {e}"
+                )));
+            }
+        };
+
+        match yup_oauth2::ServiceAccountAuthenticator::builder(service_account_key)
+            .build()
+            .await
+        {
+            Ok(authenticator) => {
+                debug!("Successfully created service account authenticator");
+                authenticator
+            }
+            Err(e) => {
+                error!("Failed to create service account authenticator: {e}");
+                return Err(GcpError::ConnectionError(format!(
+                    "Failed to create service account authenticator: {e}"
+                )));
+            }
+        }
+    } else {
+        debug!("No GOOGLE_APPLICATION_CREDENTIALS found, using application default credentials");
+        // Fall back to application default credentials
+        yup_oauth2::InstalledFlowAuthenticator::builder(
+            yup_oauth2::ApplicationSecret::default(),
+            yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
+        )
+        .build()
+        .await
+        .map_err(|e| {
+            error!("Failed to create authenticator: {e}");
+            GcpError::ConnectionError(format!("Failed to create authenticator: {e}"))
+        })?
+    };
+
+    // Create a properly configured connector
+    let https_connector = hyper_rustls::HttpsConnectorBuilder::new()
+        .with_native_roots()
+        .map_err(|e| {
+            error!("Failed to create HTTPS connector: {}", e);
+            GcpError::ConnectionError(e.to_string())
+        })?
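// The service-account branch above reads the standard GCP JSON key file via
// `yup_oauth2::read_service_account_key`; an illustrative shape, with the field
// names taken from the error message above and all values redacted:
//
//     {
//       "type": "service_account",
//       "project_id": "...",
//       "private_key_id": "...",
//       "private_key": "-----BEGIN PRIVATE KEY-----\n...",
//       "client_email": "...@....iam.gserviceaccount.com",
//       "client_id": "...",
//       "auth_uri": "https://accounts.google.com/o/oauth2/auth",
//       "token_uri": "https://oauth2.googleapis.com/token",
//       "auth_provider_x509_cert_url": "...",
//       "client_x509_cert_url": "..."
//     }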
+        .https_or_http()
+        .enable_http1()
+        .build();
+
+    // Create a client with the correct type
+    let client =
+        google_secretmanager1::hyper_util::client::legacy::Client::builder(TokioExecutor::new())
+            .build(https_connector);
+    // Create the SecretManager with the client and authenticator
+    debug!("Successfully created GCP client");
+    Ok(SecretManager::new(client, auth))
+}
+
+async fn get_gcp_secret(client: &GcpManager, secret_name: &str) -> Result<String, GcpError> {
+    debug!("Attempting to retrieve secret: {}", secret_name);
+    let project_id = std::env::var("GCP_PROJECT_ID").map_err(|_| {
+        error!("GCP_PROJECT_ID environment variable not set");
+        GcpError::NoSecretFound
+    })?;
+    let secret_path = format!("projects/{project_id}/secrets/{secret_name}/versions/latest");
+    debug!("Secret path: {}", secret_path);
+
+    let result = client
+        .projects()
+        .secrets_versions_access(&secret_path)
+        .doit()
+        .await
+        .map_err(|e| {
+            error!("Failed to access secret: {:?}", e);
+            GcpError::NoSecretFound
+        })?;
+
+    // Get the payload from the result
+    let payload = result.1.payload.ok_or_else(|| {
+        error!("No payload found in secret response");
+        GcpError::NoSecretFound
+    })?;
+
+    // Get the data from the payload
+    let data = payload.data.ok_or_else(|| {
+        error!("No data found in secret payload");
+        GcpError::NoSecretFound
+    })?;
+
+    debug!("Successfully retrieved secret data");
+    // Convert the data to a string
+    String::from_utf8(data).map_err(|e| {
+        error!("Failed to convert secret data to UTF-8: {}", e);
+        GcpError::DeserializationError
+    })
+}
+
+fn get_pragma_secret_key(secret_json_response: String) -> Result<String, GcpError> {
+    debug!("Attempting to parse secret JSON response");
+    let secret_json: serde_json::Value =
+        serde_json::from_str(&secret_json_response).map_err(|e| {
+            error!("Failed to parse secret JSON: {}", e);
+            GcpError::DeserializationError
+        })?;
+
+    let pragma_secret_key = secret_json
+        .get(GCP_JSON_STARK_PRIVATE_KEY_FIELD)
+        .ok_or_else(|| {
+            error!(
+                "Field {} not found in secret JSON",
+                GCP_JSON_STARK_PRIVATE_KEY_FIELD
+            );
+            GcpError::DeserializationError
+        })?
+        .as_str()
+        .ok_or_else(|| {
+            error!("Secret key is not a string");
+            GcpError::DeserializationError
+        })?;
+
+    debug!("Successfully extracted secret key from JSON");
+    Ok(pragma_secret_key.to_string())
+}
diff --git a/pragma-node/src/infra/cloud/mod.rs b/pragma-node/src/infra/cloud/mod.rs
new file mode 100644
index 00000000..2a1e0a8c
--- /dev/null
+++ b/pragma-node/src/infra/cloud/mod.rs
@@ -0,0 +1,43 @@
+mod aws;
+mod gcp;
+
+use starknet::signers::SigningKey;
+
+use crate::config::CloudEnv;
+
+use aws::PragmaSignerBuilder as AwsPragmaSignerBuilder;
+use gcp::PragmaSignerBuilder as GcpPragmaSignerBuilder;
+
+pub async fn build_signer(cloud_env: CloudEnv, is_production: bool) -> Option<SigningKey> {
+    if is_production {
+        match cloud_env {
+            CloudEnv::Aws => {
+                AwsPragmaSignerBuilder::new()
+                    .production_mode()
+                    .build()
+                    .await
+            }
+            CloudEnv::Gcp => {
+                GcpPragmaSignerBuilder::new()
+                    .production_mode()
+                    .build()
+                    .await
+            }
+        }
+    } else {
+        match cloud_env {
+            CloudEnv::Aws => {
+                AwsPragmaSignerBuilder::new()
+                    .non_production_mode()
+                    .build()
+                    .await
+            }
+            CloudEnv::Gcp => {
+                GcpPragmaSignerBuilder::new()
+                    .non_production_mode()
+                    .build()
+                    .await
+            }
+        }
+    }
+}
diff --git a/pragma-node/src/infra/kafka/mod.rs b/pragma-node/src/infra/kafka/mod.rs
index 61c084ac..ac45b254 100644
--- a/pragma-node/src/infra/kafka/mod.rs
+++ b/pragma-node/src/infra/kafka/mod.rs
@@ -1,18 +1,17 @@
-use lazy_static::lazy_static;
 use rdkafka::config::ClientConfig;
 use rdkafka::producer::future_producer::OwnedDeliveryResult;
 use rdkafka::producer::{FutureProducer, FutureRecord};
+use std::sync::LazyLock;
 
-lazy_static! {
-    static ref KAFKA_PRODUCER: FutureProducer = {
-        let brokers =
-            std::env::var("KAFKA_BROKERS").expect("can't load kafka brokers list from env");
-        ClientConfig::new()
-            .set("bootstrap.servers", &brokers)
-            .create()
-            .expect("can't create kafka producer")
-    };
-}
+pub static KAFKA_PRODUCER: LazyLock<FutureProducer> = LazyLock::new(|| {
+    let brokers = std::env::var("KAFKA_BROKERS").expect("can't load kafka brokers");
+    let producer: FutureProducer = ClientConfig::new()
+        .set("bootstrap.servers", &brokers)
+        .set("message.timeout.ms", "5000")
+        .create()
+        .expect("Producer creation error");
+    producer
+});
 
 pub async fn send_message(topic: &str, message: &[u8], key: &str) -> OwnedDeliveryResult {
     let delivery_status = KAFKA_PRODUCER.send(
diff --git a/pragma-node/src/infra/mod.rs b/pragma-node/src/infra/mod.rs
index 4080565e..971c1c25 100644
--- a/pragma-node/src/infra/mod.rs
+++ b/pragma-node/src/infra/mod.rs
@@ -1,3 +1,4 @@
+pub mod cloud;
 pub mod kafka;
-pub mod redis;
 pub mod repositories;
+pub mod rpc;
diff --git a/pragma-node/src/infra/redis/mod.rs b/pragma-node/src/infra/redis/mod.rs
deleted file mode 100644
index 5f2321e8..00000000
--- a/pragma-node/src/infra/redis/mod.rs
+++ /dev/null
@@ -1,180 +0,0 @@
-use std::sync::Arc;
-
-use moka::future::Cache;
-use redis::{AsyncCommands, JsonAsyncCommands};
-use serde::{Deserialize, Serialize};
-use starknet::core::types::Felt;
-
-use pragma_common::types::{
-    block_id::{BlockId, BlockTag},
-    merkle_tree::{MerkleTree, MerkleTreeError},
-    options::OptionData,
-    Network,
-};
-use pragma_entities::error::RedisError;
-
-pub async fn get_option_data(
-    redis_client: Arc<redis::Client>,
-    network: Network,
-    block_id: BlockId,
-    instrument_name: String,
-) -> Result<OptionData, RedisError> {
-    let block_number = get_block_number_from_id(&redis_client, &network, &block_id).await?;
-
-    let mut conn = redis_client
-        .get_multiplexed_async_connection()
-        .await
-        .map_err(|_| RedisError::Connection)?;
-
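// Hypothetical call site for the `build_signer` dispatch introduced above (the
// `config` accessors are assumptions, not part of this diff):
//
//     let signer: Option<SigningKey> =
//         build_signer(config.cloud_env(), config.is_production()).await;
//     let signer = signer.expect("no signer could be built for production");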
- let instrument_key = format!("{network}/{block_number}/options/{instrument_name}"); - - let result: String = conn - .json_get(instrument_key, "$") - .await - .map_err(|_| RedisError::OptionNotFound(block_number, instrument_name.clone()))?; - - // Redis [json_get] method returns a list of objects - let mut option_response: Vec = serde_json::from_str(&result).map_err(|e| { - tracing::error!("Error while deserialzing: {e}"); - RedisError::InternalServerError - })?; - - if option_response.len() != 1 { - return Err(RedisError::OptionNotFound(block_number, instrument_name)); - } - - // Safe to unwrap, see condition above - Ok(option_response.pop().unwrap()) -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct RawMerkleTree { - leaves: Vec, - root_hash: String, - levels: Vec>, - hash_method: String, -} - -impl TryFrom for MerkleTree { - type Error = MerkleTreeError; - - fn try_from(serialized_tree: RawMerkleTree) -> Result { - let leaves: Vec = serialized_tree - .leaves - .into_iter() - .map(|leaf| Felt::from_hex(&leaf)) - .collect::, _>>() - .map_err(|e| MerkleTreeError::BuildFailed(e.to_string()))?; - - let merkle_tree = Self::new(leaves)?; - - let expected_hash = Felt::from_hex(&serialized_tree.root_hash) - .map_err(|e| MerkleTreeError::BuildFailed(e.to_string()))?; - - if merkle_tree.root_hash != expected_hash { - return Err(MerkleTreeError::BuildFailed(format!( - "Invalid built hash, found {}, expected {}.", - merkle_tree.root_hash, expected_hash - ))); - } - - Ok(merkle_tree) - } -} - -pub async fn get_merkle_tree( - redis_client: Arc, - network: Network, - block_id: BlockId, - merkle_tree_cache: Cache, -) -> Result { - let block_number = get_block_number_from_id(&redis_client, &network, &block_id).await?; - - // Try to retrieve the latest available cached value, and return it if it exists - let maybe_cached_value = merkle_tree_cache.get(&block_number).await; - if let Some(cached_value) = maybe_cached_value { - tracing::debug!("Found a cached value for merkle tree at block {block_number} - using it."); - return Ok(cached_value); - } - tracing::debug!( - "No cache found for merkle tree at block {block_number}, fetching it from Redis." - ); - - let mut conn = redis_client - .get_multiplexed_async_connection() - .await - .map_err(|_| RedisError::Connection)?; - - let instrument_key = format!("{network}/{block_number}/merkle_tree"); - - let result: String = conn - .json_get(instrument_key, "$") - .await - .map_err(|_| RedisError::MerkleTreeNotFound(block_number))?; - - // Redis [json_get] method returns a list of objects - let mut tree_response: Vec = serde_json::from_str(&result).map_err(|e| { - tracing::error!("Error while deserialzing: {e}"); - RedisError::TreeDeserialization - })?; - - if tree_response.len() != 1 { - return Err(RedisError::MerkleTreeNotFound(block_number)); - } - - // Safe to unwrap, see condition above - let merkle_tree = MerkleTree::try_from(tree_response.pop().unwrap()) - .map_err(|_| RedisError::TreeDeserialization)?; - - // Update the cache with the merkle tree for the current block - merkle_tree_cache - .insert(block_number, merkle_tree.clone()) - .await; - - Ok(merkle_tree) -} - -/// Converts a `BlockId` to a block number. 
-async fn get_block_number_from_id(
-    redis_client: &Arc<redis::Client>,
-    network: &Network,
-    block_id: &BlockId,
-) -> Result<u64, RedisError> {
-    let block_number = match block_id {
-        BlockId::Number(nbr) => *nbr,
-        BlockId::Tag(tag) => get_block_number_for_tag(redis_client, network, tag).await?,
-    };
-    Ok(block_number)
-}
-
-/// Retrieve the block number corresponding to the block tag.
-/// For us, the pending block is the latest block available in Redis,
-/// and the latest is the one before.
-async fn get_block_number_for_tag(
-    redis_client: &Arc<redis::Client>,
-    network: &Network,
-    tag: &BlockTag,
-) -> Result<u64, RedisError> {
-    let mut conn = redis_client
-        .get_multiplexed_async_connection()
-        .await
-        .map_err(|_| RedisError::Connection)?;
-
-    let key = format!("{network}/latest_published_block");
-    let latest_published_block: Option<u64> =
-        conn.get(key).await.map_err(|_| RedisError::Connection)?;
-
-    latest_published_block.map_or_else(
-        || Err(RedisError::NoBlocks(network.to_string())),
-        |latest| match tag {
-            BlockTag::Pending => Ok(latest),
-            BlockTag::Latest => {
-                if latest > 0 {
-                    Ok(latest - 1)
-                } else {
-                    Err(RedisError::NoBlocks(network.to_string()))
-                }
-            }
-        },
-    )
-}
diff --git a/pragma-node/src/infra/repositories/entry_repository.rs b/pragma-node/src/infra/repositories/entry_repository.rs
index ae8a9524..84b82adb 100644
--- a/pragma-node/src/infra/repositories/entry_repository.rs
+++ b/pragma-node/src/infra/repositories/entry_repository.rs
@@ -1,49 +1,75 @@
-use std::collections::{HashMap, HashSet};
-
-use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive};
+use bigdecimal::{BigDecimal, FromPrimitive};
 use chrono::{DateTime, NaiveDateTime};
 use diesel::prelude::QueryableByName;
-use diesel::sql_types::{Double, Jsonb, VarChar};
-use diesel::{ExpressionMethods, QueryDsl, Queryable, RunQueryDsl};
-use pragma_common::errors::ConversionError;
-use pragma_common::types::pair::Pair;
+use diesel::sql_types::{Double, Jsonb, Record, VarChar};
+use diesel::{Queryable, RunQueryDsl};
+use pragma_common::starknet::ConversionError;
 use serde::{Deserialize, Serialize};
 use utoipa::ToSchema;
 
+use pragma_common::{AggregationMode, Interval, Pair};
+use pragma_entities::models::entries::timestamp::TimestampError;
+use pragma_entities::{Entry, error::InfraError};
+
+use crate::constants::EIGHTEEN_DECIMALS;
+use crate::constants::currencies::ABSTRACT_CURRENCIES;
 use crate::constants::others::ROUTING_FRESHNESS_THRESHOLD;
-use crate::constants::starkex_ws::{
-    INITAL_INTERVAL_IN_MS, INTERVAL_INCREMENT_IN_MS, MAX_INTERVAL_WITHOUT_ENTRIES,
-    MINIMUM_NUMBER_OF_PUBLISHERS,
-};
-use crate::handlers::get_entry::RoutingParams;
-use crate::handlers::subscribe_to_entry::{AssetOraclePrice, SignedPublisherPrice};
-use crate::utils::sql::{
-    get_expiration_timestamp_filter, get_interval_specifier, get_table_suffix,
-};
-use crate::utils::{convert_via_quote, normalize_to_decimals};
-use pragma_common::signing::starkex::StarkexPrice;
-use pragma_common::types::{AggregationMode, DataType, Interval};
-use pragma_entities::{
-    error::{adapt_infra_error, InfraError},
-    schema::currencies,
-    Currency, Entry,
-};
+use crate::handlers::get_entry::EntryParams;
+use crate::utils::convert_via_quote;
+use crate::utils::sql::{get_interval_specifier, get_table_suffix};
+
+use super::utils::HexFormat;
 
 #[derive(Debug, Serialize, Queryable)]
 pub struct MedianEntry {
     pub time: NaiveDateTime,
     pub median_price: BigDecimal,
     pub num_sources: i64,
+    pub components: Option<Vec<Component>>,
+}
+
+#[derive(Serialize, QueryableByName, Clone, Debug)]
+pub struct MedianEntryRawBase {
+    #[diesel(sql_type = diesel::sql_types::Timestamptz)]
+    pub time: NaiveDateTime,
+    #[diesel(sql_type = diesel::sql_types::Numeric)]
+    pub median_price: BigDecimal,
+    #[diesel(sql_type = diesel::sql_types::BigInt)]
+    pub num_sources: i64,
+}
+
+// Extended struct with components (non-optional)
+#[derive(Serialize, QueryableByName, Clone, Debug)]
+pub struct MedianEntryRawWithComponents {
+    #[diesel(sql_type = diesel::sql_types::Timestamptz)]
+    pub time: NaiveDateTime,
+
+    #[diesel(sql_type = diesel::sql_types::Numeric)]
+    pub median_price: BigDecimal,
+
+    #[diesel(sql_type = diesel::sql_types::BigInt)]
+    pub num_sources: i64,
+
+    #[diesel(sql_type = diesel::sql_types::Array<Record<(VarChar, diesel::sql_types::Numeric, diesel::sql_types::Timestamptz)>>)]
+    pub components: Vec<(String, BigDecimal, NaiveDateTime)>,
 }
 
+// Extended struct with components (non-optional)
 #[derive(Serialize, QueryableByName, Clone, Debug)]
-pub struct MedianEntryRaw {
+pub struct ExtendedMedianEntryRaw {
+    #[diesel(sql_type = diesel::sql_types::Text)]
+    pub pair_id: String,
     #[diesel(sql_type = diesel::sql_types::Timestamptz)]
     pub time: NaiveDateTime,
+
+    #[diesel(sql_type = diesel::sql_types::Numeric)]
     pub median_price: BigDecimal,
+
+    #[diesel(sql_type = diesel::sql_types::BigInt)]
     pub num_sources: i64,
+
+    #[diesel(sql_type = diesel::sql_types::Array<Record<(VarChar, diesel::sql_types::Numeric, diesel::sql_types::Timestamptz)>>)]
+    pub components: Vec<(String, BigDecimal, NaiveDateTime)>,
 }
 
 #[derive(Serialize, QueryableByName, Clone, Debug)]
@@ -56,296 +82,399 @@
 pub async fn routing(
     pool: &deadpool_diesel::postgres::Pool,
     is_routing: bool,
     pair: &Pair,
-    routing_params: &RoutingParams,
-) -> Result<(MedianEntry, u32), InfraError> {
+    entry_params: &EntryParams,
+) -> Result<MedianEntry, InfraError> {
     // If we have entries for the pair_id and the latest entry is fresh enough,
     // Or if we are not routing, we can return the price directly.
     if !is_routing
         || (pair_id_exist(pool, pair).await?
-            && get_last_updated_timestamp(pool, pair.to_pair_id(), routing_params.timestamp)
+            && get_last_updated_timestamp(pool, pair.to_pair_id(), entry_params.timestamp)
                 .await?
-                .unwrap_or(NaiveDateTime::default())
+                .unwrap_or(chrono::Utc::now().naive_utc())
                 .and_utc()
                 .timestamp()
-            >= routing_params.timestamp - ROUTING_FRESHNESS_THRESHOLD)
+            >= entry_params.timestamp - ROUTING_FRESHNESS_THRESHOLD)
     {
-        return get_price_and_decimals(pool, pair, routing_params).await;
+        return get_price(pool, pair, entry_params).await;
     }
 
-    (find_alternative_pair_price(pool, pair, routing_params).await)
-        .map_or_else(|_| Err(InfraError::NotFound), Ok)
+    (find_alternative_pair_price(pool, pair, entry_params).await)
+        .map_or_else(|_| Err(InfraError::RoutingError(pair.to_pair_id())), Ok)
 }
 
 pub fn calculate_rebased_price(
-    base_result: (MedianEntry, u32),
-    quote_result: (MedianEntry, u32),
-) -> Result<(MedianEntry, u32), InfraError> {
-    let (base_entry, base_decimals) = base_result;
-    let (quote_entry, quote_decimals) = quote_result;
-
+    base_entry: MedianEntry,
+    quote_entry: MedianEntry,
+) -> Result<MedianEntry, InfraError> {
     if quote_entry.median_price == BigDecimal::from(0) {
         return Err(InfraError::InternalServerError);
     }
 
-    let (rebase_price, decimals) = if base_decimals < quote_decimals {
-        let normalized_base_price =
-            normalize_to_decimals(base_entry.median_price, base_decimals, quote_decimals);
-        (
-            convert_via_quote(
-                normalized_base_price,
-                quote_entry.median_price,
-                quote_decimals,
-            )?,
-            quote_decimals,
-        )
-    } else {
-        let normalized_quote_price =
-            normalize_to_decimals(quote_entry.median_price, quote_decimals, base_decimals);
-        (
-            convert_via_quote(
-                base_entry.median_price,
-                normalized_quote_price,
-                base_decimals,
-            )?,
-            base_decimals,
-        )
-    };
+    let rebase_price = convert_via_quote(
+        base_entry.median_price,
+        quote_entry.median_price,
+        EIGHTEEN_DECIMALS,
+    )?;
+
     let max_timestamp = std::cmp::max(
         base_entry.time.and_utc().timestamp(),
         quote_entry.time.and_utc().timestamp(),
     );
     let num_sources = std::cmp::max(base_entry.num_sources, quote_entry.num_sources);
     let new_timestamp = DateTime::from_timestamp(max_timestamp, 0)
-        .ok_or(InfraError::InvalidTimestamp(format!(
-            "Cannot convert to DateTime: {max_timestamp}"
-        )))?
+        .ok_or(InfraError::InvalidTimestamp(
+            TimestampError::ToDatetimeErrorI64(max_timestamp),
+        ))?
        .naive_utc();
 
     let median_entry = MedianEntry {
         time: new_timestamp,
         median_price: rebase_price,
         num_sources,
+        components: None,
     };
 
-    Ok((median_entry, decimals))
+    Ok(median_entry)
 }
 
 async fn find_alternative_pair_price(
     pool: &deadpool_diesel::postgres::Pool,
     pair: &Pair,
-    routing_params: &RoutingParams,
-) -> Result<(MedianEntry, u32), InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
-
-    let alternative_currencies = conn
-        .interact(Currency::get_abstract_all)
-        .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
-
-    for alt_currency in alternative_currencies {
-        let base_alt_pair = Pair::from((pair.base.clone(), alt_currency.clone()));
-        let alt_quote_pair = Pair::from((pair.quote.clone(), alt_currency.clone()));
+    entry_params: &EntryParams,
+) -> Result<MedianEntry, InfraError> {
+    for alt_currency in ABSTRACT_CURRENCIES {
+        let base_alt_pair = Pair::from((pair.base.clone(), alt_currency.to_string()));
+        let alt_quote_pair = Pair::from((pair.quote.clone(), alt_currency.to_string()));
 
         if pair_id_exist(pool, &base_alt_pair.clone()).await?
             && pair_id_exist(pool, &alt_quote_pair.clone()).await?
         {
-            let base_alt_result =
-                get_price_and_decimals(pool, &base_alt_pair, routing_params).await?;
-            let alt_quote_result =
-                get_price_and_decimals(pool, &alt_quote_pair, routing_params).await?;
+            let base_alt_result = get_price(pool, &base_alt_pair, entry_params).await?;
+            let alt_quote_result = get_price(pool, &alt_quote_pair, entry_params).await?;
 
             return calculate_rebased_price(base_alt_result, alt_quote_result);
         }
     }
 
-    Err(InfraError::NotFound)
+    Err(InfraError::RoutingError(pair.to_pair_id()))
 }
 
 async fn pair_id_exist(
     pool: &deadpool_diesel::postgres::Pool,
     pair: &Pair,
 ) -> Result<bool, InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
 
     let pair_str = pair.to_string();
     let res = conn
         .interact(move |conn| Entry::exists(conn, pair_str))
         .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
 
     Ok(res)
 }
 
-async fn get_price_and_decimals(
+async fn get_price(
     pool: &deadpool_diesel::postgres::Pool,
     pair: &Pair,
-    routing_params: &RoutingParams,
-) -> Result<(MedianEntry, u32), InfraError> {
-    let entry = match routing_params.aggregation_mode {
-        AggregationMode::Median => {
-            get_median_price(pool, pair.to_pair_id(), routing_params).await?
-        }
-        AggregationMode::Twap => get_twap_price(pool, pair.to_pair_id(), routing_params).await?,
-        AggregationMode::Mean => Err(InfraError::InternalServerError)?,
+    entry_params: &EntryParams,
+) -> Result<MedianEntry, InfraError> {
+    let entry = match entry_params.aggregation_mode {
+        AggregationMode::Median => get_median_price(pool, pair.to_pair_id(), entry_params).await?,
+        AggregationMode::Twap => get_twap_price(pool, pair.to_pair_id(), entry_params).await?,
     };
 
-    let decimals = get_decimals(pool, pair).await?;
-
-    Ok((entry, decimals))
+    Ok(entry)
 }
 
-pub async fn get_all_currencies_decimals(
+pub async fn get_twap_price(
     pool: &deadpool_diesel::postgres::Pool,
-) -> Result<HashMap<String, BigDecimal>, InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
-    let result_vec = conn
-        .interact(Currency::get_decimals_all)
-        .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
-
-    let mut currencies_decimals_map = HashMap::new();
-    for (name, decimals) in result_vec {
-        currencies_decimals_map.insert(name, decimals);
+    pair_id: String,
+    entry_params: &EntryParams,
+) -> Result<MedianEntry, InfraError> {
+    if entry_params.with_components {
+        get_twap_price_with_components(pool, pair_id, entry_params).await
+    } else {
+        get_twap_price_without_components(pool, pair_id, entry_params).await
     }
-
-    Ok(currencies_decimals_map)
 }
 
-pub async fn get_twap_price(
+pub async fn get_twap_price_without_components(
     pool: &deadpool_diesel::postgres::Pool,
     pair_id: String,
-    routing_params: &RoutingParams,
+    entry_params: &EntryParams,
 ) -> Result<MedianEntry, InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
 
     let sql_request: String = format!(
-        r#"
+        r"
         -- query the materialized realtime view
         SELECT
             bucket AS time,
-            price_twap AS median_price,
+            twap_price AS median_price,
             num_sources
         FROM
-            twap_{}_agg{}
+            twap_{}_{}
         WHERE
             pair_id = $1
         AND
             bucket <= $2
-        {}
         ORDER BY
             time DESC
         LIMIT 1;
-        "#,
-        get_interval_specifier(routing_params.interval, true)?,
-        get_table_suffix(routing_params.data_type)?,
-        get_expiration_timestamp_filter(routing_params.data_type, &routing_params.expiry)?,
+        ",
+        get_interval_specifier(entry_params.interval, true)?,
+        get_table_suffix(entry_params.data_type)?,
     );
 
-    let date_time = DateTime::from_timestamp(routing_params.timestamp, 0).ok_or(
-        InfraError::InvalidTimestamp(format!(
-            "Cannot convert to DateTime: {}",
-            routing_params.timestamp
-        )),
+    let date_time = DateTime::from_timestamp(entry_params.timestamp, 0).ok_or(
+        InfraError::InvalidTimestamp(TimestampError::ToDatetimeErrorI64(entry_params.timestamp)),
     )?;
 
+    let p = pair_id.clone();
     let raw_entry = conn
         .interact(move |conn| {
             diesel::sql_query(&sql_request)
-                .bind::<VarChar, _>(pair_id)
+                .bind::<VarChar, _>(p)
                 .bind::<diesel::sql_types::Timestamptz, _>(date_time)
-                .load::<MedianEntryRaw>(conn)
+                .load::<MedianEntryRawBase>(conn)
         })
         .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
 
-    let raw_entry = raw_entry.first().ok_or(InfraError::NotFound)?;
+    let raw_entry = raw_entry
+        .first()
+        .ok_or(InfraError::EntryNotFound(pair_id))?;
 
     let entry: MedianEntry = MedianEntry {
         time: raw_entry.time,
         median_price: raw_entry.median_price.clone(),
         num_sources: raw_entry.num_sources,
+        components: None,
     };
 
     Ok(entry)
 }
 
+// Function to get TWAP price with components
+pub async fn get_twap_price_with_components(
+    pool: &deadpool_diesel::postgres::Pool,
+    pair_id: String,
+    entry_params: &EntryParams,
+) -> Result<MedianEntry, InfraError> {
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
+
+    let sql_request: String = format!(
+        r"
+        SELECT
+            bucket AS time,
+            price_twap AS median_price,
+            num_sources,
+            components
+        FROM
+            twap_{}_{}
+        WHERE
+            pair_id = $1
+        AND
+            bucket <= $2
+        ORDER BY
+            time DESC
+        LIMIT 1;
+        ",
+        get_interval_specifier(entry_params.interval, true)?,
+        get_table_suffix(entry_params.data_type)?,
+    );
+
+    let date_time = DateTime::from_timestamp(entry_params.timestamp, 0).ok_or(
+        InfraError::InvalidTimestamp(TimestampError::ToDatetimeErrorI64(entry_params.timestamp)),
+    )?;
+
+    let p = pair_id.clone();
+    let raw_entry = conn
+        .interact(move |conn| {
+            diesel::sql_query(&sql_request)
+                .bind::<VarChar, _>(p)
+                .bind::<diesel::sql_types::Timestamptz, _>(date_time)
+                .load::<MedianEntryRawWithComponents>(conn)
+        })
+        .await
+        .map_err(InfraError::DbInteractionError)?
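// The aggregate views queried above and below follow a `{aggregation}_{interval}_{type}`
// naming scheme; assuming `get_interval_specifier` yields strings such as "1_min"
// and `get_table_suffix` yields "spot" (only `median_1_min_spot` is visible in this
// diff), the formatted view name would be, for example:
//
//     let view = format!("twap_{}_{}", "1_min", "spot"); // "twap_1_min_spot"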
+        .map_err(InfraError::DbResultError)?;
+
+    let raw_entry = raw_entry
+        .first()
+        .ok_or(InfraError::EntryNotFound(pair_id))?;
+
+    // Convert components if they exist
+    let components = (!raw_entry.components.is_empty()).then(|| {
+        raw_entry
+            .components
+            .iter()
+            .map(ComponentConverter::to_component)
+            .collect()
+    });
+
+    Ok(MedianEntry {
+        time: raw_entry.time,
+        median_price: raw_entry.median_price.clone(),
+        num_sources: raw_entry.num_sources,
+        components,
+    })
+}
+
+// Wrapper function for backward compatibility
 pub async fn get_median_price(
     pool: &deadpool_diesel::postgres::Pool,
     pair_id: String,
-    routing_params: &RoutingParams,
+    entry_params: &EntryParams,
 ) -> Result<MedianEntry, InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
+    if entry_params.with_components {
+        get_median_price_with_components(pool, pair_id, entry_params).await
+    } else {
+        get_median_price_without_components(pool, pair_id, entry_params).await
+    }
+}
+
+// Function to get median price without components
+pub async fn get_median_price_without_components(
+    pool: &deadpool_diesel::postgres::Pool,
+    pair_id: String,
+    entry_params: &EntryParams,
+) -> Result<MedianEntry, InfraError> {
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
 
     let sql_request: String = format!(
-        r#"
+        r"
         -- query the materialized realtime view
         SELECT
             bucket AS time,
             median_price,
             num_sources
         FROM
-            price_{}_agg{}
+            median_{}_{}
         WHERE
             pair_id = $1
         AND
             bucket <= $2
-        {}
         ORDER BY
             time DESC
         LIMIT 1;
-        "#,
-        get_interval_specifier(routing_params.interval, false)?,
-        get_table_suffix(routing_params.data_type)?,
-        get_expiration_timestamp_filter(routing_params.data_type, &routing_params.expiry)?,
+        ",
+        get_interval_specifier(entry_params.interval, false)?,
+        get_table_suffix(entry_params.data_type)?,
     );
 
-    let date_time = DateTime::from_timestamp(routing_params.timestamp, 0).ok_or(
-        InfraError::InvalidTimestamp(format!(
-            "Cannot convert to DateTime: {}",
-            routing_params.timestamp
-        )),
+    let date_time = DateTime::from_timestamp(entry_params.timestamp, 0).ok_or(
+        InfraError::InvalidTimestamp(TimestampError::ToDatetimeErrorI64(entry_params.timestamp)),
     )?;
 
+    let p = pair_id.clone();
     let raw_entry = conn
         .interact(move |conn| {
             diesel::sql_query(&sql_request)
-                .bind::<VarChar, _>(pair_id)
+                .bind::<VarChar, _>(p)
                .bind::<diesel::sql_types::Timestamptz, _>(date_time)
-                .load::<MedianEntryRaw>(conn)
+                .load::<MedianEntryRawBase>(conn)
         })
         .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
+        .map_err(InfraError::DbInteractionError)?
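// The `bool::then` pattern used in the conversion above maps an empty SQL array to
// `None` rather than `Some(vec![])`; a self-contained sketch:
//
//     let components: Vec<i32> = vec![];
//     let opt: Option<Vec<i32>> = (!components.is_empty()).then(|| components.clone());
//     assert!(opt.is_none());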
+        .map_err(InfraError::DbResultError)?;
 
-    let raw_entry = raw_entry.first().ok_or(InfraError::NotFound)?;
+    let raw_entry = raw_entry
+        .first()
+        .ok_or(InfraError::EntryNotFound(pair_id))?;
 
     let entry: MedianEntry = MedianEntry {
         time: raw_entry.time,
         median_price: raw_entry.median_price.clone(),
         num_sources: raw_entry.num_sources,
+        components: None,
     };
 
     Ok(entry)
 }
 
-pub async fn get_median_entries_1_min_between(
+// Function to get median price with components
+pub async fn get_median_price_with_components(
+    pool: &deadpool_diesel::postgres::Pool,
+    pair_id: String,
+    entry_params: &EntryParams,
+) -> Result<MedianEntry, InfraError> {
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
+    let sql_request: String = format!(
+        r"
+        SELECT
+            bucket AS time,
+            median_price,
+            num_sources,
+            components
+        FROM
+            median_{}_{}
+        WHERE
+            pair_id = $1
+        AND
+            bucket <= $2
+        ORDER BY
+            time DESC
+        LIMIT 1;
+        ",
+        get_interval_specifier(entry_params.interval, false)?,
+        get_table_suffix(entry_params.data_type)?,
+    );
+
+    let date_time = DateTime::from_timestamp(entry_params.timestamp, 0).ok_or(
+        InfraError::InvalidTimestamp(TimestampError::ToDatetimeErrorI64(entry_params.timestamp)),
+    )?;
+
+    let p = pair_id.clone();
+    let raw_entry = conn
+        .interact(move |conn| {
+            diesel::sql_query(&sql_request)
+                .bind::<VarChar, _>(p)
+                .bind::<diesel::sql_types::Timestamptz, _>(date_time)
+                .load::<MedianEntryRawWithComponents>(conn)
+        })
+        .await
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
+
+    let raw_entry = raw_entry
+        .first()
+        .ok_or(InfraError::EntryNotFound(pair_id))?;
+
+    // Convert components if they exist
+    let components = (!raw_entry.components.is_empty()).then(|| {
+        raw_entry
+            .components
+            .iter()
+            .map(ComponentConverter::to_component)
+            .collect()
+    });
+
+    Ok(MedianEntry {
+        time: raw_entry.time,
+        median_price: raw_entry.median_price.clone(),
+        num_sources: raw_entry.num_sources,
+        components,
+    })
+}
+
+pub async fn get_spot_median_entries_1_min_between(
     pool: &deadpool_diesel::postgres::Pool,
     pair_id: String,
     start_timestamp: u64,
     end_timestamp: u64,
 ) -> Result<Vec<MedianEntry>, InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
 
     #[allow(clippy::cast_possible_wrap)]
     let start_datetime = DateTime::from_timestamp(start_timestamp as i64, 0).ok_or(
-        InfraError::InvalidTimestamp(format!("Cannot convert to DateTime: {start_timestamp}")),
+        InfraError::InvalidTimestamp(TimestampError::ToDatetimeErrorU64(start_timestamp)),
     )?;
     #[allow(clippy::cast_possible_wrap)]
     let end_datetime = DateTime::from_timestamp(end_timestamp as i64, 0).ok_or(
-        InfraError::InvalidTimestamp(format!("Cannot convert to DateTime: {start_timestamp}")),
+        InfraError::InvalidTimestamp(TimestampError::ToDatetimeErrorU64(end_timestamp)),
     )?;
 
     let raw_sql = r"
@@ -353,7 +482,7 @@
             bucket AS time,
             median_price,
             num_sources
-        FROM price_1_min_agg
+        FROM median_1_min_spot
         WHERE
             pair_id = $1
         AND
@@ -368,11 +497,11 @@
             .bind::<VarChar, _>(pair_id)
             .bind::<diesel::sql_types::Timestamptz, _>(start_datetime)
             .bind::<diesel::sql_types::Timestamptz, _>(end_datetime)
-            .load::<MedianEntryRaw>(conn)
+            .load::<MedianEntryRawBase>(conn)
         })
         .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
 
     let entries: Vec<MedianEntry> = raw_entries
         .into_iter()
@@ -380,6 +509,7 @@
             time: raw_entry.time,
             median_price: raw_entry.median_price,
             num_sources: raw_entry.num_sources,
+            components: None,
         })
         .collect();
 
@@ -389,40 +519,39 @@
 pub async fn get_median_prices_between(
     pool: &deadpool_diesel::postgres::Pool,
     pair_id: String,
-    routing_params: RoutingParams,
+    entry_params: EntryParams,
     start_timestamp: u64,
     end_timestamp: u64,
 ) -> Result<Vec<MedianEntry>, InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
 
     #[allow(clippy::cast_possible_wrap)]
     let start_datetime = DateTime::from_timestamp(start_timestamp as i64, 0).ok_or(
-        InfraError::InvalidTimestamp(format!("Cannot convert to DateTime: {start_timestamp}")),
+        InfraError::InvalidTimestamp(TimestampError::ToDatetimeErrorU64(start_timestamp)),
     )?;
     #[allow(clippy::cast_possible_wrap)]
     let end_datetime = DateTime::from_timestamp(end_timestamp as i64, 0).ok_or(
-        InfraError::InvalidTimestamp(format!("Cannot convert to DateTime: {end_timestamp}")),
+        InfraError::InvalidTimestamp(TimestampError::ToDatetimeErrorU64(end_timestamp)),
     )?;
 
     let sql_request: String = format!(
-        r#"
+        r"
         -- query the materialized realtime view
         SELECT
             bucket AS time,
             median_price,
-            num_sources
+            num_sources,
+            components
         FROM
-            price_{}_agg{}
+            median_{}_{}
         WHERE
             pair_id = $1
         AND
             bucket BETWEEN $2 AND $3
-        {}
         ORDER BY
             time DESC;
-        "#,
-        get_interval_specifier(routing_params.interval, false)?,
-        get_table_suffix(routing_params.data_type)?,
-        get_expiration_timestamp_filter(routing_params.data_type, &routing_params.expiry)?,
+        ",
+        get_interval_specifier(entry_params.interval, false)?,
+        get_table_suffix(entry_params.data_type)?,
     );
 
     let raw_entries = conn
@@ -431,18 +560,30 @@
             .bind::<VarChar, _>(pair_id)
             .bind::<diesel::sql_types::Timestamptz, _>(start_datetime)
             .bind::<diesel::sql_types::Timestamptz, _>(end_datetime)
-            .load::<MedianEntryRaw>(conn)
+            .load::<MedianEntryRawWithComponents>(conn)
         })
         .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
 
     let entries: Vec<MedianEntry> = raw_entries
         .into_iter()
-        .map(|raw_entry| MedianEntry {
-            time: raw_entry.time,
-            median_price: raw_entry.median_price,
-            num_sources: raw_entry.num_sources,
+        .map(|raw_entry| {
+            // Process components only if they exist
+            let components = (!raw_entry.components.is_empty()).then(|| {
+                raw_entry
+                    .components
+                    .iter()
+                    .map(ComponentConverter::to_component)
+                    .collect()
+            });
+
+            MedianEntry {
+                time: raw_entry.time,
+                median_price: raw_entry.median_price,
+                num_sources: raw_entry.num_sources,
+                components,
+            }
         })
         .collect();
 
     Ok(entries)
 }
 
@@ -452,40 +593,39 @@
 pub async fn get_twap_prices_between(
     pool: &deadpool_diesel::postgres::Pool,
     pair_id: String,
-    routing_params: RoutingParams,
+    entry_params: EntryParams,
     start_timestamp: u64,
     end_timestamp: u64,
 ) -> Result<Vec<MedianEntry>, InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
 
     #[allow(clippy::cast_possible_wrap)]
     let start_datetime = DateTime::from_timestamp(start_timestamp as i64, 0).ok_or(
-        InfraError::InvalidTimestamp(format!("Cannot convert to DateTime: {start_timestamp}")),
+        InfraError::InvalidTimestamp(TimestampError::ToDatetimeErrorU64(start_timestamp)),
     )?;
     #[allow(clippy::cast_possible_wrap)]
     let end_datetime = DateTime::from_timestamp(end_timestamp as i64, 0).ok_or(
-        InfraError::InvalidTimestamp(format!("Cannot convert to DateTime: {end_timestamp}")),
+        InfraError::InvalidTimestamp(TimestampError::ToDatetimeErrorU64(end_timestamp)),
     )?;
 
     let sql_request: String = format!(
-        r#"
+        r"
         -- query the materialized realtime view
         SELECT
             bucket AS time,
             price_twap AS median_price,
-            num_sources
+            num_sources,
+            components
         FROM
-            twap_{}_agg{}
+            twap_{}_{}
         WHERE
             pair_id = $1
         AND
             bucket BETWEEN $2 AND $3
-        {}
         ORDER BY
             time DESC;
-        "#,
-        get_interval_specifier(routing_params.interval, true)?,
-        get_table_suffix(routing_params.data_type)?,
-        get_expiration_timestamp_filter(routing_params.data_type, &routing_params.expiry)?,
+        ",
+        get_interval_specifier(entry_params.interval, true)?,
+        get_table_suffix(entry_params.data_type)?,
     );
 
     let raw_entries = conn
@@ -494,61 +634,91 @@
             .bind::<VarChar, _>(pair_id)
             .bind::<diesel::sql_types::Timestamptz, _>(start_datetime)
             .bind::<diesel::sql_types::Timestamptz, _>(end_datetime)
-            .load::<MedianEntryRaw>(conn)
+            .load::<MedianEntryRawWithComponents>(conn)
         })
         .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
 
     let entries: Vec<MedianEntry> = raw_entries
         .into_iter()
-        .map(|raw_entry| MedianEntry {
-            time: raw_entry.time,
-            median_price: raw_entry.median_price,
-            num_sources: raw_entry.num_sources,
+        .map(|raw_entry| {
+            let components = (!raw_entry.components.is_empty()).then(|| {
+                raw_entry
+                    .components
+                    .iter()
+                    .map(ComponentConverter::to_component)
+                    .collect()
+            });
+            MedianEntry {
+                time: raw_entry.time,
+                median_price: raw_entry.median_price,
+                num_sources: raw_entry.num_sources,
+                components,
+            }
         })
         .collect();
 
     Ok(entries)
 }
 
-pub async fn get_decimals(
-    pool: &deadpool_diesel::postgres::Pool,
-    pair: &Pair,
-) -> Result<u32, InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
-
-    let (quote, base) = pair.as_tuple();
-
-    // Fetch currency in DB
-    let quote_decimals: BigDecimal = conn
-        .interact(move |conn| {
-            currencies::table
-                .filter(currencies::name.eq(quote))
-                .select(currencies::decimals)
-                .first::<BigDecimal>(conn)
-        })
-        .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
-    let base_decimals: BigDecimal = conn
-        .interact(move |conn| {
-            currencies::table
-                .filter(currencies::name.eq(base))
-                .select(currencies::decimals)
-                .first::<BigDecimal>(conn)
-        })
-        .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
-
-    // Take the minimum of the two
-    let decimals = std::cmp::min(
-        quote_decimals.to_u32().unwrap(),
-        base_decimals.to_u32().unwrap(),
-    );
-
-    Ok(decimals)
-}
-
+// struct to hold the individual price data
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct Component {
+    pub source: String,
+    pub price: String,
+    pub timestamp: NaiveDateTime,
+}
+
+impl From<Component> for crate::handlers::get_entry::EntryComponent {
+    fn from(individual: Component) -> Self {
+        Self {
+            source: individual.source,
+            price: individual.price,
+            timestamp: individual.timestamp.and_utc().timestamp_millis() as u64,
+        }
+    }
+}
+
+// Reverse conversion
+impl TryFrom<crate::handlers::get_entry::EntryComponent> for Component {
+    type Error = InfraError;
+
+    fn try_from(
+        component: crate::handlers::get_entry::EntryComponent,
+    ) -> Result<Self, Self::Error> {
+        let price = component
+            .price
+            .parse::<BigDecimal>()
+            .map_err(|_| InfraError::InternalServerError)?;
+        #[allow(clippy::cast_possible_wrap)]
+        let timestamp = DateTime::from_timestamp_millis(component.timestamp as i64)
+            .ok_or(InfraError::InvalidTimestamp(
+                #[allow(clippy::cast_possible_wrap)]
+                TimestampError::ToDatetimeErrorI64(component.timestamp as i64),
+            ))?
+            .naive_utc();
+
+        Ok(Self {
+            source: component.source,
+            price: price.to_hex_string(),
+            timestamp,
+        })
+    }
+}
+
+trait ComponentConverter {
+    fn to_component(&self) -> Component;
+}
+
+impl ComponentConverter for (String, BigDecimal, NaiveDateTime) {
+    fn to_component(&self) -> Component {
+        Component {
+            source: self.0.clone(),
+            price: self.1.to_hex_string(),
+            timestamp: self.2,
+        }
+    }
+}
 
 pub async fn get_last_updated_timestamp(
@@ -556,11 +726,11 @@
     pair_id: String,
     max_timestamp: i64,
 ) -> Result<Option<NaiveDateTime>, InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
 
     conn.interact(move |conn| Entry::get_last_updated_timestamp(conn, pair_id, max_timestamp))
         .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)
+        .map_err(InfraError::DbInteractionError)?
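// Round-trip sketch for the conversions above, with hypothetical values: `Component`
// stores a hex-formatted price (via `HexFormat::to_hex_string`) and a `NaiveDateTime`,
// while the handler-facing `EntryComponent` carries a millisecond timestamp:
//
//     let c = Component {
//         source: "BINANCE".to_string(),
//         price: "0x5f5e100".to_string(), // 100_000_000 in hex
//         timestamp: chrono::Utc::now().naive_utc(),
//     };
//     let e: crate::handlers::get_entry::EntryComponent = c.into(); // timestamp now in ms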
+        .map_err(InfraError::DbResultError)
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize, Queryable, ToSchema)]
@@ -608,16 +778,16 @@ impl FromIterator for Vec {
     }
 }
 
-pub async fn get_ohlc(
+pub async fn get_spot_ohlc(
     pool: &deadpool_diesel::postgres::Pool,
     pair_id: String,
     interval: Interval,
     time: i64,
 ) -> Result<Vec<OHLCEntry>, InfraError> {
-    let conn = pool.get().await.map_err(adapt_infra_error)?;
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
 
     let raw_sql = format!(
-        r#"
+        r"
         -- query the materialized realtime view
         SELECT
             ohlc_bucket AS time,
@@ -626,7 +796,7 @@
             low,
             close
         FROM
-            new_{}_candle
+            candle_{}_spot
         WHERE
             pair_id = $1
         AND
@@ -634,12 +804,12 @@
         ORDER BY
             time DESC
         LIMIT 10000;
-        "#,
+        ",
         get_interval_specifier(interval, false)?
     );
 
     let date_time = DateTime::from_timestamp(time, 0).ok_or(InfraError::InvalidTimestamp(
-        format!("Cannot convert to DateTime: {time}"),
+        TimestampError::ToDatetimeErrorI64(time),
     ))?;
 
     let raw_entries = conn
@@ -650,8 +820,8 @@
             .load::(conn)
         })
         .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
 
     let entries: Vec<OHLCEntry> = raw_entries
         .into_iter()
@@ -667,6 +837,38 @@
     Ok(entries)
 }
 
+pub async fn get_expiries_list(
+    pool: &deadpool_diesel::postgres::Pool,
+    pair_id: String,
+) -> Result<Vec<NaiveDateTime>, InfraError> {
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
+
+    let sql_request: String = r"
+        SELECT DISTINCT expiration_timestamp
+        FROM future_entries
+        WHERE pair_id = $1 AND expiration_timestamp IS NOT NULL
+        ORDER BY expiration_timestamp;
+    "
+    .to_string();
+
+    let raw_exp = conn
+        .interact(move |conn| {
+            diesel::sql_query(&sql_request)
+                .bind::<VarChar, _>(pair_id)
+                .load::(conn)
+        })
+        .await
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;
+
+    let expiries: Vec<NaiveDateTime> = raw_exp
+        .into_iter()
+        .map(|r| r.expiration_timestamp)
+        .collect();
+
+    Ok(expiries)
+}
+
 #[derive(Debug, Queryable, QueryableByName, Deserialize, Serialize)]
 struct RawMedianEntryWithComponents {
     #[diesel(sql_type = VarChar)]
@@ -720,292 +922,9 @@ pub struct EntryComponent {
     pub publisher_signature: String,
 }
 
-impl TryFrom<EntryComponent> for SignedPublisherPrice {
-    type Error = ConversionError;
-
-    fn try_from(component: EntryComponent) -> Result<Self, Self::Error> {
-        let asset_id = StarkexPrice::get_oracle_asset_id(&component.publisher, &component.pair_id)?;
-
-        // Scale price from 8 decimals to 18 decimals for StarkEx
-        let price_with_18_decimals = component.price * BigDecimal::from(10_u64.pow(10));
-
-        Ok(Self {
-            oracle_asset_id: format!("0x{asset_id}"),
-            oracle_price: price_with_18_decimals.to_string(),
-            timestamp: component.timestamp.to_string(),
-            signing_key: component.publisher_address,
-        })
-    }
-}
-
 #[derive(Debug, Deserialize, Serialize)]
 pub struct MedianEntryWithComponents {
     pub pair_id: String,
     pub median_price: BigDecimal,
     pub components: Vec<EntryComponent>,
 }
-const EXCLUDED_PUBLISHER: &str = ""; - -/// Builds a SQL query that will fetch the recent prices between now and -/// the given interval for each unique tuple (`pair_id`, publisher, source) -/// and then calculate the median price for each `pair_id`. -/// We also return in a JSON string the components that were used to calculate -/// the median price. -fn build_sql_query_for_median_with_components( - pair_ids: &[String], - interval_in_ms: u64, - entry_type: DataType, -) -> String { - format!( - r#" - WITH last_prices AS ( - SELECT - e.pair_id, - e.publisher, - p.account_address AS publisher_account_address, - e.source, - e.price, - e.timestamp, - e.publisher_signature, - ROW_NUMBER() OVER (PARTITION BY e.pair_id, e.publisher, e.source ORDER BY e.timestamp DESC) AS rn - FROM - {table_name} e - JOIN - publishers p ON e.publisher = p.name - WHERE - e.pair_id IN ({pairs_list}) - AND e.timestamp >= NOW() - INTERVAL '{interval_in_ms} milliseconds' - AND e.publisher != '{excluded_publisher}' - {perp_filter} - ), - filtered_last_prices AS ( - SELECT - pair_id, - publisher, - publisher_account_address, - source, - price, - timestamp, - publisher_signature - FROM - last_prices - WHERE - rn = 1 - ) - SELECT - pair_id, - percentile_cont(0.5) WITHIN GROUP (ORDER BY price) AS median_price, - jsonb_agg( - jsonb_build_object( - 'pair_id', pair_id, - 'price', price, - 'timestamp', timestamp, - 'publisher', publisher, - 'publisher_address', publisher_account_address, - 'publisher_signature', publisher_signature - ) - ) AS components - FROM - filtered_last_prices - GROUP BY - pair_id; - "#, - table_name = get_table_name_from_type(entry_type), - pairs_list = pair_ids - .iter() - .map(|pair_id| format!("'{pair_id}'")) - .collect::>() - .join(", "), - interval_in_ms = interval_in_ms, - excluded_publisher = EXCLUDED_PUBLISHER, - perp_filter = match entry_type { - DataType::PerpEntry => "AND e.expiration_timestamp IS NULL", - _ => "", - } - ) -} - -/// Compute the median price for each `pair_id` in the given list of `pair_ids` -/// over an interval of time. -/// -/// The interval is increased until we have valid entries with enough publishers. -/// Returns any pairs that have valid data, even if some pairs are invalid. -pub async fn get_current_median_entries_with_components( - pool: &deadpool_diesel::postgres::Pool, - pair_ids: &[String], - entry_type: DataType, -) -> Result, InfraError> { - let conn = pool.get().await.map_err(adapt_infra_error)?; - let mut interval_in_ms = INITAL_INTERVAL_IN_MS; - let mut last_valid_entries = Vec::new(); - - loop { - let raw_sql = - build_sql_query_for_median_with_components(pair_ids, interval_in_ms, entry_type); - - let raw_median_entries = conn - .interact(move |conn| { - diesel::sql_query(raw_sql).load::(conn) - }) - .await - .map_err(adapt_infra_error)? 
- .map_err(adapt_infra_error)?; - - if let Some(valid_entries) = get_median_entries_response(raw_median_entries) { - // Keep track of the valid entries we've found - last_valid_entries = valid_entries; - - // If we have valid entries for all pairs, we can return early - let found_pairs: HashSet<_> = last_valid_entries.iter().map(|e| &e.pair_id).collect(); - let requested_pairs: HashSet<_> = pair_ids.iter().collect(); - if found_pairs == requested_pairs { - break; - } - } - - interval_in_ms += INTERVAL_INCREMENT_IN_MS; - - if interval_in_ms >= MAX_INTERVAL_WITHOUT_ENTRIES { - // Log which pairs we couldn't get valid data for - let found_pairs: HashSet<_> = last_valid_entries - .iter() - .map(|e| e.pair_id.clone()) - .collect(); - let missing_pairs: Vec<_> = pair_ids - .iter() - .filter(|p| !found_pairs.contains(*p)) - .collect(); - - if !missing_pairs.is_empty() { - tracing::warn!( - "Could not compute valid median entries for pairs: {}, [{:?}]", - missing_pairs - .iter() - .map(|s| s.as_str()) - .collect::>() - .join(", "), - entry_type - ); - } - break; - } - } - - Ok(last_valid_entries) -} - -pub async fn get_expiries_list( - pool: &deadpool_diesel::postgres::Pool, - pair_id: String, -) -> Result, InfraError> { - let conn = pool.get().await.map_err(adapt_infra_error)?; - - let sql_request: String = r" - SELECT DISTINCT expiration_timestamp - FROM future_entries - WHERE pair_id = $1 AND expiration_timestamp IS NOT NULL - ORDER BY expiration_timestamp; - " - .to_string(); - - let raw_exp = conn - .interact(move |conn| { - diesel::sql_query(&sql_request) - .bind::(pair_id) - .load::(conn) - }) - .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error)?; - - let expiries: Vec = raw_exp - .into_iter() - .map(|r| r.expiration_timestamp) - .collect(); - - Ok(expiries) -} diff --git a/pragma-node/src/infra/repositories/mod.rs b/pragma-node/src/infra/repositories/mod.rs index ca379504..a5ea0a7a 100644 --- a/pragma-node/src/infra/repositories/mod.rs +++ b/pragma-node/src/infra/repositories/mod.rs @@ -1,4 +1,4 @@ pub mod entry_repository; pub mod onchain_repository; -pub mod oo_repository; pub mod publisher_repository; +pub mod utils; diff --git a/pragma-node/src/infra/repositories/onchain_repository/checkpoint.rs b/pragma-node/src/infra/repositories/onchain_repository/checkpoint.rs index 7b0115d3..cd8b54de 100644 --- a/pragma-node/src/infra/repositories/onchain_repository/checkpoint.rs +++ b/pragma-node/src/infra/repositories/onchain_repository/checkpoint.rs @@ -3,8 +3,8 @@ use deadpool_diesel::postgres::Pool; use diesel::sql_types::{Numeric, Timestamp, VarChar}; use diesel::{Queryable, QueryableByName, RunQueryDsl}; -use pragma_common::types::Network; -use pragma_entities::error::{adapt_infra_error, InfraError}; +use pragma_common::starknet::StarknetNetwork; +use pragma_entities::error::InfraError; use crate::handlers::onchain::get_checkpoints::Checkpoint; use crate::utils::format_bigdecimal_price; @@ -35,17 +35,17 @@ impl RawCheckpoint { #[allow(clippy::cast_possible_wrap)] pub async fn get_checkpoints( pool: &Pool, - network: Network, + network: StarknetNetwork, pair_id: String, decimals: u32, limit: u64, ) -> Result, InfraError> { let table_name = match network { - Network::Mainnet => "mainnet_spot_checkpoints", - Network::Sepolia => "spot_checkpoints", + StarknetNetwork::Mainnet => "mainnet_spot_checkpoints", + StarknetNetwork::Sepolia => "spot_checkpoints", }; let raw_sql = format!( - r#" + r" SELECT transaction_hash, price, @@ -57,10 +57,10 @@ pub async fn get_checkpoints( 
pair_id = $1 ORDER BY timestamp DESC LIMIT $2; - "#, + ", ); - let conn = pool.get().await.map_err(adapt_infra_error)?; + let conn = pool.get().await.map_err(InfraError::DbPoolError)?; let raw_checkpoints = conn .interact(move |conn| { diesel::sql_query(raw_sql) @@ -69,8 +69,8 @@ pub async fn get_checkpoints( .load::(conn) }) .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error)?; + .map_err(InfraError::DbInteractionError)? + .map_err(InfraError::DbResultError)?; let checkpoints: Vec = raw_checkpoints .into_iter() diff --git a/pragma-node/src/infra/repositories/onchain_repository/entry.rs b/pragma-node/src/infra/repositories/onchain_repository/entry.rs index 3fa01d67..1151c089 100644 --- a/pragma-node/src/infra/repositories/onchain_repository/entry.rs +++ b/pragma-node/src/infra/repositories/onchain_repository/entry.rs @@ -4,30 +4,30 @@ use bigdecimal::{BigDecimal, ToPrimitive, Zero}; use deadpool_diesel::postgres::Pool; use diesel::sql_types::{Numeric, Text, Timestamp, VarChar}; use diesel::{Queryable, QueryableByName, RunQueryDsl}; +use moka::future::Cache; -use pragma_common::types::pair::Pair; -use pragma_common::types::{AggregationMode, DataType, Interval, Network}; -use pragma_entities::error::{adapt_infra_error, InfraError}; -use pragma_entities::Currency; +use pragma_common::Pair; +use pragma_common::{AggregationMode, InstrumentType, Interval, starknet::StarknetNetwork}; +use pragma_entities::error::InfraError; use pragma_monitoring::models::SpotEntry; +use crate::constants::currencies::ABSTRACT_CURRENCIES; use crate::handlers::onchain::get_entry::OnchainEntry; +use crate::infra::rpc::RpcClients; use crate::utils::{ big_decimal_price_to_hex, convert_via_quote, get_mid_price, normalize_to_decimals, }; -use super::{get_onchain_ohlc_table_name, get_onchain_table_name}; - -use crate::infra::repositories::entry_repository::get_decimals; +use super::{get_onchain_decimals, get_onchain_ohlc_table_name, get_onchain_table_name}; // Means that we only consider the entries for the last hour when computing the aggregation & // retrieving the sources. 
 pub const ENTRIES_BACKWARD_INTERVAL: &str = "1 hour";
 
 #[derive(Debug)]
-pub struct OnchainRoutingArguments {
+pub struct OnchainEntryArguments {
     pub pair_id: String,
-    pub network: Network,
+    pub network: StarknetNetwork,
     pub timestamp: u64,
     pub aggregation_mode: AggregationMode,
     pub is_routing: bool,
@@ -72,10 +72,12 @@ impl From<&SpotEntryWithAggregatedPrice> for OnchainEntry {
     }
 }
 
+#[allow(clippy::implicit_hasher)]
 pub async fn routing(
     onchain_pool: &Pool,
-    offchain_pool: &Pool,
-    routing_args: OnchainRoutingArguments,
+    routing_args: OnchainEntryArguments,
+    rpc_clients: &RpcClients,
+    decimals_cache: &Cache<StarknetNetwork, HashMap<String, u32>>,
 ) -> Result<Vec<RawOnchainData>, InfraError> {
     let pair_id = routing_args.pair_id;
     let is_routing = routing_args.is_routing;
@@ -94,7 +96,9 @@
     .await?;
     if !prices_and_entries.is_empty() {
         let pair = Pair::from(pair_id.clone());
-        let decimal = get_decimals(offchain_pool, &pair).await?;
+        let decimal =
+            get_onchain_decimals(decimals_cache, rpc_clients, routing_args.network, &pair)
+                .await?;
         for row in prices_and_entries {
             result.push(RawOnchainData {
                 price: row.aggregated_price,
@@ -107,21 +111,13 @@
         }
     }
     if !is_routing {
-        return Err(InfraError::NotFound);
+        return Err(InfraError::EntryNotFound(pair_id));
     }
 
-    let offchain_conn = offchain_pool.get().await.map_err(adapt_infra_error)?;
-
-    let alternative_currencies = offchain_conn
-        .interact(Currency::get_abstract_all)
-        .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
-
     // safe unwrap since we construct the pairs string in calling function
     let (base, quote) = pair_id.split_once('/').unwrap();
 
-    for alt_currency in alternative_currencies {
+    for alt_currency in ABSTRACT_CURRENCIES {
         let base_alt_pair = format!("{base}/{alt_currency}");
         let alt_quote_pair = format!("{quote}/{alt_currency}");
 
@@ -136,8 +132,13 @@
             routing_args.aggregation_mode,
         )
         .await?;
-        let base_alt_decimal =
-            get_decimals(offchain_pool, &Pair::from(base_alt_pair.clone())).await?;
+        let base_alt_decimal = get_onchain_decimals(
+            decimals_cache,
+            rpc_clients,
+            routing_args.network,
+            &Pair::from(base_alt_pair.clone()),
+        )
+        .await?;
         let quote_alt_result = get_sources_and_aggregate(
             onchain_pool,
             routing_args.network,
@@ -146,8 +147,17 @@
             routing_args.aggregation_mode,
         )
         .await?;
-        let quote_alt_decimal =
-            get_decimals(offchain_pool, &Pair::from(alt_quote_pair.clone())).await?;
+        let quote_alt_decimal = get_onchain_decimals(
+            decimals_cache,
+            rpc_clients,
+            routing_args.network,
+            &Pair::from(alt_quote_pair.clone()),
+        )
+        .await?;
+
+        if quote_alt_result.len() != base_alt_result.len() {
+            return Err(InfraError::RoutingError(pair_id));
+        }
 
         let result = compute_multiple_rebased_price(
             &mut base_alt_result,
@@ -160,20 +170,20 @@
             return result;
         }
     }
-    Err(InfraError::NotFound)
+    Err(InfraError::RoutingError(pair_id))
 }
 
 fn build_sql_query(
-    network: Network,
+    network: StarknetNetwork,
     aggregation_mode: AggregationMode,
     timestamp: u64,
 ) -> Result<String, InfraError> {
-    let table_name = get_onchain_table_name(network, DataType::SpotEntry)?;
+    let table_name = get_onchain_table_name(network, InstrumentType::Spot);
     let complete_sql_query = {
         let aggregation_query = get_aggregation_subquery(aggregation_mode)?;
         format!(
-            r#"
+            r"
             WITH RankedEntries AS (
                 SELECT
                     *,
@@ -201,7 +211,7 @@
                 AggregatedPrice AP
             ORDER BY
                 FE.timestamp DESC;
-            "#,
+            ",
         )
     };
     Ok(complete_sql_query)
 }
 
@@ -209,7 +219,6 @@ fn get_aggregation_subquery(aggregation_mode:
AggregationMode) -> Result<&'static str, InfraError> { let query = match aggregation_mode { - AggregationMode::Mean => "AVG(price) AS aggregated_price", AggregationMode::Median => { "( SELECT AVG(price) @@ -266,14 +275,14 @@ pub struct AggPriceAndEntries { // TODO(akhercha): Only works for Spot entries pub async fn get_sources_and_aggregate( pool: &Pool, - network: Network, + network: StarknetNetwork, pair_id: String, timestamp: u64, aggregation_mode: AggregationMode, ) -> Result, InfraError> { let raw_sql = build_sql_query(network, aggregation_mode, timestamp)?; - let conn = pool.get().await.map_err(adapt_infra_error)?; + let conn = pool.get().await.map_err(InfraError::DbPoolError)?; let raw_entries = conn .interact(move |conn| { diesel::sql_query(raw_sql) @@ -281,8 +290,8 @@ pub async fn get_sources_and_aggregate( .load::(conn) }) .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error)?; + .map_err(InfraError::DbInteractionError)? + .map_err(InfraError::DbResultError)?; Ok(group_entries_per_aggprice(raw_entries)) } @@ -319,10 +328,6 @@ fn compute_multiple_rebased_price( base_alt_decimal: u32, quote_alt_decimal: u32, ) -> Result, InfraError> { - if quote_alt_result.len() != base_alt_result.len() { - return Err(InfraError::RoutingError); - } - let mut result: Vec = Vec::new(); for (i, base) in base_alt_result.iter_mut().enumerate() { @@ -351,12 +356,12 @@ struct EntryTimestamp { pub async fn get_last_updated_timestamp( pool: &Pool, - network: Network, + network: StarknetNetwork, pairs: Vec, ) -> Result { let pair_list = format!("('{}')", pairs.join("','")); let raw_sql = format!( - r#" + r" SELECT timestamp FROM @@ -365,18 +370,21 @@ pub async fn get_last_updated_timestamp( pair_id IN {} ORDER BY timestamp DESC LIMIT 1; - "#, - get_onchain_table_name(network, DataType::SpotEntry)?, + ", + get_onchain_table_name(network, InstrumentType::Spot), pair_list, ); - let conn = pool.get().await.map_err(adapt_infra_error)?; + let conn = pool.get().await.map_err(InfraError::DbPoolError)?; let raw_entry = conn .interact(move |conn| diesel::sql_query(raw_sql).load::(conn)) .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error)?; + .map_err(InfraError::DbInteractionError)? + .map_err(InfraError::DbResultError)?; + + let most_recent_entry = raw_entry + .first() + .ok_or(InfraError::EntryNotFound(pair_list))?; - let most_recent_entry = raw_entry.first().ok_or(InfraError::NotFound)?; Ok(most_recent_entry.timestamp.and_utc().timestamp() as u64) } @@ -390,7 +398,7 @@ struct VariationEntry { pub async fn get_variations( pool: &Pool, - network: Network, + network: StarknetNetwork, pair_id: String, ) -> Result, InfraError> { let intervals = vec![Interval::OneHour, Interval::OneDay, Interval::OneWeek]; @@ -398,9 +406,9 @@ pub async fn get_variations( let mut variations = HashMap::new(); for interval in intervals { - let ohlc_table_name = get_onchain_ohlc_table_name(network, DataType::SpotEntry, interval)?; + let ohlc_table_name = get_onchain_ohlc_table_name(network, InstrumentType::Spot, interval)?; let raw_sql = format!( - r#" + r" WITH recent_entries AS ( SELECT ohlc_bucket AS time, @@ -424,16 +432,16 @@ pub async fn get_variations( rn IN (1, 2) ORDER BY rn ASC; - "#, + ", ); - let conn = pool.get().await.map_err(adapt_infra_error)?; + let conn = pool.get().await.map_err(InfraError::DbPoolError)?; let p = pair_id.clone(); let raw_entries: Vec = conn .interact(move |conn| diesel::sql_query(raw_sql).bind::(p).load(conn)) .await - .map_err(adapt_infra_error)? 
- .map_err(adapt_infra_error)?; + .map_err(InfraError::DbInteractionError)? + .map_err(InfraError::DbResultError)?; if raw_entries.len() == 2 { let current_open = get_mid_price(&raw_entries[0].open, &raw_entries[0].close); @@ -476,24 +484,24 @@ pub fn onchain_pair_exist(existing_pair_list: &[EntryPairId], pair_id: &str) -> // TODO(0xevolve): Only works for Spot entries pub async fn get_existing_pairs( pool: &Pool, - network: Network, + network: StarknetNetwork, ) -> Result, InfraError> { let raw_sql = format!( - r#" + r" SELECT DISTINCT pair_id FROM {table_name}; - "#, - table_name = get_onchain_table_name(network, DataType::SpotEntry)? + ", + table_name = get_onchain_table_name(network, InstrumentType::Spot) ); - let conn = pool.get().await.map_err(adapt_infra_error)?; + let conn = pool.get().await.map_err(InfraError::DbPoolError)?; let raw_entries = conn .interact(move |conn| diesel::sql_query(raw_sql).load::(conn)) .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error)?; + .map_err(InfraError::DbInteractionError)? + .map_err(InfraError::DbResultError)?; Ok(raw_entries) } diff --git a/pragma-node/src/infra/repositories/onchain_repository/history.rs b/pragma-node/src/infra/repositories/onchain_repository/history.rs index cdf786a8..6f480e11 100644 --- a/pragma-node/src/infra/repositories/onchain_repository/history.rs +++ b/pragma-node/src/infra/repositories/onchain_repository/history.rs @@ -1,30 +1,35 @@ +use std::collections::HashMap; + use bigdecimal::BigDecimal; use chrono::{DateTime, NaiveDateTime}; use deadpool_diesel::postgres::Pool; -use diesel::{prelude::QueryableByName, RunQueryDsl}; - -use pragma_common::types::pair::Pair; -use pragma_common::types::{DataType, Interval, Network}; -use pragma_entities::error::{adapt_infra_error, InfraError}; -use pragma_entities::Currency; +use diesel::{RunQueryDsl, prelude::QueryableByName}; +use moka::future::Cache; +use pragma_entities::models::entries::timestamp::TimestampRange; use serde::Serialize; -use crate::infra::repositories::entry_repository::get_decimals; -use crate::utils::{convert_via_quote, normalize_to_decimals}; -use pragma_common::types::timestamp::TimestampRange; +use pragma_common::starknet::StarknetNetwork; +use pragma_common::{InstrumentType, Interval, Pair}; +use pragma_entities::TimestampError; +use pragma_entities::error::InfraError; use super::entry::{get_existing_pairs, onchain_pair_exist}; -use super::get_onchain_aggregate_table_name; +use super::{get_onchain_aggregate_table_name, get_onchain_decimals}; +use crate::constants::currencies::ABSTRACT_CURRENCIES; +use crate::infra::rpc::RpcClients; +use crate::utils::{convert_via_quote, normalize_to_decimals}; /// Query the onchain database for historical entries and if entries /// are found, query the offchain database to get the pair decimals. 
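Under the new signature the decimals are resolved on-chain (cache first, then RPC) rather than from the offchain database. A hedged call-site sketch of how the function below is meant to be invoked; it assumes the sketch sits in this module next to the repository helpers, and pool/timestamp construction is elided:

```rust
use std::collections::HashMap;

use deadpool_diesel::postgres::Pool;
use moka::future::Cache;
use pragma_common::starknet::StarknetNetwork;
use pragma_common::{Interval, Pair};
use pragma_entities::error::InfraError;
use pragma_entities::models::entries::timestamp::TimestampRange;

use crate::infra::rpc::RpcClients;

/// Illustrative call site: the cache maps each network to its pair -> decimals
/// table, the same shape `AppState` stores elsewhere in this diff.
async fn btc_usd_history(
    onchain_pool: &Pool,
    rpc_clients: &RpcClients,
    timestamp_range: &TimestampRange,
) -> Result<(), InfraError> {
    let decimals_cache: Cache<StarknetNetwork, HashMap<String, u32>> = Cache::new(16);
    let (entries, decimals) = get_historical_entries_and_decimals(
        onchain_pool,
        StarknetNetwork::Mainnet,
        &Pair::from("BTC/USD"),
        timestamp_range,
        Interval::OneHour,
        &decimals_cache,
        rpc_clients,
    )
    .await?;
    tracing::info!("{} chunks at {decimals} decimals", entries.len());
    Ok(())
}
```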
+#[allow(clippy::implicit_hasher)]
 pub async fn get_historical_entries_and_decimals(
     onchain_pool: &Pool,
-    offchain_pool: &Pool,
-    network: Network,
+    network: StarknetNetwork,
     pair: &Pair,
     timestamp_range: &TimestampRange,
     chunk_interval: Interval,
+    decimals_cache: &Cache<StarknetNetwork, HashMap<String, u32>>,
+    rpc_clients: &RpcClients,
 ) -> Result<(Vec<HistoricalEntryRaw>, u32), InfraError> {
     let raw_entries: Vec<HistoricalEntryRaw> = get_historical_aggregated_entries(
         onchain_pool,
@@ -36,10 +41,11 @@ pub async fn get_historical_entries_and_decimals(
     .await?;

     if raw_entries.is_empty() {
-        return Err(InfraError::NotFound);
+        return Err(InfraError::EntryNotFound(pair.to_pair_id()));
     }

-    let decimals = get_decimals(offchain_pool, pair).await?;
+    let decimals = get_onchain_decimals(decimals_cache, rpc_clients, network, pair).await?;
+
     Ok((raw_entries, decimals))
 }

@@ -59,7 +65,7 @@ pub struct HistoricalEntryRaw {
 /// NOTE: Only works for `SpotEntry` at the moment, `DataType` is hard coded.
 async fn get_historical_aggregated_entries(
     pool: &Pool,
-    network: Network,
+    network: StarknetNetwork,
     pair: &Pair,
     timestamp: &TimestampRange,
     chunk_interval: Interval,
@@ -70,7 +76,7 @@ async fn get_historical_aggregated_entries(
     };

     let raw_sql = format!(
-        r#"
+        r"
         SELECT
             pair_id,
             bucket AS timestamp,
@@ -84,14 +90,14 @@ async fn get_historical_aggregated_entries(
             AND bucket <= to_timestamp($3)
         ORDER BY
             bucket ASC
-        "#,
+        ",
         table_name =
-            get_onchain_aggregate_table_name(network, DataType::SpotEntry, chunk_interval)?,
+            get_onchain_aggregate_table_name(network, InstrumentType::Spot, chunk_interval)?,
     );

     let pair_id = pair.to_string();

-    let conn = pool.get().await.map_err(adapt_infra_error)?;
+    let conn = pool.get().await.map_err(InfraError::DbPoolError)?;
     let raw_entries = conn
         .interact(move |conn| {
             diesel::sql_query(raw_sql)
@@ -101,8 +107,8 @@ async fn get_historical_aggregated_entries(
                 .load::<HistoricalEntryRaw>(conn)
         })
         .await
-        .map_err(adapt_infra_error)?
-        .map_err(adapt_infra_error)?;
+        .map_err(InfraError::DbInteractionError)?
+        .map_err(InfraError::DbResultError)?;

     Ok(raw_entries)
 }

@@ -114,58 +120,99 @@ async fn get_historical_aggregated_entries(
 /// once we have proper E2E tests, we should try to merge the code.
 /// NOTE: We leave open the possibility of trying 1min intervals, but they rarely work.
 /// Entries rarely align perfectly, causing insufficient data for routing.
+#[allow(clippy::implicit_hasher)]
 pub async fn retry_with_routing(
     onchain_pool: &Pool,
-    offchain_pool: &Pool,
-    network: Network,
+    network: StarknetNetwork,
     pair: &Pair,
     timestamp_range: &TimestampRange,
     chunk_interval: Interval,
+    decimals_cache: &Cache<StarknetNetwork, HashMap<String, u32>>,
+    rpc_clients: &RpcClients,
 ) -> Result<(Vec<HistoricalEntryRaw>, u32), InfraError> {
-    let offchain_conn = offchain_pool.get().await.map_err(adapt_infra_error)?;
-    let alternative_currencies = offchain_conn
-        .interact(Currency::get_abstract_all)
-        .await
-        .map_err(adapt_infra_error)?
- .map_err(adapt_infra_error)?; - let existing_pairs = get_existing_pairs(onchain_pool, network).await?; + let mut routing_attempts = Vec::new(); + + for alt_currency in ABSTRACT_CURRENCIES { + let base_alt_pair = Pair::from((pair.base.clone(), alt_currency.to_string())); + let alt_quote_pair = Pair::from((pair.quote.clone(), alt_currency.to_string())); + let base_alt_pair_str = base_alt_pair.to_string(); + let alt_quote_pair_str = alt_quote_pair.to_string(); + + // Check if both required pairs exist + let base_alt_exists = onchain_pair_exist(&existing_pairs, &base_alt_pair_str); + let alt_quote_exists = onchain_pair_exist(&existing_pairs, &alt_quote_pair_str); + + if !base_alt_exists || !alt_quote_exists { + routing_attempts.push(format!( + "Route via {alt_currency}: base pair '{base_alt_pair_str}' exists: {base_alt_exists}, quote pair '{alt_quote_pair_str}' exists: {alt_quote_exists}", + )); + continue; + } + + // Both pairs exist, try to get their historical entries + let base_alt_result = get_historical_entries_and_decimals( + onchain_pool, + network, + &base_alt_pair, + timestamp_range, + chunk_interval, + decimals_cache, + rpc_clients, + ) + .await; + + if let Err(e) = &base_alt_result { + routing_attempts.push(format!( + "Route via {alt_currency}: failed to get history for '{base_alt_pair_str}': {e}", + )); + continue; + } + + let alt_quote_result = get_historical_entries_and_decimals( + onchain_pool, + network, + &alt_quote_pair, + timestamp_range, + chunk_interval, + decimals_cache, + rpc_clients, + ) + .await; + + if let Err(e) = &alt_quote_result { + routing_attempts.push(format!( + "Route via {alt_currency}: failed to get history for '{alt_quote_pair_str}': {e}", + )); + continue; + } + + let base_alt_result = base_alt_result.unwrap(); + let alt_quote_result = alt_quote_result.unwrap(); - for alt_currency in alternative_currencies { - let base_alt_pair = Pair::from((pair.base.clone(), alt_currency.clone())); - let alt_quote_pair = Pair::from((pair.quote.clone(), alt_currency.clone())); - - if onchain_pair_exist(&existing_pairs, &base_alt_pair.to_string()) - && onchain_pair_exist(&existing_pairs, &alt_quote_pair.to_string()) - { - let base_alt_result = get_historical_entries_and_decimals( - onchain_pool, - offchain_pool, - network, - &base_alt_pair, - timestamp_range, - chunk_interval, - ) - .await?; - let alt_quote_result = get_historical_entries_and_decimals( - onchain_pool, - offchain_pool, - network, - &alt_quote_pair, - timestamp_range, - chunk_interval, - ) - .await?; - - if base_alt_result.0.len() != alt_quote_result.0.len() { - continue; - } - - return calculate_rebased_prices(base_alt_result, alt_quote_result); + if base_alt_result.0.len() != alt_quote_result.0.len() { + routing_attempts.push(format!( + "Route via {alt_currency}: mismatched entries count: {} vs {}", + base_alt_result.0.len(), + alt_quote_result.0.len() + )); + continue; } + + return calculate_rebased_prices(base_alt_result, alt_quote_result); } - Err(InfraError::RoutingError) + // Construct detailed error message + let attempts_info = if routing_attempts.is_empty() { + "No routing pairs found".to_string() + } else { + format!("Attempted routes:\n- {}", routing_attempts.join("\n- ")) + }; + + Err(InfraError::RoutingError(format!( + "{}; {attempts_info}", + pair.to_pair_id(), + ))) } /// Given two vector of entries, compute a new vector containing the routed prices. 
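The routed price is the base/alt series divided point-wise by the quote/alt series, rescaled to the output decimals; the arithmetic mirrors `convert_via_quote` in `utils/conversion.rs` later in this diff. A self-contained sketch of the single-point step (not the crate's `compute_multiple_rebased_price`):

```rust
use bigdecimal::BigDecimal;

/// price(BASE/QUOTE) = price(BASE/ALT) * 10^output_decimals / price(QUOTE/ALT),
/// mirroring `convert_via_quote` from utils/conversion.rs in this diff.
fn rebase_via_quote(
    base_alt: &BigDecimal,
    quote_alt: &BigDecimal,
    output_decimals: u32,
) -> Option<BigDecimal> {
    if quote_alt == &BigDecimal::from(0) {
        return None; // a zero quote price cannot be used as a divisor
    }
    let power = BigDecimal::from(10_i64.pow(output_decimals));
    Some(base_alt * power / quote_alt)
}

fn main() {
    // E.g. BTC/USDT and USD/USDT route to BTC/USD, scaled to 8 decimals.
    let btc_usd = rebase_via_quote(&BigDecimal::from(50_000), &BigDecimal::from(1), 8);
    assert!(btc_usd.is_some());
}
```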
@@ -249,9 +296,9 @@ fn combine_entries(
         quote_entry.nb_sources_aggregated,
     );
     let new_timestamp = DateTime::from_timestamp(max_timestamp, 0)
-        .ok_or(InfraError::InvalidTimestamp(format!(
-            "Cannot convert to DateTime: {max_timestamp}"
-        )))?
+        .ok_or(InfraError::InvalidTimestamp(
+            TimestampError::ToDatetimeErrorI64(max_timestamp),
+        ))?
         .naive_utc();

     let base_pair = Pair::from(base_entry.pair_id.clone());
diff --git a/pragma-node/src/infra/repositories/onchain_repository/mod.rs b/pragma-node/src/infra/repositories/onchain_repository/mod.rs
index 2adff0ea..ab12788a 100644
--- a/pragma-node/src/infra/repositories/onchain_repository/mod.rs
+++ b/pragma-node/src/infra/repositories/onchain_repository/mod.rs
@@ -4,66 +4,117 @@ pub mod history;
 pub mod ohlc;
 pub mod publisher;

-use crate::is_enum_variant;
-use crate::utils::sql::get_interval_specifier;
-use pragma_common::types::{DataType, Interval, Network};
+use std::collections::HashMap;
+
+use moka::future::Cache;
+
+use pragma_common::starknet::StarknetNetwork;
+use pragma_common::{InstrumentType, Interval, Pair};
 use pragma_entities::error::InfraError;

+use crate::{
+    infra::rpc::{RpcClients, call_get_decimals},
+    utils::sql::get_interval_specifier,
+};
+
+/// Retrieves the on-chain decimals for the provided network & pair.
+pub(crate) async fn get_onchain_decimals(
+    decimals_cache: &Cache<StarknetNetwork, HashMap<String, u32>>,
+    rpc_clients: &RpcClients,
+    network: StarknetNetwork,
+    pair: &Pair,
+) -> Result<u32, InfraError> {
+    let pair_id = pair.to_pair_id();
+
+    // Try to get decimals from cache first
+    if let Some(network_decimals) = decimals_cache.get(&network).await {
+        if let Some(decimals) = network_decimals.get(&pair_id) {
+            return Ok(*decimals);
+        }
+    }
+
+    // If not found in cache, call RPC
+    let Some(rpc_client) = rpc_clients.get(&network) else {
+        return Err(InfraError::NoRpcAvailable(network));
+    };
+
+    let decimals = match call_get_decimals(rpc_client, pair, network).await {
+        Ok(decimals) => decimals,
+        Err(e) => {
+            tracing::error!("Could not get on-chain decimals for {pair}: {e}");
+            0
+        }
+    };
+
+    // Update cache with the new decimals
+    if decimals != 0 {
+        let network_decimals = decimals_cache.get(&network).await.unwrap_or_default();
+        let mut updated_network_decimals = network_decimals.clone();
+        updated_network_decimals.insert(pair_id, decimals);
+        decimals_cache
+            .insert(network, updated_network_decimals)
+            .await;
+    }
+
+    Ok(decimals)
+}
+
 /// Retrieve the onchain table name based on the network and data type.
 pub(crate) const fn get_onchain_table_name(
-    network: Network,
-    data_type: DataType,
-) -> Result<&'static str, InfraError> {
-    let table = match (network, data_type) {
-        (Network::Sepolia, DataType::SpotEntry) => "spot_entry",
-        (Network::Mainnet, DataType::SpotEntry) => "mainnet_spot_entry",
-        (Network::Sepolia, DataType::FutureEntry) => "future_entry",
-        (Network::Mainnet, DataType::FutureEntry) => "mainnet_future_entry",
-        _ => return Err(InfraError::InternalServerError),
-    };
-    Ok(table)
+    network: StarknetNetwork,
+    data_type: InstrumentType,
+) -> &'static str {
+    match (network, data_type) {
+        (StarknetNetwork::Sepolia, InstrumentType::Spot) => "spot_entry",
+        (StarknetNetwork::Mainnet, InstrumentType::Spot) => "mainnet_spot_entry",
+        (StarknetNetwork::Sepolia, InstrumentType::Perp) => "future_entry",
+        (StarknetNetwork::Mainnet, InstrumentType::Perp) => "mainnet_future_entry",
+    }
 }

 /// Retrieve the onchain table name for the OHLC based on network, datatype & interval.
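Concretely, the helper above and the one that follows expand to fixed table names; a test sketch of a few expansions, assuming it sits at the bottom of this module:

```rust
#[cfg(test)]
mod table_name_tests {
    use super::*;
    use pragma_common::starknet::StarknetNetwork;
    use pragma_common::{InstrumentType, Interval};

    #[test]
    fn expands_table_names() {
        // Entry tables come straight from the match arms above.
        assert_eq!(
            get_onchain_table_name(StarknetNetwork::Mainnet, InstrumentType::Spot),
            "mainnet_spot_entry"
        );
        // Candle tables combine the network/instrument prefix with the interval specifier.
        assert_eq!(
            get_onchain_ohlc_table_name(
                StarknetNetwork::Mainnet,
                InstrumentType::Spot,
                Interval::OneHour
            )
            .unwrap(),
            "mainnet_spot_candle_1_h"
        );
        assert_eq!(
            get_onchain_ohlc_table_name(
                StarknetNetwork::Sepolia,
                InstrumentType::Perp,
                Interval::OneDay
            )
            .unwrap(),
            "perp_candle_1_day"
        );
    }
}
```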
pub(crate) fn get_onchain_ohlc_table_name( - network: Network, - data_type: DataType, + network: StarknetNetwork, + data_type: InstrumentType, interval: Interval, ) -> Result { let prefix_name = match (network, data_type) { - (Network::Sepolia, DataType::SpotEntry) => "spot", - (Network::Mainnet, DataType::SpotEntry) => "mainnet_spot", - (Network::Sepolia, DataType::FutureEntry) => "future", - (Network::Mainnet, DataType::FutureEntry) => "mainnet_future", - _ => return Err(InfraError::InternalServerError), + (StarknetNetwork::Sepolia, InstrumentType::Spot) => "spot_candle", + (StarknetNetwork::Mainnet, InstrumentType::Spot) => "mainnet_spot_candle", + (StarknetNetwork::Sepolia, InstrumentType::Perp) => "perp_candle", + (StarknetNetwork::Mainnet, InstrumentType::Perp) => "mainnet_perp_candle", }; - let interval_specifier = get_interval_specifier(interval, true)?; - let table_name = format!("{prefix_name}_{interval_specifier}_candle"); + let interval_specifier = match interval { + Interval::TenSeconds => Ok("10_s"), + Interval::OneMinute => Ok("1_min"), + Interval::FiveMinutes => Ok("5_min"), + Interval::FifteenMinutes => Ok("15_min"), + Interval::OneHour => Ok("1_h"), + Interval::OneDay => Ok("1_day"), + Interval::OneWeek => Ok("1_week"), + // We support less intervals for candles + _ => Err(InfraError::UnsupportedOnchainInterval(interval)), + }?; + let table_name = format!("{prefix_name}_{interval_specifier}"); Ok(table_name) } /// Retrieve the onchain table name for Timescale aggregates (medians) based on the network, /// datatype & interval. pub(crate) fn get_onchain_aggregate_table_name( - network: Network, - data_type: DataType, + network: StarknetNetwork, + data_type: InstrumentType, interval: Interval, ) -> Result { let prefix_name = match (network, data_type) { - (Network::Sepolia, DataType::SpotEntry) => "spot_price", - (Network::Mainnet, DataType::SpotEntry) => "mainnet_spot_price", - (Network::Sepolia, DataType::FutureEntry) => "future_price", - (Network::Mainnet, DataType::FutureEntry) => "mainnet_future_price", - _ => return Err(InfraError::InternalServerError), + (StarknetNetwork::Sepolia, InstrumentType::Spot) => "spot_median", + (StarknetNetwork::Mainnet, InstrumentType::Spot) => "mainnet_spot_median", + (StarknetNetwork::Sepolia, InstrumentType::Perp) => "perp_median", + (StarknetNetwork::Mainnet, InstrumentType::Perp) => "mainnet_perp_median", }; - // NOTE: Special case because there is a mistake and we forgot the "s" on 2_hour - let interval_specifier = if is_enum_variant!(interval, Interval::TwoHours) { - "2_hour" - } else { - get_interval_specifier(interval, true)? 
- }; + let interval_specifier = get_interval_specifier(interval, false)?; + let table_name = format!("{prefix_name}_{interval_specifier}"); - let table_name = format!("{prefix_name}_{interval_specifier}_agg"); Ok(table_name) } diff --git a/pragma-node/src/infra/repositories/onchain_repository/ohlc.rs b/pragma-node/src/infra/repositories/onchain_repository/ohlc.rs index ac7d0701..03110529 100644 --- a/pragma-node/src/infra/repositories/onchain_repository/ohlc.rs +++ b/pragma-node/src/infra/repositories/onchain_repository/ohlc.rs @@ -1,8 +1,8 @@ use deadpool_diesel::postgres::Pool; use diesel::RunQueryDsl; -use pragma_common::types::{DataType, Interval, Network}; -use pragma_entities::error::{adapt_infra_error, InfraError}; +use pragma_common::{InstrumentType, Interval, starknet::StarknetNetwork}; +use pragma_entities::error::InfraError; use crate::infra::repositories::entry_repository::{OHLCEntry, OHLCEntryRaw}; @@ -11,13 +11,13 @@ use super::get_onchain_ohlc_table_name; // Only works for Spot for now - since we only store spot entries on chain. pub async fn get_ohlc( pool: &Pool, - network: Network, + network: StarknetNetwork, pair_id: String, interval: Interval, data_to_retrieve: u64, ) -> Result, InfraError> { let raw_sql = format!( - r#" + r" SELECT ohlc_bucket AS time, open, @@ -31,11 +31,11 @@ pub async fn get_ohlc( ORDER BY time DESC LIMIT {data_to_retrieve}; - "#, - table_name = get_onchain_ohlc_table_name(network, DataType::SpotEntry, interval)?, + ", + table_name = get_onchain_ohlc_table_name(network, InstrumentType::Spot, interval)?, ); - let conn = pool.get().await.map_err(adapt_infra_error)?; + let conn = pool.get().await.map_err(InfraError::DbPoolError)?; let raw_entries = conn .interact(move |conn| { diesel::sql_query(raw_sql) @@ -43,8 +43,8 @@ pub async fn get_ohlc( .load::(conn) }) .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error)?; + .map_err(InfraError::DbInteractionError)? 
+ .map_err(InfraError::DbResultError)?; let entries: Vec = raw_entries .into_iter() diff --git a/pragma-node/src/infra/repositories/onchain_repository/publisher.rs b/pragma-node/src/infra/repositories/onchain_repository/publisher.rs index 03b12b67..43b95442 100644 --- a/pragma-node/src/infra/repositories/onchain_repository/publisher.rs +++ b/pragma-node/src/infra/repositories/onchain_repository/publisher.rs @@ -4,15 +4,17 @@ use bigdecimal::BigDecimal; use deadpool_diesel::postgres::Pool; use diesel::sql_types::{BigInt, Integer, Numeric, Timestamp, VarChar}; use diesel::{Queryable, QueryableByName, RunQueryDsl}; - +use futures::future::try_join_all; use moka::future::Cache; -use pragma_common::types::{DataType, Network}; -use pragma_entities::error::{adapt_infra_error, InfraError}; + +use pragma_common::{InstrumentType, Pair, starknet::StarknetNetwork}; +use pragma_entities::error::InfraError; use crate::handlers::onchain::get_publishers::{Publisher, PublisherEntry}; -use crate::utils::{big_decimal_price_to_hex, get_decimals_for_pair}; +use crate::infra::rpc::RpcClients; +use crate::utils::big_decimal_price_to_hex; -use super::get_onchain_table_name; +use super::{get_onchain_decimals, get_onchain_table_name}; #[derive(Debug, Queryable, QueryableByName)] pub struct RawPublisher { @@ -26,14 +28,14 @@ pub struct RawPublisher { pub async fn get_publishers( pool: &Pool, - network: Network, + network: StarknetNetwork, ) -> Result, InfraError> { let address_column = match network { - Network::Mainnet => "mainnet_address", - Network::Sepolia => "testnet_address", + StarknetNetwork::Mainnet => "mainnet_address", + StarknetNetwork::Sepolia => "testnet_address", }; let raw_sql = format!( - r#" + r" SELECT name, website_url, @@ -44,15 +46,15 @@ pub async fn get_publishers( {address_column} IS NOT NULL ORDER BY name ASC; - "#, + ", ); - let conn = pool.get().await.map_err(adapt_infra_error)?; + let conn = pool.get().await.map_err(InfraError::DbPoolError)?; let raw_publishers = conn .interact(move |conn| diesel::sql_query(raw_sql).load::(conn)) .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error)?; + .map_err(InfraError::DbInteractionError)? + .map_err(InfraError::DbResultError)?; Ok(raw_publishers) } @@ -72,18 +74,25 @@ pub struct RawLastPublisherEntryForPair { } impl RawLastPublisherEntryForPair { - pub fn to_publisher_entry( + pub async fn to_publisher_entry( &self, - currencies: &HashMap, - ) -> PublisherEntry { - PublisherEntry { + network: StarknetNetwork, + decimals_cache: &Cache>, + rpc_clients: &RpcClients, + ) -> Result { + let pair = Pair::from(self.pair_id.as_str()); + let decimals = get_onchain_decimals(decimals_cache, rpc_clients, network, &pair).await?; + + let entry = PublisherEntry { pair_id: self.pair_id.clone(), last_updated_timestamp: self.last_updated_timestamp.and_utc().timestamp() as u64, price: big_decimal_price_to_hex(&self.price), source: self.source.clone(), - decimals: get_decimals_for_pair(currencies, &self.pair_id), + decimals, daily_updates: self.daily_updates as u32, - } + }; + + Ok(entry) } } @@ -103,7 +112,7 @@ async fn get_all_publishers_updates( pool: &Pool, table_name: &str, publishers_names: Vec, - publishers_updates_cache: Cache>, + publishers_updates_cache: &Cache>, ) -> Result, InfraError> { let publishers_list = publishers_names.join("','"); @@ -117,7 +126,7 @@ async fn get_all_publishers_updates( // ... 
else, fetch the value from the database let raw_sql = format!( - r#" + r" SELECT publisher, COUNT(*) FILTER (WHERE timestamp >= NOW() - INTERVAL '1 day') AS daily_updates, @@ -129,15 +138,15 @@ async fn get_all_publishers_updates( publisher IN ('{publishers_list}') GROUP BY publisher; - "#, + ", ); - let conn = pool.get().await.map_err(adapt_infra_error)?; + let conn = pool.get().await.map_err(InfraError::DbPoolError)?; let updates = conn .interact(move |conn| diesel::sql_query(raw_sql).load::(conn)) .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error)?; + .map_err(InfraError::DbInteractionError)? + .map_err(InfraError::DbResultError)?; let updates: HashMap = updates .into_iter() @@ -152,15 +161,17 @@ async fn get_all_publishers_updates( Ok(updates) } -async fn get_publisher_with_components( +async fn get_publisher_with_components( pool: &Pool, + network: StarknetNetwork, table_name: &str, publisher: &RawPublisher, publisher_updates: &RawPublisherUpdates, - currencies: &HashMap, + decimals_cache: &Cache>, + rpc_clients: &RpcClients, ) -> Result { let raw_sql_entries = format!( - r#" + r" WITH recent_entries AS ( SELECT pair_id, @@ -196,31 +207,33 @@ async fn get_publisher_with_components( rn = 1 ORDER BY pair_id, source ASC; - "#, + ", table_name = table_name, publisher_name = publisher.name ); - let conn = pool.get().await.map_err(adapt_infra_error)?; + let conn = pool.get().await.map_err(InfraError::DbPoolError)?; let raw_components = conn .interact(move |conn| { diesel::sql_query(raw_sql_entries).load::(conn) }) .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error)?; + .map_err(InfraError::DbInteractionError)? + .map_err(InfraError::DbResultError)?; - let components: Vec = raw_components - .into_iter() - .map(|component| component.to_publisher_entry(currencies)) + let component_futures: Vec<_> = raw_components + .iter() + .map(|component| component.to_publisher_entry(network, decimals_cache, rpc_clients)) .collect(); + // Execute all futures concurrently and collect results + let components = try_join_all(component_futures).await?; + let last_updated_timestamp = components .iter() .map(|component| component.last_updated_timestamp) - .max() - .ok_or(InfraError::NotFound)?; + .max(); let publisher = Publisher { publisher: publisher.name.clone(), @@ -236,39 +249,51 @@ async fn get_publisher_with_components( } #[allow(clippy::implicit_hasher)] -pub async fn get_publishers_with_components( +pub async fn get_publishers_with_components( pool: &Pool, - network: Network, - data_type: DataType, - currencies: HashMap, + network: StarknetNetwork, + data_type: InstrumentType, publishers: Vec, - publishers_updates_cache: Cache>, + publishers_updates_cache: &Cache>, + decimals_cache: &Cache>, + rpc_clients: &RpcClients, ) -> Result, InfraError> { - let table_name = get_onchain_table_name(network, data_type)?; + let table_name = get_onchain_table_name(network, data_type); let publisher_names = publishers.iter().map(|p| p.name.clone()).collect(); let updates = get_all_publishers_updates(pool, table_name, publisher_names, publishers_updates_cache) .await?; - let mut publishers_response = Vec::with_capacity(publishers.len()); - for publisher in &publishers { - let Some(publisher_updates) = updates.get(&publisher.name) else { - continue; - }; - if publisher_updates.daily_updates == 0 { - continue; - } - let publisher_with_components = get_publisher_with_components( - pool, - table_name, - publisher, - publisher_updates, - ¤cies, - ) - .await?; - 
publishers_response.push(publisher_with_components); - } + // Create a vector of futures for each publisher that needs processing + let publisher_futures: Vec<_> = publishers + .iter() + .filter_map(|publisher| { + // Only process publishers with updates + let publisher_updates = updates.get(&publisher.name)?; + if publisher_updates.daily_updates == 0 { + return None; + } + + let table_name = table_name.to_string(); + let publisher_updates = publisher_updates.clone(); + Some(async move { + get_publisher_with_components( + pool, + network, + &table_name, + publisher, + &publisher_updates, + decimals_cache, + rpc_clients, + ) + .await + }) + }) + .collect(); + + // Execute all publisher futures concurrently + let publishers_response = try_join_all(publisher_futures).await?; Ok(publishers_response) } diff --git a/pragma-node/src/infra/repositories/oo_repository/assertions.rs b/pragma-node/src/infra/repositories/oo_repository/assertions.rs deleted file mode 100644 index 4dca0675..00000000 --- a/pragma-node/src/infra/repositories/oo_repository/assertions.rs +++ /dev/null @@ -1,234 +0,0 @@ -use crate::handlers::optimistic_oracle::types::{ - Assertion, AssertionDetails, DisputedAssertion, ResolvedAssertion, Status, -}; -#[allow(unused_imports)] -use diesel::prelude::*; -use diesel::sql_types::Bool; -use pragma_entities::models::optimistic_oracle_error::OptimisticOracleError; -use pragma_monitoring::{models::OORequest, schema::oo_requests}; - -// if no status provided, returns the list of all the available assertions -pub async fn get_assertions( - onchain_pool: &deadpool_diesel::postgres::Pool, - status: Option, - page: u32, - limit: u32, -) -> Result, OptimisticOracleError> { - let conn = onchain_pool - .get() - .await - .map_err(|_| OptimisticOracleError::DatabaseConnection)?; - - let status_clone = status.clone(); - - let results: Vec = conn - .interact(move |conn| { - let mut query = oo_requests::table.into_boxed(); - - if let Some(status) = status_clone { - match status.as_str() { - "settled" => query = query.filter(oo_requests::settled.eq(Some(true))), - "disputed" => query = query.filter(oo_requests::disputed.eq(Some(true))), - "active" => { - query = query.filter( - oo_requests::settled - .is_null() - .and(oo_requests::disputed.is_null()), - ); - } - _ => {} - } - }; - - query = query.filter(diesel::dsl::sql::("upper(_cursor) IS NULL")); - - query - .offset(i64::from((page - 1) * limit)) - .limit(i64::from(limit)) - .load(conn) - .map_err(|_| OptimisticOracleError::DatabaseConnection) - }) - .await - .map_err(|_| OptimisticOracleError::DatabaseConnection)??; - - let assertions: Vec = results - .into_iter() - .map(|request| Assertion { - assertion_id: request.assertion_id.to_string(), - claim: request.claim, - bond: request.bond, - expiration_time: request.expiration_timestamp, - identifier: request.identifier, - status: get_status(request.disputed, request.settled), - timestamp: request.updated_at, - currency: request.currency, - }) - .collect(); - - Ok(assertions) -} - -// Function to get assertion details -pub async fn get_assertion_details( - onchain_pool: &deadpool_diesel::postgres::Pool, - assertion_id: &str, -) -> Result { - let conn = onchain_pool - .get() - .await - .map_err(|_| OptimisticOracleError::DatabaseConnection)?; - - let assertion_id = assertion_id.to_string(); - - let request: OORequest = conn - .interact(move |conn| { - oo_requests::table - .filter(diesel::dsl::sql::("upper(_cursor) IS NULL")) - .filter(oo_requests::assertion_id.eq(&assertion_id)) - .first(conn) - 
.map_err(|_| OptimisticOracleError::AssertionDetailsIssue(assertion_id)) - }) - .await - .map_err(|_| OptimisticOracleError::DatabaseConnection)??; - - let status = get_status(request.disputed, request.settled); - Ok(AssertionDetails { - assertion: Assertion { - assertion_id: request.assertion_id.to_string(), - claim: request.claim, - bond: request.bond, - expiration_time: request.expiration_timestamp, - identifier: request.identifier, - status, - timestamp: request.updated_at, - currency: request.currency, - }, - domain_id: request.domain_id, - asserter: request.asserter, - disputer: request.disputer.unwrap_or("None".to_string()), - disputed: request.disputed.unwrap_or(false), - callback_recipient: request.callback_recipient, - dispute_id: request.dispute_id.unwrap_or("None".to_string()), - caller: request.caller, - settled: request.settled.unwrap_or(false), - settle_caller: request.settle_caller.unwrap_or("None".to_string()), - settlement_resolution: request.settlement_resolution.into(), - }) -} - -// Function to get disputed assertions -pub async fn get_disputed_assertions( - onchain_pool: &deadpool_diesel::postgres::Pool, - page: u32, - limit: u32, -) -> Result, OptimisticOracleError> { - let conn = onchain_pool - .get() - .await - .map_err(|_| OptimisticOracleError::DatabaseConnection)?; - - let results: Vec = conn - .interact(move |conn| { - let query = oo_requests::table - .filter(diesel::dsl::sql::("upper(_cursor) IS NULL")) - .filter(oo_requests::disputed.eq(true)) - .offset(i64::from((page - 1) * limit)) - .limit(i64::from(limit)); - - query - .load(conn) - .map_err(|_| OptimisticOracleError::DatabaseConnection) - }) - .await - .map_err(|_| OptimisticOracleError::DatabaseConnection)??; - - results - .into_iter() - .map(|request| { - let disputer = request.disputer.ok_or_else(|| { - OptimisticOracleError::DisputerNotSet(request.assertion_id.clone()) - })?; - - Ok(DisputedAssertion { - assertion: Assertion { - assertion_id: request.assertion_id.to_string(), - claim: request.claim, - bond: request.bond, - expiration_time: request.expiration_timestamp, - identifier: request.identifier, - status: Status::Disputed, - timestamp: request.updated_at, - currency: request.currency, - }, - disputer, - disputed_at: request.updated_at, - disputed_tx: request.updated_at_tx, - dispute_id: request.dispute_id.unwrap_or("None".to_string()), - }) - }) - .collect() -} - -// Function to get resolved assertions -pub async fn get_resolved_assertions( - onchain_pool: &deadpool_diesel::postgres::Pool, - page: u32, - limit: u32, -) -> Result, OptimisticOracleError> { - let conn = onchain_pool - .get() - .await - .map_err(|_| OptimisticOracleError::DatabaseConnection)?; - - let results: Vec = conn - .interact(move |conn| { - let query = oo_requests::table - .filter(diesel::dsl::sql::("upper(_cursor) IS NULL")) - .filter(oo_requests::settled.eq(true)) - .offset(i64::from((page - 1) * limit)) - .limit(i64::from(limit)); - - query - .load(conn) - .map_err(|_| OptimisticOracleError::DatabaseConnection) - }) - .await - .map_err(|_| OptimisticOracleError::DatabaseConnection)??; - - results - .into_iter() - .map(|request| { - let settled_address = request.settle_caller.clone().ok_or_else(|| { - OptimisticOracleError::SettlerNotSet(request.assertion_id.clone()) - })?; - - Ok(ResolvedAssertion { - assertion: Assertion { - assertion_id: request.assertion_id, - claim: request.claim, - bond: request.bond, - expiration_time: request.expiration_timestamp, - identifier: request.identifier, - status: Status::Settled, - 
timestamp: request.updated_at, - currency: request.currency, - }, - settled_address, - settlement_resolution: request.settlement_resolution.into(), - disputed: request.disputed.unwrap_or(false), - settled_at: request.updated_at, - settle_caller: request.settle_caller.unwrap_or("None".to_string()), - dispute_id: request.dispute_id.unwrap_or("None".to_string()), - settlement_tx: request.updated_at_tx, - }) - }) - .collect() -} - -const fn get_status(disputed: Option, settled: Option) -> Status { - match (disputed, settled) { - (Some(true), _) => Status::Disputed, - (_, Some(true)) => Status::Settled, - _ => Status::Active, - } -} diff --git a/pragma-node/src/infra/repositories/oo_repository/mod.rs b/pragma-node/src/infra/repositories/oo_repository/mod.rs deleted file mode 100644 index 5324bdaf..00000000 --- a/pragma-node/src/infra/repositories/oo_repository/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod assertions; diff --git a/pragma-node/src/infra/repositories/publisher_repository.rs b/pragma-node/src/infra/repositories/publisher_repository.rs index fb9f6c81..aa56e4af 100644 --- a/pragma-node/src/infra/repositories/publisher_repository.rs +++ b/pragma-node/src/infra/repositories/publisher_repository.rs @@ -1,17 +1,18 @@ -use pragma_entities::{adapt_infra_error, InfraError}; -use pragma_entities::{dto, Publishers}; +use pragma_entities::InfraError; +use pragma_entities::{Publishers, dto}; pub async fn get( pool: &deadpool_diesel::postgres::Pool, name: String, ) -> Result { - let conn = pool.get().await.map_err(adapt_infra_error)?; + let conn = pool.get().await.map_err(InfraError::DbPoolError)?; + let res = conn .as_ref() .interact(move |conn| Publishers::get_by_name(conn, name)) .await - .map_err(adapt_infra_error)? - .map_err(adapt_infra_error) + .map_err(InfraError::DbInteractionError)? 
+ .map_err(InfraError::DbResultError) .map(dto::Publisher::from)?; Ok(res) diff --git a/pragma-node/src/infra/repositories/utils.rs b/pragma-node/src/infra/repositories/utils.rs new file mode 100644 index 00000000..1786fcbb --- /dev/null +++ b/pragma-node/src/infra/repositories/utils.rs @@ -0,0 +1,14 @@ +use bigdecimal::BigDecimal; +use bigdecimal::num_bigint::ToBigInt; + +pub trait HexFormat { + fn to_hex_string(&self) -> String; +} + +impl HexFormat for BigDecimal { + fn to_hex_string(&self) -> String { + let bigint = self.to_bigint().unwrap_or_default(); + + format!("0x{bigint:x}") + } +} diff --git a/pragma-node/src/infra/rpc/mod.rs b/pragma-node/src/infra/rpc/mod.rs new file mode 100644 index 00000000..4c311e9b --- /dev/null +++ b/pragma-node/src/infra/rpc/mod.rs @@ -0,0 +1,97 @@ +use std::{ + collections::HashMap, + sync::{Arc, LazyLock}, +}; + +use starknet::providers::Provider; +use starknet::{ + core::{ + types::{BlockId, BlockTag, FunctionCall}, + utils::{cairo_short_string_to_felt, get_selector_from_name}, + }, + macros::felt_hex, + providers::{JsonRpcClient, jsonrpc::HttpTransport}, +}; +use starknet_crypto::Felt; +use url::Url; + +use pragma_common::{Pair, starknet::StarknetNetwork}; +use pragma_entities::InfraError; + +pub const ENV_MAINNET_RPC_URL: &str = "MAINNET_RPC_URL"; +pub const ENV_SEPOLIA_RPC_URL: &str = "SEPOLIA_RPC_URL"; + +pub type RpcClients = HashMap>>; + +pub static ORACLE_ADDRESS_PER_NETWORK: LazyLock> = + LazyLock::new(|| { + let mut addresses = HashMap::new(); + addresses.insert( + StarknetNetwork::Mainnet, + felt_hex!("0x2a85bd616f912537c50a49a4076db02c00b29b2cdc8a197ce92ed1837fa875b"), + ); + addresses.insert( + StarknetNetwork::Sepolia, + felt_hex!("0x36031daa264c24520b11d93af622c848b2499b66b41d611bac95e13cfca131a"), + ); + addresses + }); + +/// Init the RPC clients based on the provided ENV variables. +/// Panics if the env are not correctly set. +pub fn init_rpc_clients() -> HashMap>> { + let mainnet_rpc_url: Url = std::env::var(ENV_MAINNET_RPC_URL) + .unwrap_or("https://free-rpc.nethermind.io/mainnet-juno".to_string()) + .parse() + .expect("Invalid MAINNET_RPC_URL provided."); + let mainnet_client = JsonRpcClient::new(HttpTransport::new(mainnet_rpc_url)); + + let sepolia_rpc_url: Url = std::env::var(ENV_SEPOLIA_RPC_URL) + .unwrap_or("https://free-rpc.nethermind.io/sepolia-juno".to_string()) + .parse() + .expect("Invalid SEPOLIA_RPC_URL provided."); + let sepolia_client = JsonRpcClient::new(HttpTransport::new(sepolia_rpc_url)); + + let mut rpc_clients = HashMap::new(); + rpc_clients.insert(StarknetNetwork::Mainnet, Arc::new(mainnet_client)); + rpc_clients.insert(StarknetNetwork::Sepolia, Arc::new(sepolia_client)); + + rpc_clients +} + +/// Calls the `get_decimals` endpoint of pragma oracle and returns the result. 
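A hedged call-site sketch for the function defined just below; it assumes the sketch lives next to `init_rpc_clients` in this module, and it needs a live RPC endpoint, so treat it as an illustration rather than a test:

```rust
use pragma_common::{Pair, starknet::StarknetNetwork};
use pragma_entities::InfraError;

/// Resolve the decimals of BTC/USD on mainnet through the env-configured client.
async fn decimals_for_btc_usd() -> Result<u32, InfraError> {
    let rpc_clients = init_rpc_clients();
    let client = rpc_clients
        .get(&StarknetNetwork::Mainnet)
        .ok_or(InfraError::NoRpcAvailable(StarknetNetwork::Mainnet))?;
    call_get_decimals(client, &Pair::from("BTC/USD"), StarknetNetwork::Mainnet).await
}
```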
+pub async fn call_get_decimals(
+    rpc_client: &Arc<JsonRpcClient<HttpTransport>>,
+    pair: &Pair,
+    network: StarknetNetwork,
+) -> Result<u32, InfraError> {
+    let pair_id = cairo_short_string_to_felt(&pair.to_pair_id())
+        .map_err(|_| InfraError::PairNotFound(pair.to_pair_id()))?;
+
+    let Some(pragma_oracle_address) = ORACLE_ADDRESS_PER_NETWORK.get(&network) else {
+        unreachable!()
+    };
+
+    let request = FunctionCall {
+        contract_address: *pragma_oracle_address,
+        entry_point_selector: get_selector_from_name("get_decimals")
+            .map_err(|e| InfraError::RpcError(format!("{e:?}")))?,
+        calldata: vec![Felt::ZERO, pair_id],
+    };
+
+    let call_result = rpc_client
+        .call(request, BlockId::Tag(BlockTag::Pending))
+        .await
+        .map_err(|e| InfraError::RpcError(format!("{e:?}")))?;
+
+    let Some(felt_decimals) = call_result.first() else {
+        return Err(InfraError::PairNotFound(pair.to_pair_id()));
+    };
+
+    let decimals: u32 = felt_decimals
+        .to_biguint()
+        .try_into()
+        .map_err(|_| InfraError::RpcError(format!("Converting {felt_decimals} to u32")))?;
+
+    Ok(decimals)
+}
diff --git a/pragma-node/src/lib.rs b/pragma-node/src/lib.rs
new file mode 100644
index 00000000..1d190db6
--- /dev/null
+++ b/pragma-node/src/lib.rs
@@ -0,0 +1,10 @@
+pub mod caches;
+pub mod config;
+pub mod constants;
+pub mod errors;
+pub mod handlers;
+pub mod infra;
+pub mod metrics;
+pub mod server;
+pub mod state;
+pub mod utils;
diff --git a/pragma-node/src/main.rs b/pragma-node/src/main.rs
index b8c99085..87f8830d 100644
--- a/pragma-node/src/main.rs
+++ b/pragma-node/src/main.rs
@@ -1,67 +1,26 @@
-pub mod caches;
-pub mod config;
-pub mod constants;
-pub mod errors;
-pub mod handlers;
-pub mod infra;
-pub mod metrics;
-pub mod server;
-pub mod utils;
-
-use dashmap::DashMap;
-use dotenvy::dotenv;
-use handlers::publish_entry_ws::PublisherSession;
-use metrics::MetricsRegistry;
-use std::fmt;
 use std::sync::Arc;

-use caches::CacheRegistry;
-use deadpool_diesel::postgres::Pool;
-use starknet::signers::SigningKey;
+use dotenvy::dotenv;

 use pragma_entities::connection::{ENV_OFFCHAIN_DATABASE_URL, ENV_ONCHAIN_DATABASE_URL};

-use crate::config::config;
-use crate::utils::PragmaSignerBuilder;
-
-#[derive(Clone)]
-pub struct AppState {
-    // Databases pools
-    offchain_pool: Pool,
-    onchain_pool: Pool,
-    // Redis connection
-    redis_client: Option<Arc<redis::Client>>,
-    // Database caches
-    caches: Arc<CacheRegistry>,
-    // Pragma Signer used for StarkEx signing
-    pragma_signer: Option<SigningKey>,
-    // Metrics
-    metrics: Arc<MetricsRegistry>,
-    // Publisher sessions
-    publisher_sessions: Arc<DashMap<String, PublisherSession>>,
-}
-
-impl fmt::Debug for AppState {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("AppState")
-            .field("redis_client", &self.redis_client)
-            .field("caches", &self.caches)
-            .field("pragma_signer", &self.pragma_signer)
-            .field("metrics", &self.metrics)
-            .finish_non_exhaustive()
-    }
-}
+use pragma_node::caches::CacheRegistry;
+use pragma_node::config::config;
+use pragma_node::infra::cloud::build_signer;
+use pragma_node::infra::rpc::init_rpc_clients;
+use pragma_node::metrics::MetricsRegistry;
+use pragma_node::state::AppState;

 #[tokio::main]
 #[tracing::instrument]
 async fn main() -> Result<(), Box<dyn std::error::Error>> {
     dotenv().ok();

-    // We export our telemetry - so we can monitor the API through Signoz.
- let otel_endpoint = std::env::var("OTEL_EXPORTER_OTLP_ENDPOINT") - .unwrap_or_else(|_| "http://signoz.dev.pragma.build:4317".to_string()); - pragma_common::telemetry::init_telemetry("pragma-node".into(), otel_endpoint, None)?; + let otel_endpoint = std::env::var("OTEL_EXPORTER_OTLP_ENDPOINT").ok(); + pragma_common::telemetry::init_telemetry("pragma-node", otel_endpoint) + .expect("Failed to initialize telemetry"); + // Init config from env variables let config = config().await; // Init the database pools @@ -76,38 +35,18 @@ async fn main() -> Result<(), Box> { // Init the database caches let caches = CacheRegistry::new(); - // Build the pragma signer - let signer_builder = if config.is_production_mode() { - PragmaSignerBuilder::new().production_mode() - } else { - PragmaSignerBuilder::new().non_production_mode() - }; - let pragma_signer = signer_builder.build().await; - - // Init the redis client - Optionnal, only for endpoints that interact with Redis, - // i.e just the Merkle Feeds endpoint for now. - let redis_client = - pragma_entities::connection::init_redis_client(config.redis_host(), config.redis_port()) - .map_or_else( - |_| { - tracing::warn!( - "⚠ Could not create the Redis client. Merkle feeds endpoints won't work." - ); - None - }, - |client| Some(Arc::new(client)), - ); + // Build the pragma signer based on cloud environment + let pragma_signer = build_signer(config.cloud_env(), config.is_production_mode()).await; let state = AppState { offchain_pool, onchain_pool, - redis_client, caches: Arc::new(caches), pragma_signer, metrics: MetricsRegistry::new(), - publisher_sessions: Arc::new(DashMap::new()), + rpc_clients: init_rpc_clients(), }; - server::run_api_server(config, state).await; + pragma_node::server::run_api_server(config, state).await; // Ensure that the tracing provider is shutdown correctly opentelemetry::global::shutdown_tracer_provider(); diff --git a/pragma-node/src/metrics.rs b/pragma-node/src/metrics.rs index b85825a6..cb2d3515 100644 --- a/pragma-node/src/metrics.rs +++ b/pragma-node/src/metrics.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use opentelemetry::{metrics::Counter, KeyValue}; +use opentelemetry::{KeyValue, metrics::Counter}; use strum::Display; #[derive(Debug)] diff --git a/pragma-node/src/server/mod.rs b/pragma-node/src/server/mod.rs index 96aca464..8b7dd90b 100644 --- a/pragma-node/src/server/mod.rs +++ b/pragma-node/src/server/mod.rs @@ -5,17 +5,17 @@ use axum_tracing_opentelemetry::middleware::{OtelAxumLayer, OtelInResponseLayer} use std::net::SocketAddr; use tower_http::cors::CorsLayer; use utoipa::{ + Modify, OpenApi, openapi::{ - security::{ApiKey, ApiKeyValue, SecurityScheme}, ServerBuilder, ServerVariableBuilder, + security::{ApiKey, ApiKeyValue, SecurityScheme}, }, - Modify, OpenApi, }; use utoipauto::utoipauto; use crate::errors::internal_error; use crate::server::middlewares::TimingLayer; -use crate::{config::Config, server::routes::app_router, AppState}; +use crate::{config::Config, server::routes::app_router, state::AppState}; struct SecurityAddon; @@ -34,22 +34,23 @@ struct ServerAddon; impl Modify for ServerAddon { fn modify(&self, openapi: &mut utoipa::openapi::OpenApi) { + // TODO: Add back enum_values with api.mainnet when we have production environment let server_variable = ServerVariableBuilder::new() - .default_value("api.dev") - .enum_values(Some(vec!["api.dev", "api.prod"])) + .default_value("api.devnet") + // .enum_values(Some(vec!["api.devnet", "api.mainnet"])) .build(); - openapi.servers = Some(vec![ServerBuilder::new() - 
.url("https://{environment}.pragma.build") - .parameter("environment", server_variable) - .build()]); + openapi.servers = Some(vec![ + ServerBuilder::new() + .url("https://{environment}.pragma.build") + .parameter("environment", server_variable) + .build(), + ]); } } #[tracing::instrument(skip(state))] pub async fn run_api_server(config: &Config, state: AppState) { - #[utoipauto( - paths = "./pragma-node/src, ./pragma-common/src from pragma_common, ./pragma-entities/src from pragma_entities" - )] + #[utoipauto(paths = "./pragma-node/src, ./pragma-entities/src from pragma_entities")] #[derive(OpenApi)] #[openapi( modifiers(&SecurityAddon, &ServerAddon), @@ -61,8 +62,8 @@ pub async fn run_api_server(config: &Config, state: AppState) { // Uncomment to generate openapi.json // TODO: move to a separate bin - // let json = ApiDoc::openapi().to_json().unwrap(); - // std::fs::write("openapi.json", json).unwrap(); + let json = ApiDoc::openapi().to_json().unwrap(); + std::fs::write("openapi.json", json).unwrap(); let app = app_router::(state.clone()) .with_state(state) diff --git a/pragma-node/src/server/routes.rs b/pragma-node/src/server/routes.rs index 0ca00e1d..141c83b2 100644 --- a/pragma-node/src/server/routes.rs +++ b/pragma-node/src/server/routes.rs @@ -1,43 +1,29 @@ +use axum::Router; use axum::http::StatusCode; use axum::response::IntoResponse; -use axum::routing::{get, post}; -use axum::Router; +use axum::routing::get; use utoipa::OpenApi as OpenApiT; -// use utoipa_swagger_ui::SwaggerUi; +use utoipa_swagger_ui::SwaggerUi; -use crate::handlers::merkle_feeds::{ - get_merkle_proof::get_merkle_feeds_proof, get_option::get_merkle_feeds_option, -}; use crate::handlers::onchain::{ get_checkpoints::get_onchain_checkpoints, get_entry::get_onchain_entry, get_history::get_onchain_history, get_publishers::get_onchain_publishers, subscribe_to_ohlc::subscribe_to_onchain_ohlc, }; -use crate::handlers::optimistic_oracle::{ - get_assertion_details::get_assertion_details, get_assertions::get_assertions, - get_disputed_assertions::get_disputed_assertions, - get_resolved_assertions::get_resolved_assertions, -}; -use crate::handlers::stream_entry::stream_entry; -use crate::handlers::{ - create_entries, create_future_entries, get_entry, get_expiries, get_ohlc, get_volatility, - publish_entry, subscribe_to_entry, subscribe_to_price, -}; -use crate::AppState; +use crate::handlers::stream::stream_multi::stream_entry_multi_pair; +use crate::handlers::websocket::{subscribe_to_entry, subscribe_to_price}; +use crate::handlers::{get_entry, get_ohlc}; +use crate::state::AppState; #[allow(clippy::extra_unused_type_parameters)] pub fn app_router(state: AppState) -> Router { - // let open_api = T::openapi(); - // TODO: Add swagger ui + let open_api = T::openapi(); Router::new() - // .merge(SwaggerUi::new("/node/swagger-ui").url("/node/api-docs/openapi.json", open_api)) + .merge(SwaggerUi::new("/node/v1/docs").url("/node/v1/docs/openapi.json", open_api)) .route("/node", get(root)) .nest("/node/v1/data", data_routes(state.clone())) .nest("/node/v1/onchain", onchain_routes(state.clone())) - .nest("/node/v1/aggregation", aggregation_routes(state.clone())) - .nest("/node/v1/volatility", volatility_routes(state.clone())) - .nest("/node/v1/merkle_feeds", merkle_feeds_routes(state.clone())) - .nest("/node/v1/optimistic", optimistic_oracle_routes(state)) + .nest("/node/v1/aggregation", aggregation_routes(state)) .fallback(handler_404) } @@ -54,14 +40,10 @@ async fn handler_404() -> impl IntoResponse { fn data_routes(state: 
AppState) -> Router { Router::new() - .route("/publish", post(create_entries)) - .route("/publish_future", post(create_future_entries)) - .route("/publish_ws", get(publish_entry)) .route("/{base}/{quote}", get(get_entry)) - .route("/{base}/{quote}/future_expiries", get(get_expiries)) .route("/subscribe", get(subscribe_to_entry)) .route("/price/subscribe", get(subscribe_to_price)) - .route("/{base}/{quote}/stream", get(stream_entry)) + .route("/multi/stream", get(stream_entry_multi_pair)) .with_state(state) } @@ -75,30 +57,8 @@ fn onchain_routes(state: AppState) -> Router { .with_state(state) } -fn volatility_routes(state: AppState) -> Router { - Router::new() - .route("/{base}/{quote}", get(get_volatility)) - .with_state(state) -} - fn aggregation_routes(state: AppState) -> Router { Router::new() .route("/candlestick/{base}/{quote}", get(get_ohlc)) .with_state(state) } - -fn merkle_feeds_routes(state: AppState) -> Router { - Router::new() - .route("/proof/{option_hash}", get(get_merkle_feeds_proof)) - .route("/options/{instrument}", get(get_merkle_feeds_option)) - .with_state(state) -} - -fn optimistic_oracle_routes(state: AppState) -> Router { - Router::new() - .route("/assertions/{assertion_id}", get(get_assertion_details)) - .route("/assertions", get(get_assertions)) - .route("/disputed-assertions", get(get_disputed_assertions)) - .route("/resolved-assertions", get(get_resolved_assertions)) - .with_state(state) -} diff --git a/pragma-node/src/state.rs b/pragma-node/src/state.rs new file mode 100644 index 00000000..738ac2c6 --- /dev/null +++ b/pragma-node/src/state.rs @@ -0,0 +1,33 @@ +use std::sync::Arc; + +use deadpool_diesel::postgres::Pool; +use starknet::signers::SigningKey; + +use crate::caches::CacheRegistry; +use crate::infra::rpc::RpcClients; +use crate::metrics::MetricsRegistry; + +#[derive(Clone)] +pub struct AppState { + // Databases pools + pub offchain_pool: Pool, + pub onchain_pool: Pool, + // Starknet RPC clients for mainnet & sepolia + pub rpc_clients: RpcClients, + // Database caches + pub caches: Arc, + // Pragma Signer used for StarkEx signing + pub pragma_signer: Option, + // Metrics + pub metrics: Arc, +} + +impl std::fmt::Debug for AppState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("AppState") + .field("caches", &self.caches) + .field("pragma_signer", &self.pragma_signer) + .field("metrics", &self.metrics) + .finish_non_exhaustive() + } +} diff --git a/pragma-node/src/utils/conversion.rs b/pragma-node/src/utils/conversion.rs index 8d03e1bd..77e81a45 100644 --- a/pragma-node/src/utils/conversion.rs +++ b/pragma-node/src/utils/conversion.rs @@ -12,9 +12,7 @@ pub fn convert_via_quote( if b_price == BigDecimal::from(0) { return Err(InfraError::InternalServerError); } - let power = BigDecimal::from(10_i64.pow(output_decimals)); - Ok(a_price * power / b_price) } diff --git a/pragma-node/src/utils/kafka.rs b/pragma-node/src/utils/kafka.rs deleted file mode 100644 index b46b730a..00000000 --- a/pragma-node/src/utils/kafka.rs +++ /dev/null @@ -1,30 +0,0 @@ -use pragma_entities::{EntryError, NewEntry}; - -use crate::infra::kafka; - -/// Publish entries to Kafka -/// -/// Arguments: -/// * `entries`: Entries to publish -/// * `topic`: Kafka topic -/// * `publisher_name`: Publisher name -/// -/// Returns: -/// * `()`: Nothing -/// * `EntryError::PublishData`: Error if something goes wrong -pub async fn publish_to_kafka( - entries: Vec, - topic: String, - publisher_name: &str, -) -> Result<(), EntryError> { - let data = 
serde_json::to_vec(&entries).map_err(|e| EntryError::PublishData(e.to_string()))?; - - if let Err(e) = kafka::send_message(&topic, &data, publisher_name).await { - tracing::error!("Error sending message to kafka: {:?}", e); - return Err(EntryError::PublishData(String::from( - "Error sending message to kafka", - ))); - }; - - Ok(()) -} diff --git a/pragma-node/src/utils/mod.rs b/pragma-node/src/utils/mod.rs index d2cfe6c8..b261b17b 100644 --- a/pragma-node/src/utils/mod.rs +++ b/pragma-node/src/utils/mod.rs @@ -1,58 +1,30 @@ -pub mod aws; pub mod conversion; pub mod custom_extractors; -pub mod kafka; pub mod macros; -pub mod pricer; +pub mod signing; pub mod sql; +pub mod starkex; pub mod ws; -pub use aws::PragmaSignerBuilder; pub use conversion::{convert_via_quote, format_bigdecimal_price, normalize_to_decimals}; pub use custom_extractors::path_extractor::PathExtractor; -pub use kafka::publish_to_kafka; -use moka::future::Cache; -use pragma_common::types::entries::Entry; -use pragma_common::types::pair::Pair; -use pragma_entities::dto::Publisher; +use pragma_common::starknet::StarknetNetwork; pub use ws::*; -use std::collections::HashMap; - -use bigdecimal::num_bigint::ToBigInt; -use bigdecimal::{BigDecimal, ToPrimitive}; +use bigdecimal::BigDecimal; +use bigdecimal::num_bigint::{BigUint, ToBigInt}; use chrono::NaiveDateTime; use deadpool_diesel::postgres::Pool; -use pragma_common::types::Network; -use pragma_entities::{ - convert_timestamp_to_datetime, Entry as EntityEntry, EntryError, FutureEntry, NewEntry, - PublisherError, -}; -use starknet_crypto::{Felt, Signature}; +use moka::future::Cache; +use pragma_entities::dto::Publisher; +use pragma_entities::{Entry as EntityEntry, EntryError, FutureEntry, PublisherError}; +use starknet_crypto::Felt; use crate::infra::repositories::publisher_repository; use crate::infra::repositories::{ entry_repository::MedianEntry, onchain_repository::entry::get_existing_pairs, }; -const ONE_YEAR_IN_SECONDS: f64 = 3_153_600_f64; - -/// From a map of currencies and their decimals, returns the number of decimals for a given pair. -/// If the currency is not found in the map, the default value is 8. -pub(crate) fn get_decimals_for_pair( - currencies: &HashMap, - pair_id: &str, -) -> u32 { - let pair = Pair::from(pair_id); - let base_decimals = currencies - .get(&pair.base) - .map_or(8, |d| d.to_u32().unwrap_or(8)); - let quote_decimals = currencies - .get(&pair.quote) - .map_or(8, |d| d.to_u32().unwrap_or(8)); - std::cmp::min(base_decimals, quote_decimals) -} - /// Returns the mid price between two prices. pub fn get_mid_price(low: &BigDecimal, high: &BigDecimal) -> BigDecimal { (low + high) / BigDecimal::from(2) @@ -86,7 +58,11 @@ pub(crate) fn compute_median_price_and_time( /// Given a pair and a network, returns if it exists in the /// onchain database. 
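Further down this hunk, `hex_string_to_bigdecimal` joins the utils; a round-trip sketch of its behavior, assuming the test module lands in the same file:

```rust
#[cfg(test)]
mod hex_parse_tests {
    use super::*;
    use bigdecimal::BigDecimal;

    #[test]
    fn parses_hex_with_and_without_prefix() {
        // The "0x" prefix is optional; the result is an integer-valued BigDecimal.
        assert_eq!(
            hex_string_to_bigdecimal("0xff").unwrap(),
            BigDecimal::from(255)
        );
        assert_eq!(
            hex_string_to_bigdecimal("ff").unwrap(),
            BigDecimal::from(255)
        );
        // Invalid digits surface as an error instead of a silent zero.
        assert!(hex_string_to_bigdecimal("0xzz").is_err());
    }
}
```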
-pub(crate) async fn is_onchain_existing_pair(pool: &Pool, pair: &String, network: Network) -> bool { +pub(crate) async fn is_onchain_existing_pair( + pool: &Pool, + pair: &String, + network: StarknetNetwork, +) -> bool { let existings_pairs = get_existing_pairs(pool, network) .await .expect("Couldn't get the existing pairs from the database."); @@ -94,52 +70,6 @@ pub(crate) async fn is_onchain_existing_pair(pool: &Pool, pair: &String, network existings_pairs.into_iter().any(|p| p.pair_id == *pair) } -pub fn convert_entry_to_db(entry: &Entry, signature: &Signature) -> Result { - let dt = convert_timestamp_to_datetime!(entry.base.timestamp)?; - - Ok(NewEntry { - pair_id: entry.pair_id.clone(), - publisher: entry.base.publisher.clone(), - source: entry.base.source.clone(), - timestamp: dt, - publisher_signature: format!("0x{signature}"), - price: entry.price.into(), - }) -} - -/// Computes the volatility from a list of entries. -/// The volatility is computed as the annualized standard deviation of the log returns. -/// The log returns are computed as the natural logarithm of the ratio between two consecutive median prices. -/// The annualized standard deviation is computed as the square root of the variance multiplied by 10^8. -pub(crate) fn compute_volatility(entries: &[MedianEntry]) -> f64 { - if entries.len() < 2 { - return 0.0; - } - let mut values = Vec::new(); - for i in 1..entries.len() { - if entries[i].median_price.to_f64().unwrap_or(0.0) > 0.0 - && entries[i - 1].median_price.to_f64().unwrap() > 0.0 - && (entries[i].time - entries[i - 1].time).num_seconds() > 0 - { - let log_return = (entries[i].median_price.to_f64().unwrap() - / entries[i - 1].median_price.to_f64().unwrap()) - .ln() - .powi(2); - - let time = (entries[i].time - entries[i - 1].time) - .num_seconds() - .to_f64() - .unwrap() - / ONE_YEAR_IN_SECONDS; - - values.push((log_return, time)); - } - } - - let variance: f64 = values.iter().map(|v| v.0 / v.1).sum::() / values.len() as f64; - variance.sqrt() * 10_f64.powi(8) -} - /// Converts a big decimal price to a hex string 0x prefixed. pub(crate) fn big_decimal_price_to_hex(price: &BigDecimal) -> String { format!( @@ -148,6 +78,22 @@ pub(crate) fn big_decimal_price_to_hex(price: &BigDecimal) -> String { ) } +pub(crate) fn hex_string_to_bigdecimal( + hex_str: &str, +) -> Result> { + // Remove "0x" prefix if present + let cleaned_hex = hex_str.trim_start_matches("0x"); + + // Parse hex string to BigUint + let parsed_big_int = + BigUint::parse_bytes(cleaned_hex.as_bytes(), 16).ok_or("Failed to parse hex string")?; + let big_int = parsed_big_int + .to_bigint() + .ok_or("Failed to convert to BigInt")?; + let decimal = BigDecimal::new(big_int, 0); + + Ok(decimal) +} /// Given a list of pairs, only return the ones that exists in the /// database in separate lists. /// TODO: handle future pairs? @@ -221,7 +167,7 @@ pub async fn validate_publisher( tracing::debug!("No cache found for publisher: {publisher_name}, fetching the database."); publisher_repository::get(pool, publisher_name.to_string()) .await - .map_err(EntryError::InfraError)? + .map_err(|_| EntryError::PublisherNotFound(publisher_name.into()))? 
}; publisher.assert_is_active()?; @@ -236,108 +182,3 @@ pub async fn validate_publisher( Ok((public_key, account_address)) } - -#[cfg(test)] -mod tests { - use super::*; - use chrono::{TimeZone, Utc}; - - fn new_entry(median_price: u32, timestamp: i64) -> MedianEntry { - MedianEntry { - time: Utc - .timestamp_opt(timestamp, 0) - .single() - .expect("Invalid timestamp") - .naive_utc(), - median_price: median_price.into(), - num_sources: 5, - } - } - - #[test] - fn test_compute_volatility_no_entries() { - let entries = vec![]; - let epsilon = 1e-10; - assert!((compute_volatility(&entries) - 0.0).abs() < epsilon); - } - - #[test] - fn test_compute_volatility_simple() { - let entries = vec![new_entry(100, 1_640_995_200), new_entry(110, 1_641_081_600)]; - - let expected_log_return = (110_f64 / 100_f64).ln().powi(2); - let expected_time = f64::from(1_641_081_600 - 1_640_995_200) / ONE_YEAR_IN_SECONDS; - let expected_variance = expected_log_return / expected_time; - let expected_volatility = expected_variance.sqrt() * 10_f64.powi(8); - let computed_volatility = compute_volatility(&entries); - let epsilon: f64 = 1e-6; - - assert!((computed_volatility - expected_volatility).abs() < epsilon); - } - - #[test] - fn test_compute_volatility() { - let entries = vec![ - new_entry(47_686, 1_640_995_200), - new_entry(47_345, 1_641_081_600), - new_entry(46_458, 1_641_168_000), - new_entry(45_897, 1_641_254_400), - new_entry(43_569, 1_641_340_800), - ]; - - let epsilon = 1e-10; - assert!((compute_volatility(&entries) - 17_264_357.963_673_33).abs() < epsilon); - } - - #[test] - fn test_compute_volatility_zero_price() { - let entries = vec![ - new_entry(47_686, 1_640_995_200), - new_entry(0, 1_641_081_600), - new_entry(46_458, 1_641_168_000), - ]; - assert!(f64::is_nan(compute_volatility(&entries))); - } - - #[test] - fn test_compute_volatility_constant_prices() { - let entries = vec![ - new_entry(47_686, 1_640_995_200), - new_entry(47_686, 1_641_081_600), - new_entry(47_686, 1_641_168_000), - new_entry(47_686, 1_641_254_400), - new_entry(47_686, 1_641_340_800), - ]; - - let epsilon = 1e-10; - assert!((compute_volatility(&entries) - 0.0).abs() < epsilon); - } - - #[test] - fn test_compute_volatility_increasing_prices() { - let entries = vec![ - new_entry(13_569, 1_640_995_200), - new_entry(15_897, 1_641_081_600), - new_entry(16_458, 1_641_168_000), - new_entry(17_345, 1_641_254_400), - new_entry(47_686, 1_641_340_800), - ]; - - let epsilon = 1e-10; - assert!((compute_volatility(&entries) - 309_805_011.672_835_77).abs() < epsilon); - } - - #[test] - fn test_compute_volatility_decreasing_prices() { - let entries = vec![ - new_entry(27_686, 1_640_995_200), - new_entry(27_345, 1_641_081_600), - new_entry(26_458, 1_641_168_000), - new_entry(25_897, 1_641_254_400), - new_entry(23_569, 1_641_340_800), - ]; - - let epsilon = 1e-10; - assert!((compute_volatility(&entries) - 31_060_897.843_919_14_f64).abs() < epsilon); - } -} diff --git a/pragma-node/src/utils/pricer.rs b/pragma-node/src/utils/pricer.rs deleted file mode 100644 index 998e328c..00000000 --- a/pragma-node/src/utils/pricer.rs +++ /dev/null @@ -1,218 +0,0 @@ -use std::collections::HashMap; - -use bigdecimal::{BigDecimal, ToPrimitive}; -use deadpool_diesel::postgres::Pool; -use pragma_common::types::DataType; -use pragma_entities::{Currency, EntryError}; - -use crate::infra::repositories::entry_repository::{ - get_current_median_entries_with_components, MedianEntryWithComponents, -}; - -#[async_trait::async_trait] -pub trait Pricer { - fn new(pairs: Vec, 
pair_type: DataType) -> Self; - async fn compute(&self, db_pool: &Pool) -> Result, EntryError>; -} - -// ======================================= - -pub struct IndexPricer { - pairs: Vec, - pair_type: DataType, -} - -/// Computes the most recent index price for a list of pairs. -/// The index price is the median of the pairs. -#[async_trait::async_trait] -impl Pricer for IndexPricer { - fn new(pairs: Vec, pair_type: DataType) -> Self { - Self { pairs, pair_type } - } - - #[tracing::instrument(skip(self, db_pool), fields( - pairs_count = self.pairs.len(), - pair_type = ?self.pair_type - ))] - async fn compute(&self, db_pool: &Pool) -> Result, EntryError> { - if self.pairs.is_empty() { - return Ok(vec![]); - } - get_current_median_entries_with_components(db_pool, &self.pairs, self.pair_type) - .await - .map_err(|e| e.to_entry_error(&self.pairs.join(","))) - } -} - -// ======================================= - -/// Computes the mark price for a list of pairs. -/// The mark price can be computed with two methods: -/// 1. if the quote asset is USD, we just return the median price of the recent -/// perp entries. -/// 2. if the quote asset is a stablecoin, we compute the median price of the -/// spot stablecoin/USD pairs and then we divide the median price of the perp -/// pairs by the median price of the stablecoin. -pub struct MarkPricer { - pairs: Vec, - pair_type: DataType, -} - -impl MarkPricer { - /// Builds the stablecoin/USD pairs from the non USD pairs. - /// Example: ["BTC/USDT", "ETH/USDT"] -> ["USDT/USD"] - #[tracing::instrument] - fn build_stable_to_usd_pairs(non_usd_pairs: &[String]) -> Vec { - non_usd_pairs - .iter() - .map(|pair| format!("{}/USD", pair.split('/').last().unwrap())) - .collect() - } - - /// Computes the stablecoin/USD pairs median entries. - #[tracing::instrument(skip(db_pool))] - async fn get_stablecoins_index_entries( - db_pool: &Pool, - stablecoin_pairs: &[String], - ) -> Result, EntryError> { - let stable_to_usd_pairs = Self::build_stable_to_usd_pairs(stablecoin_pairs); - let stablecoins_index_pricer = IndexPricer::new(stable_to_usd_pairs, DataType::SpotEntry); - stablecoins_index_pricer.compute(db_pool).await - } - - /// Retrieves the number of decimals for quote stablecoins. - /// Example: ["BTC/USDT", "ETH/USDT"] -> {"USDT": 6} - #[tracing::instrument(skip(db_pool))] - async fn get_stablecoins_decimals( - db_pool: &Pool, - stablecoin_pairs: Vec, - ) -> Result, EntryError> { - let conn = db_pool - .get() - .await - .map_err(|_| EntryError::InternalServerError)?; - let stablecoins_names: Vec = stablecoin_pairs - .iter() - // safe unwrap since we know the pairs are formatted "XXX/YYY" - .map(|pair| pair.split('/').last().unwrap().to_string()) - .collect(); - let decimals = conn - .interact(move |conn| Currency::get_decimals_for(conn, stablecoins_names)) - .await - .expect("Couldn't get the decimals for the stablecoins") - .expect("Couldn't get table result") - .into_iter() - .collect(); - Ok(decimals) - } - - /// Computes the non USD quoted pairs median entries. - #[tracing::instrument(skip(db_pool), fields(pairs_count = pairs.len()))] - async fn get_pairs_entries( - db_pool: &Pool, - pairs: &[String], - pair_type: DataType, - ) -> Result, EntryError> { - let pairs_entries = IndexPricer::new(pairs.to_vec(), pair_type); - pairs_entries.compute(db_pool).await - } - - /// Given the median price of a perp pair, the median price of the spot - /// stablecoin/USD pair and the number of decimals of the stablecoin, computes - /// the mark price. 
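(Editor's aside: since `pricer.rs` is deleted wholesale, the mark-price arithmetic described in the doc comment above is easier to see with concrete numbers. A worked sketch with illustrative values, a USDT-quoted perp and a USDT/USD spot price stored with 8 decimals; the names and figures here are hypothetical, not the deleted implementation itself.)

```rust
use bigdecimal::BigDecimal;

// Mark price = perp_price / (stable_usd_price / 10^decimals).
// E.g. a BTC/USDT perp at 50_000, with USDT/USD spot stored as
// 99_900_000 at 8 decimals (i.e. 0.999 USD), gives ~50_050 USD.
fn compute_mark_price(perp: &BigDecimal, spot_usd: &BigDecimal, decimals: u32) -> BigDecimal {
    let scaler = BigDecimal::from(10_u64.pow(decimals));
    perp / (spot_usd / scaler)
}

fn main() {
    let mark = compute_mark_price(
        &BigDecimal::from(50_000),
        &BigDecimal::from(99_900_000_u64),
        8,
    );
    println!("mark price = {mark}");
}
```

Dividing by the stablecoin/USD rate (rather than multiplying) is what converts a USDT-quoted perp price into USD terms.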
- #[tracing::instrument] - fn compute_mark_price( - perp_pair_price: &BigDecimal, - spot_usd_price: &BigDecimal, - decimals: &BigDecimal, - ) -> BigDecimal { - let decimals_as_u32 = decimals - .to_u32() - .ok_or(EntryError::InternalServerError) - .unwrap(); - let scaler = BigDecimal::from(10_u32.pow(decimals_as_u32)); - let spot_usd_price = spot_usd_price / scaler; - perp_pair_price / spot_usd_price - } - - /// Builds the complete list of entries from the median price of the spot - /// stablecoin/USD pairs and the median price of the perp pairs. - #[tracing::instrument( - skip(stablecoins_spot_entries, stablecoins_decimals, pairs_perp_entries), - fields( - spot_entries = stablecoins_spot_entries.len(), - perp_entries = pairs_perp_entries.len() - ) - )] - pub fn merge_entries_from( - stablecoins_spot_entries: Vec, - stablecoins_decimals: HashMap, - pairs_perp_entries: Vec, - ) -> Result, EntryError> { - let mut merged_entries = vec![]; - - for perp_median_entry in pairs_perp_entries { - // safe unwrap since we know the pairs are formatted "XXX/YYY" - let stable_coin_name = perp_median_entry.pair_id.split('/').last().unwrap(); - let related_usd_spot = format!("{stable_coin_name}/USD"); - - let spot_usd_median_entry = stablecoins_spot_entries - .iter() - .find(|spot_median_entry| spot_median_entry.pair_id == related_usd_spot) - .ok_or(EntryError::InternalServerError)?; - - let mark_price = Self::compute_mark_price( - &perp_median_entry.median_price, - &spot_usd_median_entry.median_price, - stablecoins_decimals - .get(stable_coin_name) - .ok_or(EntryError::InternalServerError)?, - ); - - let mut components = perp_median_entry.components; - components.extend(spot_usd_median_entry.components.clone()); - - let mark_median_entry = MedianEntryWithComponents { - pair_id: perp_median_entry.pair_id.clone(), - median_price: mark_price, - components, - }; - merged_entries.push(mark_median_entry); - } - - Ok(merged_entries) - } -} - -#[async_trait::async_trait] -impl Pricer for MarkPricer { - fn new(pairs: Vec, pair_type: DataType) -> Self { - Self { pairs, pair_type } - } - - #[tracing::instrument( - skip(self, db_pool), - fields( - pairs_count = self.pairs.len(), - pair_type = ?self.pair_type - ) - )] - async fn compute(&self, db_pool: &Pool) -> Result, EntryError> { - tracing::debug!("Computing mark prices for pairs: {:?}", self.pairs); - if self.pairs.is_empty() { - return Ok(vec![]); - } - let (stablecoins_spot_entries, stablecoins_decimals, pairs_perp_entries) = tokio::join!( - Self::get_stablecoins_index_entries(db_pool, &self.pairs), - // TODO: currently, we retrieve the decimals everytime for every loop - // but we should not: they won't change. - Self::get_stablecoins_decimals(db_pool, self.pairs.clone()), - Self::get_pairs_entries(db_pool, &self.pairs, self.pair_type) - ); - Self::merge_entries_from( - stablecoins_spot_entries?, - stablecoins_decimals?, - pairs_perp_entries?, - ) - } -} diff --git a/pragma-node/src/utils/signing.rs b/pragma-node/src/utils/signing.rs new file mode 100644 index 00000000..a1d5bf3d --- /dev/null +++ b/pragma-node/src/utils/signing.rs @@ -0,0 +1,14 @@ +use pragma_common::starknet::{ConversionError, SignerError}; +use starknet::signers::SigningKey; +use starknet_crypto::Felt; + +pub trait Signable { + fn try_get_hash(&self) -> Result; +} + +/// Sign the passed data with the signer & return the signature 0x prefixed. 
+pub fn sign_data(signer: &SigningKey, data: &impl Signable) -> Result<String, SignerError> {
+    let hash_to_sign = data.try_get_hash()?;
+    let signature = signer.sign(&hash_to_sign)?;
+    Ok(format!("0x{signature:}"))
+}
diff --git a/pragma-node/src/utils/sql.rs b/pragma-node/src/utils/sql.rs
index a146fee9..94b683f4 100644
--- a/pragma-node/src/utils/sql.rs
+++ b/pragma-node/src/utils/sql.rs
@@ -1,31 +1,11 @@
-use pragma_common::types::{AggregationMode, DataType, Interval};
+use pragma_common::{AggregationMode, InstrumentType, Interval};
 use pragma_entities::InfraError;
 
-// SQL statement used to filter the expiration timestamp for future entries
-pub fn get_expiration_timestamp_filter(
-    data_type: DataType,
-    expiry: &str,
-) -> Result<String, InfraError> {
-    match data_type {
-        DataType::SpotEntry => Ok(String::default()),
-        // TODO: this is a perp?
-        DataType::FutureEntry if expiry.is_empty() => {
-            Ok(String::from("AND\n\t\texpiration_timestamp is null"))
-        }
-        DataType::FutureEntry if !expiry.is_empty() => {
-            Ok(format!("AND\n\texpiration_timestamp = '{expiry}'"))
-        }
-        _ => Err(InfraError::InternalServerError),
-    }
-}
-
 // Retrieve the timescale table based on the network and data type.
-pub const fn get_table_suffix(data_type: DataType) -> Result<&'static str, InfraError> {
+pub const fn get_table_suffix(data_type: InstrumentType) -> Result<&'static str, InfraError> {
     match data_type {
-        DataType::SpotEntry => Ok(""),
-        DataType::FutureEntry => Ok("_future"),
-        // TODO: Why does this return an Err? Should be "_future" too?
-        DataType::PerpEntry => Err(InfraError::InternalServerError),
+        InstrumentType::Spot => Ok("spot"),
+        InstrumentType::Perp => Ok("perp"),
     }
 }
 
@@ -36,8 +16,12 @@ pub const fn get_interval_specifier(
 ) -> Result<&'static str, InfraError> {
     if is_twap {
         match interval {
-            Interval::OneHour => Ok("1_hour"),
-            Interval::TwoHours => Ok("2_hours"),
+            Interval::OneMinute => Ok("1_min"),
+            Interval::FiveMinutes => Ok("5_min"),
+            Interval::FifteenMinutes => Ok("15_min"),
+            Interval::OneHour => Ok("1_h"),
+            Interval::TwoHours => Ok("2_h"),
+            Interval::OneDay => Ok("1_day"),
             _ => Err(InfraError::UnsupportedInterval(
                 interval,
                 AggregationMode::Twap,
@@ -45,14 +29,20 @@ pub const fn get_interval_specifier(
         }
     } else {
         match interval {
+            Interval::OneHundredMillisecond => Ok("100_ms"),
             Interval::OneSecond => Ok("1_s"),
             Interval::FiveSeconds => Ok("5_s"),
+            Interval::TenSeconds => Ok("10_s"),
             Interval::OneMinute => Ok("1_min"),
             Interval::FifteenMinutes => Ok("15_min"),
             Interval::OneHour => Ok("1_h"),
             Interval::TwoHours => Ok("2_h"),
             Interval::OneDay => Ok("1_day"),
             Interval::OneWeek => Ok("1_week"),
+            Interval::FiveMinutes => Err(InfraError::UnsupportedInterval(
+                interval,
+                AggregationMode::Median,
+            )),
         }
     }
 }
diff --git a/pragma-common/src/signing/starkex.rs b/pragma-node/src/utils/starkex.rs
similarity index 98%
rename from pragma-common/src/signing/starkex.rs
rename to pragma-node/src/utils/starkex.rs
index b59285e6..47084ad9 100644
--- a/pragma-common/src/signing/starkex.rs
+++ b/pragma-node/src/utils/starkex.rs
@@ -1,8 +1,8 @@
-use crate::errors::ConversionError;
 use bigdecimal::{BigDecimal, ToPrimitive};
+use pragma_common::starknet::ConversionError;
 use starknet::core::{crypto::pedersen_hash, types::Felt, utils::cairo_short_string_to_felt};
 
-use super::Signable;
+use super::signing::Signable;
 
 pub struct StarkexPrice {
     pub oracle_name: String,
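(Editor's aside, before the WebSocket rework: a hedged, self-contained sketch of how a payload type might implement the new `Signable` trait from `signing.rs` and be signed. The `Quote` type is hypothetical, the trait here is simplified to return `Felt` directly whereas the real one returns `Result<Felt, ConversionError>`, and only the `starknet` crate is assumed.)

```rust
use starknet::core::crypto::pedersen_hash;
use starknet::core::types::Felt;
use starknet::signers::SigningKey;

// Simplified stand-in for pragma-node's Signable trait.
trait Signable {
    fn try_get_hash(&self) -> Felt;
}

// Hypothetical payload: a pair and its price, already encoded as felts.
struct Quote {
    pair_id: Felt,
    price: Felt,
}

impl Signable for Quote {
    fn try_get_hash(&self) -> Felt {
        // Hash the payload fields into the single felt that gets signed.
        pedersen_hash(&self.pair_id, &self.price)
    }
}

fn main() {
    let signer = SigningKey::from_random();
    let quote = Quote { pair_id: Felt::from(1_u8), price: Felt::from(42_u8) };
    let signature = signer.sign(&quote.try_get_hash()).expect("signing failed");
    // Same "0x"-prefixed formatting that sign_data produces.
    println!("0x{signature}");
}
```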
diff --git a/pragma-node/src/utils/ws.rs b/pragma-node/src/utils/ws.rs
index 59208ac7..0fa91ab4 100644
--- a/pragma-node/src/utils/ws.rs
+++ b/pragma-node/src/utils/ws.rs
@@ -1,23 +1,23 @@
-use governor::{DefaultKeyedRateLimiter, Quota, RateLimiter};
-use nonzero_ext::nonzero;
-use serde::{Deserialize, Serialize};
-use serde_json::json;
-use std::fmt::Debug;
 use std::net::IpAddr;
+use std::num::NonZeroU32;
 use std::sync::Arc;
 use std::time::Duration;
-use tokio::sync::mpsc::{self, Receiver, Sender};
 
-use crate::metrics::{Interaction, Status};
-use crate::AppState;
 use axum::extract::ws::{Message, WebSocket};
-use futures_util::stream::{SplitSink, SplitStream};
+use futures::stream::{SplitSink, SplitStream};
 use futures_util::{SinkExt, StreamExt};
-use thiserror::Error;
-use tokio::sync::{watch, Mutex};
-use tokio::time::{interval, Interval};
+use governor::{Quota, RateLimiter};
+use nonzero_ext::nonzero;
+use serde::{Deserialize, Serialize};
+use serde_json::json;
+use tokio::sync::{RwLock, mpsc, watch};
+use tokio::time::Interval;
+use tokio_util::sync::CancellationToken;
 use uuid::Uuid;
 
+use crate::metrics::{Interaction, Status};
+use crate::{metrics, state::AppState};
+
 #[derive(Default, Debug, Serialize, Deserialize)]
 pub enum SubscriptionType {
     #[serde(rename = "subscribe")]
@@ -27,38 +27,55 @@ pub enum SubscriptionType {
     Unsubscribe,
 }
 
-#[derive(Debug, Error)]
+#[derive(Debug, thiserror::Error)]
 pub enum WebSocketError {
-    #[error("could not create a channel with the client")]
-    ChannelInit,
-    #[error("could not decode client message: {0}")]
-    MessageDecode(String),
-    #[error("could not close the channel")]
-    ChannelClose,
+    #[error("Rate limit exceeded")]
+    RateLimitExceeded,
+
+    #[error("Failed to send message")]
+    SendError(#[from] mpsc::error::SendError<Message>),
+
+    #[error("Internal error: {0}")]
+    InternalError(String),
+
+    #[error("Connection closed")]
+    ConnectionClosed,
+
+    #[error("Message serialization failed: {0}")]
+    Serialization(#[from] serde_json::Error),
+
+    #[error("Failed to decode message: {0}")]
+    DecodingError(String),
 }
 
-/// Subscriber is an actor that handles a single websocket connection.
-/// It listens to the store for updates and sends them to the client.
-#[allow(dead_code)]
+// Subscriber struct managing WebSocket connections
 pub struct Subscriber<ChannelState> {
     pub id: Uuid,
-    pub endpoint_name: String,
-    pub ip_address: IpAddr,
-    pub closed: bool,
-    pub state: Arc<Mutex<ChannelState>>,
+    pub state: Arc<RwLock<ChannelState>>,
     pub app_state: Arc<AppState>,
-    pub sender: SplitSink<WebSocket, Message>,
-    pub receiver: SplitStream<WebSocket>,
-    pub update_interval: Interval,
-    pub notify_receiver: Receiver<Message>,
-    pub rate_limiter: DefaultKeyedRateLimiter<IpAddr>,
-    pub exit: (watch::Sender<bool>, watch::Receiver<bool>),
+    endpoint_name: String,
+    pub ip_address: IpAddr,
+    server_msg_sender: mpsc::Sender<Message>,
+    client_msg_receiver: mpsc::Receiver<Message>,
+    update_interval: Interval,
+    rate_limiter: RateLimiter<
+        IpAddr,
+        governor::state::keyed::DefaultKeyedStateStore<IpAddr>,
+        governor::clock::DefaultClock,
+    >,
+    message_count_limiter: RateLimiter<
+        IpAddr,
+        governor::state::keyed::DefaultKeyedStateStore<IpAddr>,
+        governor::clock::DefaultClock,
+    >,
+    exit: (
+        tokio::sync::watch::Sender<bool>,
+        tokio::sync::watch::Receiver<bool>,
+    ),
+    last_activity: std::time::Instant,
+    tasks_cancellation: CancellationToken,
 }
 
-/// The maximum number of bytes that can be sent per second per IP address.
-/// If the limit is exceeded, the connection is closed.
-const BYTES_LIMIT_PER_IP_PER_SECOND: u32 = 256 * 1024; // 256 KiB
-
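(Editor's aside: the reworked error enum leans on `thiserror`'s `#[from]` attribute so the `?` operator converts library errors automatically. A minimal sketch of that pattern; `WsError` and `encode` are hypothetical stand-ins, assuming only the `thiserror` and `serde_json` crates.)

```rust
use thiserror::Error;

#[derive(Debug, Error)]
enum WsError {
    // #[from] derives From<serde_json::Error>, so the `?` below
    // converts the error into WsError automatically.
    #[error("Message serialization failed: {0}")]
    Serialization(#[from] serde_json::Error),
}

fn encode(payload: &serde_json::Value) -> Result<String, WsError> {
    Ok(serde_json::to_string(payload)?)
}

fn main() {
    let msg = encode(&serde_json::json!({ "status": "ok" })).unwrap();
    println!("{msg}");
}
```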
 #[async_trait::async_trait]
 pub trait ChannelHandler<ChannelState, CM, Err> {
     /// Called after a message is received from the client.
@@ -78,77 +95,170 @@ pub trait ChannelHandler<ChannelState, CM, Err> {
 
 impl<ChannelState> Subscriber<ChannelState>
 where
-    ChannelState: Default + Debug,
+    ChannelState: Default + Send + Sync + 'static,
 {
-    /// Create a new subscriber tied to a websocket connection.
-    pub async fn new(
+    /// Creates a new Subscriber instance tied to a WebSocket connection.
+    ///
+    /// # Arguments
+    /// - `endpoint_name`: Name of the endpoint (e.g., `subscribe_to_onchain_ohlc`).
+    /// - `socket`: The WebSocket connection.
+    /// - `ip_address`: Client's IP address for rate limiting.
+    /// - `app_state`: Shared application state.
+    /// - `state`: Optional initial channel state.
+    /// - `update_interval_in_ms`: Interval (in milliseconds) for periodic updates.
+    /// - `rate_limit_quota`: Configurable rate limit quota for this endpoint.
+    ///
+    /// # Returns
+    /// The newly created Subscriber, or an error if the setup fails.
+    pub fn new(
         endpoint_name: String,
         socket: WebSocket,
         ip_address: IpAddr,
         app_state: Arc<AppState>,
         state: Option<ChannelState>,
         update_interval_in_ms: u64,
-    ) -> Result<(Self, Sender<Message>), WebSocketError> {
+        rate_limit_quota: Option<Quota>,
+    ) -> Result<Self, WebSocketError> {
+        /// The maximum number of bytes that can be sent per second per IP address.
+        /// If the limit is exceeded, the connection is closed.
+        const BYTES_LIMIT_PER_IP_PER_SECOND: u32 = 256 * 1024; // 256 KiB
+        /// The maximum number of messages that can be sent per second per IP address.
+        const MESSAGES_LIMIT_PER_IP_PER_SECOND: u32 = 64;
+
         let id = Uuid::new_v4();
-        let (sender, receiver) = socket.split();
-        let (notify_sender, notify_receiver) = mpsc::channel::<Message>(32);
+        let (ws_sender, ws_receiver) = socket.split();
+        let (server_msg_sender, server_msg_receiver) = mpsc::channel::<Message>(128);
+        let (client_msg_sender, client_msg_receiver) = mpsc::channel::<Message>(128);
+
+        let rate_limit_quota =
+            rate_limit_quota.unwrap_or(Quota::per_second(nonzero!(BYTES_LIMIT_PER_IP_PER_SECOND)));
+        let msg_limit_quota = Quota::per_second(nonzero!(MESSAGES_LIMIT_PER_IP_PER_SECOND));
+
+        // Spawn sending and receiving tasks
+        let cancellation_token = Self::spawn_ws_tasks(
+            ws_sender,
+            ws_receiver,
+            server_msg_receiver,
+            client_msg_sender,
+        );
 
-        let mut subscriber = Self {
+        let subscriber = Self {
             id,
+            state: Arc::new(RwLock::new(state.unwrap_or_default())),
+            app_state,
             endpoint_name,
             ip_address,
-            closed: false,
-            state: Arc::new(Mutex::new(state.unwrap_or_default())),
-            app_state,
-            sender,
-            receiver,
-            update_interval: interval(Duration::from_millis(update_interval_in_ms)),
-            notify_receiver,
-            rate_limiter: RateLimiter::dashmap(Quota::per_second(nonzero!(
-                BYTES_LIMIT_PER_IP_PER_SECOND
-            ))),
+            server_msg_sender,
+            client_msg_receiver,
+            update_interval: tokio::time::interval(Duration::from_millis(update_interval_in_ms)),
+            rate_limiter: RateLimiter::dashmap(rate_limit_quota),
+            message_count_limiter: RateLimiter::dashmap(msg_limit_quota),
             exit: watch::channel(false),
+            last_activity: std::time::Instant::now(),
+            tasks_cancellation: cancellation_token,
         };
 
-        subscriber.assert_is_healthy().await?;
+
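(Editor's aside: the limiter setup above uses `governor`'s keyed dashmap store, one bucket per IP. A minimal sketch of the two checks the Subscriber performs; the `64u32` quota and the `32`-cell burst are illustrative, assuming only the `governor` and `nonzero_ext` crates.)

```rust
use governor::{Quota, RateLimiter};
use nonzero_ext::nonzero;
use std::net::{IpAddr, Ipv4Addr};
use std::num::NonZeroU32;

fn main() {
    // Keyed limiter: every IP gets its own bucket with a 64-cells-per-second quota.
    let limiter = RateLimiter::dashmap(Quota::per_second(nonzero!(64u32)));
    let ip = IpAddr::V4(Ipv4Addr::LOCALHOST);

    // check_key consumes one cell for that key (one message, say).
    assert!(limiter.check_key(&ip).is_ok());

    // check_key_n consumes n cells at once; this is how byte-based
    // limits are enforced (n = message size in bytes).
    let burst = NonZeroU32::new(32).unwrap();
    assert!(matches!(limiter.check_key_n(&ip, burst), Ok(Ok(()))));
}
```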
         // Retain the recent rate limit data for the IP addresses to
         // prevent the rate limiter size from growing indefinitely.
         subscriber.rate_limiter.retain_recent();
-        subscriber.record_metric(Interaction::NewConnection, Status::Success);
-        Ok((subscriber, notify_sender))
+
+        subscriber.record_metric(
+            metrics::Interaction::NewConnection,
+            metrics::Status::Success,
+        );
+
+        Ok(subscriber)
     }
 
-    /// Perform the initial handshake with the client - ensure the channel is healthy
-    async fn assert_is_healthy(&mut self) -> Result<(), WebSocketError> {
-        let ping_status = self.sender.send(Message::Ping(vec![1, 2, 3].into())).await;
-        if ping_status.is_err() {
-            self.record_metric(Interaction::NewConnection, Status::Error);
-            return Err(WebSocketError::ChannelInit);
-        }
-        Ok(())
+    /// Spawns WebSocket tasks and returns a cancellation token.
+    ///
+    /// The tasks are responsible for sending and receiving messages on the
+    /// socket, which are then forwarded to the Subscriber.
+    fn spawn_ws_tasks(
+        mut ws_sender: SplitSink<WebSocket, Message>,
+        mut ws_receiver: SplitStream<WebSocket>,
+        mut server_msg_receiver: mpsc::Receiver<Message>,
+        client_msg_sender: mpsc::Sender<Message>,
+    ) -> CancellationToken {
+        let token = CancellationToken::new();
+        let task_token = token.clone();
+
+        tokio::spawn(async move {
+            loop {
+                tokio::select! {
+                    // Handle cancellation
+                    () = task_token.cancelled() => {
+                        let _ = ws_sender.close().await;
+                        break;
+                    }
+
+                    // Send messages from server to client
+                    Some(msg) = server_msg_receiver.recv() => {
+                        if ws_sender.send(msg).await.is_err() {
+                            break;
+                        }
+                    }
+
+                    // Receive messages from client to server
+                    Some(result) = ws_receiver.next() => {
+                        match result {
+                            Ok(msg) => {
+                                if client_msg_sender.send(msg).await.is_err() {
+                                    break;
+                                }
+                            }
+                            Err(_) => {
+                                break;
+                            }
+                        }
+                    }
+
+                    // Ensure we don’t spin if no messages are available
+                    else => break,
+                }
+            }
+        });
+
+        token
     }
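(Editor's aside: the `listen` loop that follows multiplexes several event sources with `tokio::select!`, the same pattern as the spawned task above. A stripped-down, runnable sketch of that pattern; the channel, message, and interval are stand-ins, assuming only tokio with default full features.)

```rust
use std::time::Duration;

#[tokio::main]
async fn main() {
    let (tx, mut rx) = tokio::sync::mpsc::channel::<String>(8);
    let mut ticker = tokio::time::interval(Duration::from_millis(100));
    ticker.tick().await; // the first tick of an interval completes immediately

    tokio::spawn(async move {
        tx.send("hello".into()).await.ok();
    });

    loop {
        tokio::select! {
            // Branch 1: a client message arrives on the channel.
            Some(msg) = rx.recv() => println!("client: {msg}"),
            // Branch 2: the periodic interval fires; exit for the demo.
            _ = ticker.tick() => break,
        }
    }
}
```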
 
-    /// Listen to messages from the client and the server.
-    /// The handler is responsible for processing the messages and updating the state.
+    /// Listens for client messages and invokes the handler periodically.
     pub async fn listen<H, CM, Err>(&mut self, mut handler: H) -> Result<(), Err>
     where
         H: ChannelHandler<ChannelState, CM, Err>,
         CM: for<'a> Deserialize<'a>,
     {
+        const INACTIVITY_CHECK_INTERVAL: Duration = Duration::from_secs(20);
+        let mut inactivity_timer = tokio::time::interval(INACTIVITY_CHECK_INTERVAL);
+
         loop {
             tokio::select! {
-                // Messages from the client
-                maybe_client_msg = self.receiver.next() => {
-                    match maybe_client_msg {
-                        Some(Ok(client_msg)) => {
-                            handler = self.decode_and_handle(handler, client_msg).await?;
+                // Check for inactivity
+                _ = inactivity_timer.tick() => {
+                    if self.is_inactive() {
+                        self.send_err("Connection timeout due to inactivity").await;
+                        self.server_msg_sender.send(Message::Close(None)).await.ok();
+                        if self.exit.0.send(true).is_err() {
+                            self.record_metric(Interaction::CloseConnection, Status::Error);
+                        } else {
+                            self.record_metric(Interaction::CloseConnection, Status::Success);
                         }
-                        Some(Err(_)) => {
-                            return Ok(());
-                        },
-                        None => {}
+                        return Ok(());
                     }
                 },
-                // Periodic updates
+
+                // Messages from the client
+                Some(client_msg) = self.client_msg_receiver.recv() => {
+                    // Check message frequency rate limit
+                    if self.message_count_limiter.check_key(&self.ip_address).is_err() {
+                        self.send_err("Too many messages. Please slow down.").await;
+                        continue;
+                    }
+
+                    handler = self.decode_and_handle(handler, client_msg).await?;
+                },
+
+                // Periodic updates in the channel
                 _ = self.update_interval.tick() => {
                     let status = handler.periodic_interval(self).await;
                     match status {
@@ -161,18 +271,11 @@ where
                             return Err(e);
                         }
                     }
-                },
-                // Messages from the server to the client
-                maybe_server_msg = self.notify_receiver.recv() => {
-                    if let Some(server_msg) = maybe_server_msg {
-                        let _ = self.sender.send(server_msg).await;
-                    }
-                },
-                // Exit signal
+                }
+
+                // Check if the channel has been closed
                 _ = self.exit.1.changed() => {
                     if *self.exit.1.borrow() {
-                        self.sender.close().await.ok();
-                        self.closed = true;
                         self.record_metric(Interaction::CloseConnection, Status::Success);
                         return Ok(());
                     }
@@ -193,30 +296,31 @@ where
         H: ChannelHandler<ChannelState, CM, Err>,
         CM: for<'a> Deserialize<'a>,
     {
-        let status_decoded_msg = self.decode_msg::<CM>(client_msg).await;
-        if let Ok(maybe_client_msg) = status_decoded_msg {
-            if let Some(client_msg) = maybe_client_msg {
-                self.record_metric(Interaction::ClientMessageDecode, Status::Success);
-                let status = handler.handle_client_msg(self, client_msg).await;
-                match status {
-                    Ok(()) => {
-                        self.record_metric(Interaction::ClientMessageProcess, Status::Success);
-                    }
-                    Err(e) => {
-                        self.record_metric(Interaction::ClientMessageProcess, Status::Error);
-                        self.record_metric(Interaction::CloseConnection, Status::Success);
-                        return Err(e);
-                    }
-                }
-            }
-        } else {
+        // Return early if the message could not be decoded
+        let Ok(Some(client_msg)) = self.decode_msg::<CM>(client_msg).await else {
            self.record_metric(Interaction::ClientMessageDecode, Status::Error);
+            return Ok(handler);
+        };
+
+        // Else, handle it
+        self.record_metric(Interaction::ClientMessageDecode, Status::Success);
+        let status = handler.handle_client_msg(self, client_msg).await;
+        match status {
+            Ok(()) => {
+                self.record_metric(Interaction::ClientMessageProcess, Status::Success);
+            }
+            Err(e) => {
+                self.record_metric(Interaction::ClientMessageProcess, Status::Error);
+                self.record_metric(Interaction::CloseConnection, Status::Success);
+                return Err(e);
+            }
         }
+
         Ok(handler)
     }
 
     /// Decode the message into the expected type.
-    /// The message is expected to be in JSON format.
+    ///
     /// If the message is not in the expected format, it will return None.
     /// If the message is a close signal, it will return None and send a close signal to the client.
     async fn decode_msg<T: for<'a> Deserialize<'a>>(
@@ -225,60 +329,137 @@ where
     ) -> Result<Option<T>, WebSocketError> {
         match msg {
             Message::Close(_) => {
-                if self.exit.0.send(true).is_ok() {
-                    self.sender
-                        .close()
-                        .await
-                        .map_err(|_| WebSocketError::ChannelClose)?;
-                    self.closed = true;
-                } else {
+                if self.exit.0.send(true).is_err() {
                     self.record_metric(Interaction::CloseConnection, Status::Error);
                 }
             }
+
             Message::Text(text) => {
-                let maybe_msg = serde_json::from_str::<T>(&text);
-                if let Ok(msg) = maybe_msg {
-                    return Ok(Some(msg));
+                self.assert_client_message_size(text.len()).await?;
+
+                match serde_json::from_str::<T>(&text) {
+                    Ok(msg) => {
+                        self.last_activity = std::time::Instant::now();
+                        return Ok(Some(msg));
+                    }
+                    Err(e) => {
+                        self.send_err("Error parsing JSON into valid websocket request.")
+                            .await;
+                        return Err(WebSocketError::DecodingError(e.to_string()));
+                    }
                 }
-                tracing::error!("Failed to decode text message: {:?}", maybe_msg.err());
-                self.send_err(
-                    "⛔ Incorrect message.
Please check the documentation for more information.", - ) - .await; - return Err(WebSocketError::MessageDecode(text.to_string())); } + Message::Binary(payload) => { - let maybe_msg = serde_json::from_slice::(&payload); - if let Ok(msg) = maybe_msg { - return Ok(Some(msg)); + self.assert_client_message_size(payload.len()).await?; + + match serde_json::from_slice::(&payload) { + Ok(msg) => { + self.last_activity = std::time::Instant::now(); + return Ok(Some(msg)); + } + Err(e) => { + self.send_err("Error parsing JSON into valid websocket request.") + .await; + return Err(WebSocketError::DecodingError(e.to_string())); + } } - self.send_err( - "⛔ Incorrect message. Please check the documentation for more information.", - ) - .await; - return Err(WebSocketError::MessageDecode(format!("{payload:?}"))); } - // Ignore pings and pongs messages - _ => {} + + Message::Ping(payload) => { + self.last_activity = std::time::Instant::now(); + let _ = self.server_msg_sender.send(Message::Pong(payload)).await; + } + + Message::Pong(_) => {} } Ok(None) } - /// Send a message to the client. - pub async fn send_msg(&mut self, msg: String) -> Result<(), axum::Error> { - self.sender.send(Message::Text(msg.into())).await + /// Sends a message to the client after checking the rate limit. + pub async fn send_msg(&mut self, message: T) -> Result<(), WebSocketError> + where + T: Sized + Serialize, + { + let message = serde_json::to_string(&message).map_err(WebSocketError::Serialization)?; + + let message_size = message.len(); + self.check_rate_limit(message_size).await?; + self.server_msg_sender + .send(Message::Text(message.into())) + .await + .map_err(WebSocketError::SendError)?; + + Ok(()) } - /// Send an error message to the client without closing the channel. - pub async fn send_err(&mut self, err: &str) { - let err = json!({"error": err}); + /// Sends an error message to the client. + pub async fn send_err(&self, msg: &str) { + let err = json!({ + "status": "error", + "error": msg, + "timestamp": chrono::Utc::now().timestamp_millis(), + }); let _ = self - .sender + .server_msg_sender .send(Message::Text(err.to_string().into())) .await; } - /// Records a web socket metric. + /// Checks the rate limit for the given message size. + /// + /// If the limit is exceeded, it calls `handle_rate_limit_exceeded` to close the connection. + async fn check_rate_limit(&self, message_size: usize) -> Result<(), WebSocketError> { + let burst_size = NonZeroU32::new(message_size as u32) + .ok_or(WebSocketError::InternalError("Invalid message size".into()))?; + + if self.rate_limiter.check_key_n(&self.ip_address, burst_size) != Ok(Ok(())) { + self.handle_rate_limit_exceeded().await?; + return Err(WebSocketError::RateLimitExceeded); + } + Ok(()) + } + + /// Handles the case when the rate limit is exceeded. + /// + /// Sends an error message to the client and closes the connection. + async fn handle_rate_limit_exceeded(&self) -> Result<(), WebSocketError> { + self.record_metric(metrics::Interaction::RateLimit, metrics::Status::Error); + self.send_err("Rate limit exceeded. 
Closing connection.") + .await; + + self.server_msg_sender + .send(Message::Close(None)) + .await + .map_err(WebSocketError::SendError)?; + + if self.exit.0.send(true).is_err() { + self.record_metric(Interaction::CloseConnection, Status::Error); + } + Ok(()) + } + + async fn assert_client_message_size(&self, len: usize) -> Result<(), WebSocketError> { + const MAX_MESSAGE_SIZE: usize = 1_048_576; // 1MB limit + + if len > MAX_MESSAGE_SIZE { + let err = "Message too large."; + self.send_err(err).await; + return Err(WebSocketError::DecodingError(err.into())); + } + + Ok(()) + } + + /// Checks if the client is inactive. + /// + /// A client is considered inactive after 30s without any message. + fn is_inactive(&self) -> bool { + const INACTIVITY_TIMEOUT: Duration = Duration::from_secs(30); + self.last_activity.elapsed() > INACTIVITY_TIMEOUT + } + + /// Records a metric for the subscriber's interactions. pub fn record_metric(&self, interaction: Interaction, status: Status) { self.app_state.metrics.ws_metrics.record_ws_interaction( &self.endpoint_name, @@ -287,3 +468,10 @@ where ); } } + +// Cancel all tasks when subscriber is dropped +impl Drop for Subscriber { + fn drop(&mut self) { + self.tasks_cancellation.cancel(); + } +} diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 7d857086..0d5d812e 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.82.0" +channel = "1.86.0" components = ["rustfmt", "clippy", "rust-analyzer"] targets = ["wasm32-unknown-unknown"] profile = "minimal" diff --git a/infra/pragma-node/postgres_migrations/01-init.sql b/sql/01-init.sql similarity index 63% rename from infra/pragma-node/postgres_migrations/01-init.sql rename to sql/01-init.sql index a7247147..8c587a79 100644 --- a/infra/pragma-node/postgres_migrations/01-init.sql +++ b/sql/01-init.sql @@ -7,7 +7,7 @@ CREATE TABLE mainnet_spot_entry ( block_timestamp timestamp without time zone, transaction_hash character varying(255), price numeric, - timestamp timestamp without time zone, + timestamp TIMESTAMPTZ, publisher character varying(255), source character varying(255), volume numeric, @@ -23,7 +23,7 @@ CREATE TABLE spot_entry ( block_timestamp timestamp without time zone, transaction_hash character varying(255), price numeric, - timestamp timestamp without time zone, + timestamp TIMESTAMPTZ, publisher character varying(255), source character varying(255), volume numeric, @@ -39,12 +39,12 @@ CREATE TABLE mainnet_future_entry ( block_timestamp timestamp without time zone, transaction_hash character varying(255), price numeric, - timestamp timestamp without time zone, + timestamp TIMESTAMPTZ, publisher character varying(255), source character varying(255), volume numeric, _cursor bigint, - expiration_timestamp timestamp without time zone + expiration_timestamp TIMESTAMPTZ ); CREATE TABLE future_entry ( @@ -56,12 +56,12 @@ CREATE TABLE future_entry ( block_timestamp timestamp without time zone, transaction_hash character varying(255), price numeric, - timestamp timestamp without time zone, + timestamp TIMESTAMPTZ, publisher character varying(255), source character varying(255), volume numeric, _cursor bigint, - expiration_timestamp timestamp without time zone + expiration_timestamp TIMESTAMPTZ ); CREATE TABLE mainnet_spot_checkpoints ( @@ -76,7 +76,7 @@ CREATE TABLE mainnet_spot_checkpoints ( sender_address character varying(255), aggregation_mode numeric, _cursor bigint, - timestamp timestamp without time zone, + timestamp TIMESTAMPTZ, nb_sources_aggregated 
numeric ); @@ -92,57 +92,20 @@ CREATE TABLE spot_checkpoints ( sender_address character varying(255), aggregation_mode numeric, _cursor bigint, - timestamp timestamp without time zone, + timestamp TIMESTAMPTZ, nb_sources_aggregated numeric ); -CREATE TABLE vrf_requests ( - network character varying(255), - request_id numeric, - seed numeric, - created_at timestamp without time zone, - created_at_tx character varying(255), - callback_address character varying(255), - callback_fee_limit numeric, - num_words numeric, - requestor_address character varying(255), - updated_at timestamp without time zone, - updated_at_tx character varying(255), - status numeric, - minimum_block_number numeric, - _cursor int8range, - data_id character varying(255) -); - CREATE TABLE publishers ( name VARCHAR NOT NULL, website_url VARCHAR NOT NULL, mainnet_address VARCHAR, testnet_address VARCHAR, - publisher_type INTEGER NOT NULL CHECK (publisher_type IN (0, 1)) -- 0 = first party, 1 = 3rd party + publisher_type INTEGER NOT NULL CHECK (publisher_type IN (0, 1)) ); -CREATE TABLE oo_requests ( - network character varying(255), - data_id VARCHAR, - assertion_id VARCHAR, - domain_id VARCHAR, - claim TEXT, - asserter character varying(255), - disputer character varying(255), - disputed BOOLEAN, - dispute_id character varying(255), - callback_recipient character varying(255), - escalation_manager character varying(255), - caller character varying(255), - expiration_timestamp timestamp without time zone, - settled BOOLEAN, - settlement_resolution BOOLEAN, - settle_caller character varying(255), - currency character varying(255), - bond NUMERIC, - _cursor int8range, - identifier VARCHAR, - updated_at timestamp without time zone, - updated_at_tx character varying(255) -); \ No newline at end of file +CREATE TYPE price_component AS ( + source text, + price numeric(1000,0), + "timestamp" TIMESTAMPTZ +); diff --git a/infra/pragma-node/postgres_migrations/02-add-publishers.sql b/sql/02-create-publishers.sql similarity index 75% rename from infra/pragma-node/postgres_migrations/02-add-publishers.sql rename to sql/02-create-publishers.sql index d10efb05..a6a152f8 100644 --- a/infra/pragma-node/postgres_migrations/02-add-publishers.sql +++ b/sql/02-create-publishers.sql @@ -48,4 +48,19 @@ VALUES null, '', 0 +), +( + 'ALENO', + 'https://aleno.ai/', + '0x71AD7B064FF7825AB11760207B16A28B54DECC5B02873AF6B1A2527E07925F0', + '0x06C58C048FC1483362D6AB56A542B74ADF5FD5C00706AEDA32EAD142E38B8646', + 0 +), +( + 'KAIKO', + 'https://www.kaiko.com/', + '0x01BF3CF61F2C609D42F12DA0A7E654D1ED790AFF431B7CA4C538FA22847EDD00', + '0x00AB1D128468130A5263BC32BCA3030202A16BBFFE69E8458571B40C0E53B38B', + 0 ); + diff --git a/sql/03-create-timescale-hypertables.sql b/sql/03-create-timescale-hypertables.sql new file mode 100644 index 00000000..da5f332a --- /dev/null +++ b/sql/03-create-timescale-hypertables.sql @@ -0,0 +1,22 @@ +SELECT create_hypertable('mainnet_spot_entry', by_range('timestamp', INTERVAL '7 days')); +SELECT create_hypertable('spot_entry', by_range('timestamp', INTERVAL '7 days')); +SELECT create_hypertable('mainnet_future_entry', by_range('timestamp', INTERVAL '7 days')); +SELECT create_hypertable('future_entry', by_range('timestamp', INTERVAL '7 days')); +SELECT create_hypertable('mainnet_spot_checkpoints', by_range('timestamp', INTERVAL '7 days')); +SELECT create_hypertable('spot_checkpoints', by_range('timestamp', INTERVAL '7 days')); + +ALTER TABLE mainnet_spot_entry SET ( + timescaledb.segmentby = 'pair_id' +); + +ALTER TABLE 
spot_entry SET ( + timescaledb.segmentby = 'pair_id' +); + +ALTER TABLE mainnet_future_entry SET ( + timescaledb.segmentby = 'pair_id' +); + +ALTER TABLE future_entry SET ( + timescaledb.segmentby = 'pair_id' +); diff --git a/sql/04-create-timescale-median-aggregates.sql b/sql/04-create-timescale-median-aggregates.sql new file mode 100644 index 00000000..0d62d407 --- /dev/null +++ b/sql/04-create-timescale-median-aggregates.sql @@ -0,0 +1,104 @@ +CREATE FUNCTION create_onchain_median_aggregate( + p_name text, -- e.g., 'mainnet_spot_median_100_ms' + p_table_name text, -- e.g., 'mainnet_spot_entry' + p_interval interval, -- e.g., '100 milliseconds' + p_start_offset interval, -- e.g., '300 milliseconds' + p_type text -- 'spot' or 'perp' +) +RETURNS void AS $$ +DECLARE + where_condition text; +BEGIN + -- Set the WHERE condition based on p_type and p_table_name + IF p_type = 'spot' AND (p_table_name = 'mainnet_spot_entry' OR p_table_name = 'spot_entry') THEN + where_condition := '"timestamp" IS NOT NULL'; + ELSIF p_type = 'perp' AND (p_table_name = 'mainnet_future_entry' OR p_table_name = 'future_entry') THEN + where_condition := '"timestamp" IS NOT NULL AND expiration_timestamp IS NULL'; + ELSE + RAISE EXCEPTION 'Invalid combination of p_type % and p_table_name %', p_type, p_table_name; + END IF; + + -- Create the per-source materialized view + EXECUTE format(' + CREATE MATERIALIZED VIEW %s_per_source + WITH (timescaledb.continuous, timescaledb.materialized_only = false) + AS SELECT + pair_id, + source, + time_bucket(%L, "timestamp") AS subbucket, + percentile_cont(0.5) WITHIN GROUP (ORDER BY price)::numeric(1000,0) AS source_median_price + FROM %I + WHERE %s + GROUP BY pair_id, source, subbucket + WITH NO DATA;', + p_name, p_interval, p_table_name, where_condition); + + -- Create the main materialized view with median across sources + EXECUTE format(' + CREATE MATERIALIZED VIEW %I + WITH (timescaledb.continuous, timescaledb.materialized_only = false) + AS SELECT + pair_id, + time_bucket(%L, subbucket) AS bucket, + percentile_cont(0.5) WITHIN GROUP (ORDER BY source_median_price)::numeric(1000,0) AS median_price, + COUNT(DISTINCT source) AS num_sources + FROM %I + GROUP BY pair_id, bucket + WITH NO DATA;', + p_name, p_interval, p_name || '_per_source'); + + -- Set chunk time interval to 7 days + EXECUTE format('SELECT set_chunk_time_interval(%L, INTERVAL ''7 days'');', p_name || '_per_source'); + EXECUTE format('SELECT set_chunk_time_interval(%L, INTERVAL ''7 days'');', p_name); + + -- Add continuous aggregate refresh policies + EXECUTE format(' + SELECT add_continuous_aggregate_policy(%L, + start_offset => %L, + end_offset => %L, + schedule_interval => %L);', + p_name || '_per_source', p_start_offset, '0'::interval, p_interval); + EXECUTE format(' + SELECT add_continuous_aggregate_policy(%L, + start_offset => %L, + end_offset => %L, + schedule_interval => %L);', + p_name, p_start_offset, '0'::interval, p_interval); +END; +$$ LANGUAGE plpgsql; + +-- Mainnet Spot Median +SELECT create_onchain_median_aggregate('mainnet_spot_median_10_s', 'mainnet_spot_entry', '10 seconds'::interval, '30 seconds'::interval, 'spot'); +SELECT create_onchain_median_aggregate('mainnet_spot_median_1_min', 'mainnet_spot_entry', '1 minute'::interval, '3 minutes'::interval, 'spot'); +SELECT create_onchain_median_aggregate('mainnet_spot_median_15_min', 'mainnet_spot_entry', '15 minutes'::interval, '45 minutes'::interval, 'spot'); +SELECT create_onchain_median_aggregate('mainnet_spot_median_1_h', 'mainnet_spot_entry', '1 
hour'::interval, '3 hours'::interval, 'spot'); +SELECT create_onchain_median_aggregate('mainnet_spot_median_2_h', 'mainnet_spot_entry', '2 hours'::interval, '6 hours'::interval, 'spot'); +SELECT create_onchain_median_aggregate('mainnet_spot_median_1_day', 'mainnet_spot_entry', '1 day'::interval, '3 days'::interval, 'spot'); +SELECT create_onchain_median_aggregate('mainnet_spot_median_1_week', 'mainnet_spot_entry', '1 week'::interval, '3 weeks'::interval, 'spot'); + +-- Testnet Spot Median +SELECT create_onchain_median_aggregate('spot_median_10_s', 'spot_entry', '10 seconds'::interval, '30 seconds'::interval, 'spot'); +SELECT create_onchain_median_aggregate('spot_median_1_min', 'spot_entry', '1 minute'::interval, '3 minutes'::interval, 'spot'); +SELECT create_onchain_median_aggregate('spot_median_15_min', 'spot_entry', '15 minutes'::interval, '45 minutes'::interval, 'spot'); +SELECT create_onchain_median_aggregate('spot_median_1_h', 'spot_entry', '1 hour'::interval, '3 hours'::interval, 'spot'); +SELECT create_onchain_median_aggregate('spot_median_2_h', 'spot_entry', '2 hours'::interval, '6 hours'::interval, 'spot'); +SELECT create_onchain_median_aggregate('spot_median_1_day', 'spot_entry', '1 day'::interval, '3 days'::interval, 'spot'); +SELECT create_onchain_median_aggregate('spot_median_1_week', 'spot_entry', '1 week'::interval, '3 weeks'::interval, 'spot'); + +-- Mainnet Perp Median +SELECT create_onchain_median_aggregate('mainnet_perp_median_10_s', 'mainnet_future_entry', '10 seconds'::interval, '30 seconds'::interval, 'perp'); +SELECT create_onchain_median_aggregate('mainnet_perp_median_1_min', 'mainnet_future_entry', '1 minute'::interval, '3 minutes'::interval, 'perp'); +SELECT create_onchain_median_aggregate('mainnet_perp_median_15_min', 'mainnet_future_entry', '15 minutes'::interval, '45 minutes'::interval, 'perp'); +SELECT create_onchain_median_aggregate('mainnet_perp_median_1_h', 'mainnet_future_entry', '1 hour'::interval, '3 hours'::interval, 'perp'); +SELECT create_onchain_median_aggregate('mainnet_perp_median_2_h', 'mainnet_future_entry', '2 hours'::interval, '6 hours'::interval, 'perp'); +SELECT create_onchain_median_aggregate('mainnet_perp_median_1_day', 'mainnet_future_entry', '1 day'::interval, '3 days'::interval, 'perp'); +SELECT create_onchain_median_aggregate('mainnet_perp_median_1_week', 'mainnet_future_entry', '1 week'::interval, '3 weeks'::interval, 'perp'); + +-- Testnet Perp Median +SELECT create_onchain_median_aggregate('perp_median_10_s', 'future_entry', '10 seconds'::interval, '30 seconds'::interval, 'perp'); +SELECT create_onchain_median_aggregate('perp_median_1_min', 'future_entry', '1 minute'::interval, '3 minutes'::interval, 'perp'); +SELECT create_onchain_median_aggregate('perp_median_15_min', 'future_entry', '15 minutes'::interval, '45 minutes'::interval, 'perp'); +SELECT create_onchain_median_aggregate('perp_median_1_h', 'future_entry', '1 hour'::interval, '3 hours'::interval, 'perp'); +SELECT create_onchain_median_aggregate('perp_median_2_h', 'future_entry', '2 hours'::interval, '6 hours'::interval, 'perp'); +SELECT create_onchain_median_aggregate('perp_median_1_day', 'future_entry', '1 day'::interval, '3 days'::interval, 'perp'); +SELECT create_onchain_median_aggregate('perp_median_1_week', 'future_entry', '1 week'::interval, '3 weeks'::interval, 'perp'); diff --git a/sql/05-create-timescale-ohlc-aggregates.sql b/sql/05-create-timescale-ohlc-aggregates.sql new file mode 100644 index 00000000..b6b91c8d --- /dev/null +++ 
b/sql/05-create-timescale-ohlc-aggregates.sql @@ -0,0 +1,63 @@ +CREATE FUNCTION create_onchain_candlestick_view( + p_candle_name text, -- e.g., 'mainnet_spot_candle_10_s' + p_interval interval, -- e.g., '10 seconds' + p_start_offset interval, -- e.g., '30 seconds' + p_median_view_name text -- e.g., 'mainnet_spot_median_1_s' +) +RETURNS void AS $$ +BEGIN + -- Create the candlestick materialized view + EXECUTE format(' + CREATE MATERIALIZED VIEW %I + WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS + SELECT + time_bucket(%L, subbucket) AS ohlc_bucket, + pair_id, + FIRST(source_median_price, subbucket)::numeric AS "open", + MAX(source_median_price)::numeric AS high, + MIN(source_median_price)::numeric AS low, + LAST(source_median_price, subbucket)::numeric AS "close" + FROM %I_per_source + GROUP BY ohlc_bucket, pair_id + WITH NO DATA;', + p_candle_name, p_interval, p_median_view_name); + + -- Set chunk time interval to 7 days + EXECUTE format('SELECT set_chunk_time_interval(%L, INTERVAL ''7 days'');', p_candle_name); + + -- Add continuous aggregate refresh policy + EXECUTE format(' + SELECT add_continuous_aggregate_policy(%L, + start_offset => %L, + end_offset => %L, + schedule_interval => %L);', p_candle_name, p_start_offset, '0'::interval, p_interval); +END; +$$ LANGUAGE plpgsql; + +-- Mainnet Spot Candlesticks +SELECT create_onchain_candlestick_view('mainnet_spot_candle_5_min', '5 minutes'::interval, '15 minutes'::interval, 'mainnet_spot_median_10_s'); +SELECT create_onchain_candlestick_view('mainnet_spot_candle_15_min', '15 minutes'::interval, '45 minutes'::interval, 'mainnet_spot_median_10_s'); +SELECT create_onchain_candlestick_view('mainnet_spot_candle_1_h', '1 hour'::interval, '3 hours'::interval, 'mainnet_spot_median_10_s'); +SELECT create_onchain_candlestick_view('mainnet_spot_candle_1_day', '1 day'::interval, '3 days'::interval, 'mainnet_spot_median_15_min'); +SELECT create_onchain_candlestick_view('mainnet_spot_candle_1_week', '1 week'::interval, '3 weeks'::interval, 'mainnet_spot_median_1_h'); + +-- Testnet Spot Candlesticks +SELECT create_onchain_candlestick_view('spot_candle_5_min', '5 minutes'::interval, '15 minutes'::interval, 'spot_median_10_s'); +SELECT create_onchain_candlestick_view('spot_candle_15_min', '15 minutes'::interval, '45 minutes'::interval, 'spot_median_10_s'); +SELECT create_onchain_candlestick_view('spot_candle_1_h', '1 hour'::interval, '3 hours'::interval, 'spot_median_10_s'); +SELECT create_onchain_candlestick_view('spot_candle_1_day', '1 day'::interval, '3 days'::interval, 'spot_median_15_min'); +SELECT create_onchain_candlestick_view('spot_candle_1_week', '1 week'::interval, '3 weeks'::interval, 'spot_median_1_h'); + +-- Mainnet Perp Candlesticks +SELECT create_onchain_candlestick_view('mainnet_perp_candle_5_min', '5 minutes'::interval, '15 minutes'::interval, 'mainnet_perp_median_10_s'); +SELECT create_onchain_candlestick_view('mainnet_perp_candle_15_min', '15 minutes'::interval, '45 minutes'::interval, 'mainnet_perp_median_10_s'); +SELECT create_onchain_candlestick_view('mainnet_perp_candle_1_h', '1 hour'::interval, '3 hours'::interval, 'mainnet_perp_median_10_s'); +SELECT create_onchain_candlestick_view('mainnet_perp_candle_1_day', '1 day'::interval, '3 days'::interval, 'mainnet_perp_median_15_min'); +SELECT create_onchain_candlestick_view('mainnet_perp_candle_1_week', '1 week'::interval, '3 weeks'::interval, 'mainnet_perp_median_1_h'); + +-- Testnet Perp Candlesticks +SELECT create_onchain_candlestick_view('perp_candle_5_min', 
'5 minutes'::interval, '15 minutes'::interval, 'perp_median_10_s'); +SELECT create_onchain_candlestick_view('perp_candle_15_min', '15 minutes'::interval, '45 minutes'::interval, 'perp_median_10_s'); +SELECT create_onchain_candlestick_view('perp_candle_1_h', '1 hour'::interval, '3 hours'::interval, 'perp_median_10_s'); +SELECT create_onchain_candlestick_view('perp_candle_1_day', '1 day'::interval, '3 days'::interval, 'perp_median_15_min'); +SELECT create_onchain_candlestick_view('perp_candle_1_week', '1 week'::interval, '3 weeks'::interval, 'perp_median_1_h'); diff --git a/sql/06-create-timescale-twap-aggregates.sql b/sql/06-create-timescale-twap-aggregates.sql new file mode 100644 index 00000000..e44c6381 --- /dev/null +++ b/sql/06-create-timescale-twap-aggregates.sql @@ -0,0 +1,100 @@ +CREATE FUNCTION create_onchain_twap_aggregate( + p_name text, -- e.g., 'mainnet_spot_twap_1_min' + p_table_name text, -- e.g., 'mainnet_spot_entry' + p_interval interval, -- e.g., '1 minute' + p_start_offset interval, -- e.g., '3 minutes' + p_type text -- 'spot' or 'perp' +) +RETURNS void AS $$ +DECLARE + where_condition text; +BEGIN + -- Set the WHERE condition based on p_type and p_table_name + IF p_type = 'spot' AND (p_table_name = 'mainnet_spot_entry' OR p_table_name = 'spot_entry') THEN + where_condition := '"timestamp" IS NOT NULL'; + ELSIF p_type = 'perp' AND (p_table_name = 'mainnet_future_entry' OR p_table_name = 'future_entry') THEN + where_condition := '"timestamp" IS NOT NULL AND expiration_timestamp IS NULL'; + ELSE + RAISE EXCEPTION 'Invalid combination of p_type % and p_table_name %', p_type, p_table_name; + END IF; + + -- Create the per-source TWAP materialized view + EXECUTE format(' + CREATE MATERIALIZED VIEW %s_per_source + WITH (timescaledb.continuous, timescaledb.materialized_only = false) + AS SELECT + pair_id, + source, + time_bucket(%L, "timestamp") AS subbucket, + average(time_weight(''Linear'', "timestamp", price))::numeric(1000,0) AS source_twap_price + FROM %I + WHERE %s + GROUP BY pair_id, source, subbucket + WITH NO DATA;', + p_name, p_interval, p_table_name, where_condition); + + -- Create the main TWAP materialized view averaging across sources + EXECUTE format(' + CREATE MATERIALIZED VIEW %I + WITH (timescaledb.continuous, timescaledb.materialized_only = false) + AS SELECT + pair_id, + time_bucket(%L, subbucket) AS bucket, + avg(source_twap_price)::numeric(1000,0) AS twap_price, + COUNT(DISTINCT source) AS num_sources + FROM %I + GROUP BY pair_id, bucket + WITH NO DATA;', + p_name, p_interval, p_name || '_per_source'); + + -- Set chunk time interval to 7 days for both views + EXECUTE format('SELECT set_chunk_time_interval(%L, INTERVAL ''7 days'');', p_name || '_per_source'); + EXECUTE format('SELECT set_chunk_time_interval(%L, INTERVAL ''7 days'');', p_name); + + -- Add continuous aggregate policies + EXECUTE format(' + SELECT add_continuous_aggregate_policy(%L, + start_offset => %L, + end_offset => %L, + schedule_interval => %L);', + p_name || '_per_source', p_start_offset, '0'::interval, p_interval); + EXECUTE format(' + SELECT add_continuous_aggregate_policy(%L, + start_offset => %L, + end_offset => %L, + schedule_interval => %L);', + p_name, p_start_offset, '0'::interval, p_interval); +END; +$$ LANGUAGE plpgsql; + +-- Mainnet Spot TWAP +SELECT create_onchain_twap_aggregate('mainnet_spot_twap_1_min', 'mainnet_spot_entry', '1 minute'::interval, '3 minutes'::interval, 'spot'); +SELECT create_onchain_twap_aggregate('mainnet_spot_twap_5_min', 'mainnet_spot_entry', '5 
minutes'::interval, '15 minutes'::interval, 'spot'); +SELECT create_onchain_twap_aggregate('mainnet_spot_twap_15_min', 'mainnet_spot_entry', '15 minutes'::interval, '45 minutes'::interval, 'spot'); +SELECT create_onchain_twap_aggregate('mainnet_spot_twap_1_h', 'mainnet_spot_entry', '1 hour'::interval, '3 hours'::interval, 'spot'); +SELECT create_onchain_twap_aggregate('mainnet_spot_twap_2_h', 'mainnet_spot_entry', '2 hours'::interval, '6 hours'::interval, 'spot'); +SELECT create_onchain_twap_aggregate('mainnet_spot_twap_1_day', 'mainnet_spot_entry', '1 day'::interval, '3 days'::interval, 'spot'); + +-- Testnet Spot TWAP +SELECT create_onchain_twap_aggregate('spot_twap_1_min', 'spot_entry', '1 minute'::interval, '3 minutes'::interval, 'spot'); +SELECT create_onchain_twap_aggregate('spot_twap_5_min', 'spot_entry', '5 minutes'::interval, '15 minutes'::interval, 'spot'); +SELECT create_onchain_twap_aggregate('spot_twap_15_min', 'spot_entry', '15 minutes'::interval, '45 minutes'::interval, 'spot'); +SELECT create_onchain_twap_aggregate('spot_twap_1_h', 'spot_entry', '1 hour'::interval, '3 hours'::interval, 'spot'); +SELECT create_onchain_twap_aggregate('spot_twap_2_h', 'spot_entry', '2 hours'::interval, '6 hours'::interval, 'spot'); +SELECT create_onchain_twap_aggregate('spot_twap_1_day', 'spot_entry', '1 day'::interval, '3 days'::interval, 'spot'); + +-- Mainnet Perp TWAP +SELECT create_onchain_twap_aggregate('mainnet_perp_twap_1_min', 'mainnet_future_entry', '1 minute'::interval, '3 minutes'::interval, 'perp'); +SELECT create_onchain_twap_aggregate('mainnet_perp_twap_5_min', 'mainnet_future_entry', '5 minutes'::interval, '15 minutes'::interval, 'perp'); +SELECT create_onchain_twap_aggregate('mainnet_perp_twap_15_min', 'mainnet_future_entry', '15 minutes'::interval, '45 minutes'::interval, 'perp'); +SELECT create_onchain_twap_aggregate('mainnet_perp_twap_1_h', 'mainnet_future_entry', '1 hour'::interval, '3 hours'::interval, 'perp'); +SELECT create_onchain_twap_aggregate('mainnet_perp_twap_2_h', 'mainnet_future_entry', '2 hours'::interval, '6 hours'::interval, 'perp'); +SELECT create_onchain_twap_aggregate('mainnet_perp_twap_1_day', 'mainnet_future_entry', '1 day'::interval, '3 days'::interval, 'perp'); + +-- Testnet Perp TWAP +SELECT create_onchain_twap_aggregate('perp_twap_1_min', 'future_entry', '1 minute'::interval, '3 minutes'::interval, 'perp'); +SELECT create_onchain_twap_aggregate('perp_twap_5_min', 'future_entry', '5 minutes'::interval, '15 minutes'::interval, 'perp'); +SELECT create_onchain_twap_aggregate('perp_twap_15_min', 'future_entry', '15 minutes'::interval, '45 minutes'::interval, 'perp'); +SELECT create_onchain_twap_aggregate('perp_twap_1_h', 'future_entry', '1 hour'::interval, '3 hours'::interval, 'perp'); +SELECT create_onchain_twap_aggregate('perp_twap_2_h', 'future_entry', '2 hours'::interval, '6 hours'::interval, 'perp'); +SELECT create_onchain_twap_aggregate('perp_twap_1_day', 'future_entry', '1 day'::interval, '3 days'::interval, 'perp'); diff --git a/sql/07-add-compression.sql b/sql/07-add-compression.sql new file mode 100644 index 00000000..5d5bad64 --- /dev/null +++ b/sql/07-add-compression.sql @@ -0,0 +1,61 @@ +CREATE FUNCTION add_compression_to_onchain_aggregates() +RETURNS void AS $$ +DECLARE + median_views text[] := ARRAY[ + 'mainnet_spot_median_100_ms', 'mainnet_spot_median_1_s', 'mainnet_spot_median_5_s', 'mainnet_spot_median_10_s', + 'mainnet_spot_median_1_min', 'mainnet_spot_median_15_min', 'mainnet_spot_median_1_h', 'mainnet_spot_median_2_h', + 
'mainnet_spot_median_1_day', 'mainnet_spot_median_1_week', + 'spot_median_100_ms', 'spot_median_1_s', 'spot_median_5_s', 'spot_median_10_s', + 'spot_median_1_min', 'spot_median_15_min', 'spot_median_1_h', 'spot_median_2_h', + 'spot_median_1_day', 'spot_median_1_week', + 'mainnet_perp_median_100_ms', 'mainnet_perp_median_1_s', 'mainnet_perp_median_5_s', 'mainnet_perp_median_10_s', + 'mainnet_perp_median_1_min', 'mainnet_perp_median_15_min', 'mainnet_perp_median_1_h', 'mainnet_perp_median_2_h', + 'mainnet_perp_median_1_day', 'mainnet_perp_median_1_week', + 'perp_median_100_ms', 'perp_median_1_s', 'perp_median_5_s', 'perp_median_10_s', + 'perp_median_1_min', 'perp_median_15_min', 'perp_median_1_h', 'perp_median_2_h', + 'perp_median_1_day', 'perp_median_1_week' + ]; + twap_views text[] := ARRAY[ + 'mainnet_spot_twap_1_min', 'mainnet_spot_twap_5_min', 'mainnet_spot_twap_15_min', + 'mainnet_spot_twap_1_h', 'mainnet_spot_twap_2_h', 'mainnet_spot_twap_1_day', + 'spot_twap_1_min', 'spot_twap_5_min', 'spot_twap_15_min', + 'spot_twap_1_h', 'spot_twap_2_h', 'spot_twap_1_day', + 'mainnet_perp_twap_1_min', 'mainnet_perp_twap_5_min', 'mainnet_perp_twap_15_min', + 'mainnet_perp_twap_1_h', 'mainnet_perp_twap_2_h', 'mainnet_perp_twap_1_day', + 'perp_twap_1_min', 'perp_twap_5_min', 'perp_twap_15_min', + 'perp_twap_1_h', 'perp_twap_2_h', 'perp_twap_1_day' + ]; + candle_views text[] := ARRAY[ + 'mainnet_spot_candle_10_s', 'mainnet_spot_candle_1_min', 'mainnet_spot_candle_5_min', 'mainnet_spot_candle_15_min', + 'mainnet_spot_candle_1_h', 'mainnet_spot_candle_1_day', + 'spot_candle_10_s', 'spot_candle_1_min', 'spot_candle_5_min', 'spot_candle_15_min', + 'spot_candle_1_h', 'spot_candle_1_day', + 'mainnet_perp_candle_10_s', 'mainnet_perp_candle_1_min', 'mainnet_perp_candle_5_min', 'mainnet_perp_candle_15_min', + 'mainnet_perp_candle_1_h', 'mainnet_perp_candle_1_day', + 'perp_candle_10_s', 'perp_candle_1_min', 'perp_candle_5_min', 'perp_candle_15_min', + 'perp_candle_1_h', 'perp_candle_1_day' + ]; + view_to_compress text; + compress_after interval; +BEGIN + FOR view_to_compress IN + SELECT view_n + FROM ( + SELECT unnest(median_views) AS view_n + UNION + SELECT unnest(median_views) || '_per_source' + UNION + SELECT unnest(twap_views) + UNION + SELECT unnest(candle_views) + ) AS all_views + LOOP + -- Perform operations sequentially within a transaction + EXECUTE format('ALTER MATERIALIZED VIEW %I SET (timescaledb.enable_columnstore = true, timescaledb.segmentby = ''pair_id'')', view_to_compress); + EXECUTE format('CALL add_columnstore_policy(%L, after => INTERVAL ''1 day'')', view_to_compress); + END LOOP; +END; +$$ LANGUAGE plpgsql; + +-- Execute the compression function +SELECT add_compression_to_onchain_aggregates(); diff --git a/infra/pragma-node/postgres_migrations/15-create-indexes.sql b/sql/08-create-indexes.sql similarity index 97% rename from infra/pragma-node/postgres_migrations/15-create-indexes.sql rename to sql/08-create-indexes.sql index 0c806315..adf0c7f5 100644 --- a/infra/pragma-node/postgres_migrations/15-create-indexes.sql +++ b/sql/08-create-indexes.sql @@ -33,11 +33,4 @@ CREATE INDEX mainnet_spot_entry_idx_cursor ON mainnet_spot_entry (_cursor); CREATE INDEX future_entry_idx_cursor ON future_entry (_cursor); CREATE INDEX mainnet_future_entry_idx_cursor ON mainnet_future_entry (_cursor); - - - - - - - - +CREATE INDEX idx_publisher_name ON publishers (name); diff --git a/tests/Cargo.toml b/tests/Cargo.toml index aa858a22..e625fcaa 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -1,8 +1,9 
diff --git a/infra/pragma-node/postgres_migrations/15-create-indexes.sql b/sql/08-create-indexes.sql
similarity index 97%
rename from infra/pragma-node/postgres_migrations/15-create-indexes.sql
rename to sql/08-create-indexes.sql
index 0c806315..adf0c7f5 100644
--- a/infra/pragma-node/postgres_migrations/15-create-indexes.sql
+++ b/sql/08-create-indexes.sql
@@ -33,11 +33,4 @@ CREATE INDEX mainnet_spot_entry_idx_cursor ON mainnet_spot_entry (_cursor);
 CREATE INDEX future_entry_idx_cursor ON future_entry (_cursor);
 CREATE INDEX mainnet_future_entry_idx_cursor ON mainnet_future_entry (_cursor);
-
-
-
-
-
-
-
-
+CREATE INDEX idx_publisher_name ON publishers (name);
diff --git a/tests/Cargo.toml b/tests/Cargo.toml
index aa858a22..e625fcaa 100644
--- a/tests/Cargo.toml
+++ b/tests/Cargo.toml
@@ -1,8 +1,9 @@
 [package]
 name = "tests"
 version = "0.1.0"
-edition = "2021"
+edition.workspace = true
 license = "MIT"
+publish = false

 [[test]]
 name = "node-tests"
@@ -10,6 +11,7 @@ path = "e2e/main.rs"

 [dev-dependencies]
 pragma-common = { workspace = true }
+pragma-node = { workspace = true }
 diesel = { workspace = true, features = [
     "postgres",
diff --git a/tests/e2e/common/constants.rs b/tests/e2e/common/constants.rs
index 39b5b0ac..8256478a 100644
--- a/tests/e2e/common/constants.rs
+++ b/tests/e2e/common/constants.rs
@@ -1,3 +1,4 @@
 pub const EMPTY_SIGNATURE: &str = "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
 pub const SOURCES: &[&str] = &["BINANCE", "BEBOP", "AVNU", "KUCOIN", "BYBIT", "GATEIO"];
-pub const VARIATION_PERCENTAGE: f64 = 5.0;
+pub const VARIATION_PERCENTAGE: f64 = 2.5;
+pub const EIGHTEEN_DECIMALS: i32 = 18;
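+// Note: test prices are floats scaled by 10^EIGHTEEN_DECIMALS before being stored as
+// u128, e.g. an illustrative 2500.0 quote becomes 2500 * 10^18 (see get_pair_price in
+// populate.rs below).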
diff --git a/tests/e2e/common/containers/offchain_db.rs b/tests/e2e/common/containers/offchain_db.rs
index cc6538ec..7442797f 100644
--- a/tests/e2e/common/containers/offchain_db.rs
+++ b/tests/e2e/common/containers/offchain_db.rs
@@ -10,7 +10,7 @@ pub const OFFCHAIN_DB_CONTAINER_NAME: &str = "test-offchain-db";
 pub async fn setup_offchain_db() -> ContainerAsync<Postgres> {
     Postgres::default()
         .with_name("timescale/timescaledb-ha")
-        .with_tag("pg14-latest")
+        .with_tag("pg17.4-ts2.18.2")
         .with_env_var("POSTGRES_DB", "pragma")
         .with_env_var("POSTGRES_PASSWORD", "test-password")
         .with_env_var("TIMESCALEDB_TELEMETRY", "off")
diff --git a/tests/e2e/common/containers/onchain_db.rs b/tests/e2e/common/containers/onchain_db.rs
index c8644815..9c122d1f 100644
--- a/tests/e2e/common/containers/onchain_db.rs
+++ b/tests/e2e/common/containers/onchain_db.rs
@@ -5,8 +5,8 @@
 use testcontainers::runners::AsyncRunner;
 use testcontainers::{ContainerAsync, ImageExt};
 use testcontainers_modules::postgres::Postgres;

-use super::utils::migrations::run_migrations;
 use super::Timescale;
+use super::utils::migrations::run_migrations;

 pub const ONCHAIN_DB_CONTAINER_NAME: &str = "test-onchain-db";

@@ -14,7 +14,7 @@ pub const ONCHAIN_DB_CONTAINER_NAME: &str = "test-onchain-db";
 pub async fn setup_onchain_db() -> ContainerAsync<Postgres> {
     Postgres::default()
         .with_name("timescale/timescaledb-ha")
-        .with_tag("pg14-latest")
+        .with_tag("pg17.4-ts2.18.2")
         .with_env_var("POSTGRES_DB", "pragma")
         .with_env_var("POSTGRES_PASSWORD", "test-password")
         .with_env_var("TIMESCALEDB_TELEMETRY", "off")
@@ -26,12 +26,7 @@ pub async fn setup_onchain_db() -> ContainerAsync<Postgres> {
 }

 pub async fn run_onchain_migrations(db_pool: &Pool) {
-    let migrations_folder = current_dir()
-        .unwrap()
-        .join("..")
-        .join("infra")
-        .join("pragma-node")
-        .join("postgres_migrations");
+    let migrations_folder = current_dir().unwrap().join("..").join("sql");

     run_migrations(db_pool, migrations_folder).await;
 }
diff --git a/tests/e2e/common/containers/pragma_node/docker.rs b/tests/e2e/common/containers/pragma_node/docker.rs
index 1f337709..b7c9f812 100644
--- a/tests/e2e/common/containers/pragma_node/docker.rs
+++ b/tests/e2e/common/containers/pragma_node/docker.rs
@@ -1,9 +1,9 @@
 use std::{borrow::Cow, collections::HashMap, env::current_dir, path::PathBuf, time::Duration};

 use testcontainers::{
-    core::{wait::HttpWaitStrategy, ContainerPort, IntoContainerPort, WaitFor},
-    runners::AsyncRunner,
     ContainerAsync, Image, ImageExt,
+    core::{ContainerPort, IntoContainerPort, WaitFor, wait::HttpWaitStrategy},
+    runners::AsyncRunner,
 };

 use crate::common::containers::pragma_node::DB_PORT;
@@ -92,6 +92,20 @@ impl PragmaNode {
             .insert("ONCHAIN_DATABASE_URL".to_owned(), db_url.to_owned());
         self
     }
+
+    /// Sets the mainnet RPC URL.
+    pub fn with_mainnet_rpc_url(mut self, rpc_url: &str) -> Self {
+        self.env_vars
+            .insert("MAINNET_RPC_URL".to_owned(), rpc_url.to_owned());
+        self
+    }
+
+    /// Sets the sepolia RPC URL.
+    pub fn with_sepolia_rpc_url(mut self, rpc_url: &str) -> Self {
+        self.env_vars
+            .insert("SEPOLIA_RPC_URL".to_owned(), rpc_url.to_owned());
+        self
+    }
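+
+    // Illustrative usage of the two builders above (the URLs shown are the defaults
+    // set in the Default impl below):
+    //
+    //   let node = PragmaNode::default()
+    //       .with_mainnet_rpc_url("https://free-rpc.nethermind.io/mainnet-juno")
+    //       .with_sepolia_rpc_url("https://free-rpc.nethermind.io/sepolia-juno");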
 }

 impl Image for PragmaNode {
@@ -126,7 +140,14 @@ impl Default for PragmaNode {
         env_vars.insert("KAFKA_BROKERS".to_owned(), "pragma-data".to_owned());
         env_vars.insert("PORT".to_owned(), "3000".to_owned());
         env_vars.insert("METRICS_PORT".to_owned(), "8080".to_owned());
-
+        env_vars.insert(
+            "MAINNET_RPC_URL".to_owned(),
+            "https://free-rpc.nethermind.io/mainnet-juno".to_owned(),
+        );
+        env_vars.insert(
+            "SEPOLIA_RPC_URL".to_owned(),
+            "https://free-rpc.nethermind.io/sepolia-juno".to_owned(),
+        );
         Self { env_vars }
     }
 }
@@ -135,12 +156,7 @@ impl Default for PragmaNode {

 // Returns the path of the Pragma node dockerfile.
 fn pragma_node_dockerfile_path() -> PathBuf {
-    current_dir()
-        .unwrap()
-        .join("..")
-        .join("infra")
-        .join("pragma-node")
-        .join("Dockerfile")
+    current_dir().unwrap().join("..").join("Dockerfile.node")
 }

 // Builds a connection URL from a host & db port.
diff --git a/tests/e2e/common/setup.rs b/tests/e2e/common/setup.rs
index 7c902c2a..8f625196 100644
--- a/tests/e2e/common/setup.rs
+++ b/tests/e2e/common/setup.rs
@@ -1,31 +1,29 @@
 use std::sync::Arc;

-use deadpool_diesel::{postgres::Pool, Manager};
+use deadpool_diesel::{Manager, postgres::Pool};
 use diesel::RunQueryDsl;
-use pragma_common::types::{AggregationMode, Interval};
+use pragma_common::{AggregationMode, InstrumentType, Interval};
+use pragma_node::utils::sql::{get_interval_specifier, get_table_suffix};
 use testcontainers::ContainerAsync;
 use testcontainers_modules::kafka::Kafka;
 use testcontainers_modules::zookeeper::Zookeeper;

 use crate::common::containers::{
+    Containers, Timescale,
     kafka::{init_kafka_topics, setup_kafka},
     offchain_db::setup_offchain_db,
     onchain_db::{run_onchain_migrations, setup_onchain_db},
     pragma_node::{
-        docker::{setup_pragma_node_with_docker, PragmaNode},
-        local::setup_pragma_node_with_cargo, SERVER_PORT,
+        docker::{PragmaNode, setup_pragma_node_with_docker},
+        local::setup_pragma_node_with_cargo,
     },
     zookeeper::setup_zookeeper,
-    Containers, Timescale,
 };
 use crate::common::logs::init_logging;

-use super::{
-    containers::pragma_node::PragmaNodeMode,
-    utils::{get_interval_specifier, get_window_size},
-};
+use super::{containers::pragma_node::PragmaNodeMode, utils::get_window_size};

 /// Main structure that we carry around for our tests.
 /// Contains some useful fields & functions attached to make testing easier.
@@ -62,19 +60,6 @@ impl TestHelper {
         }
     }

-    pub async fn push_strk(&self, pool: &Pool) {
-        self.execute_sql(
-            pool,
-            r#"
-        INSERT INTO
-            public.currencies (name, decimals, abstract, ethereum_address)
-        VALUES
-            ('STRK', 8, false, NULL)"#
-                .to_string(),
-        )
-        .await;
-    }
-
     /// Refreshes a TimescaleDB continuous aggregate materialized view around a specific timestamp.
     /// The refreshed view will be automatically found depending on the interval + aggregation mode.
     /// NOTE: It does not work with future entries for now since we don't care for our tests yet.
@@ -84,24 +69,25 @@
         interval: Interval,
         aggregation: AggregationMode,
     ) {
-        let is_twap = matches!(aggregation, AggregationMode::Twap);
-        let interval_spec = get_interval_specifier(interval, is_twap);
+        let interval_spec =
+            get_interval_specifier(interval, matches!(aggregation, AggregationMode::Twap)).unwrap();
         let window_size = get_window_size(interval);
+        let suffix = get_table_suffix(InstrumentType::Spot).unwrap();

         let table_name = if matches!(aggregation, AggregationMode::Twap) {
             "twap"
         } else {
-            "price"
+            "median"
         };

         let sql = format!(
-            r#"
+            r"
        CALL refresh_continuous_aggregate(
-                '{}_{}_agg',
+                '{}_{}_{}',
                 to_timestamp({} - {}),
                 to_timestamp({} + {})
-            );"#,
-            table_name, interval_spec, timestamp, window_size, timestamp, window_size
+            );",
+            table_name, interval_spec, suffix, timestamp, window_size, timestamp, window_size
         );

         self.execute_sql(&self.offchain_pool, sql).await;
@@ -173,7 +159,6 @@ pub async fn setup_containers(
         PragmaNodeMode::Docker => {
             tracing::info!("🔨 Setup pragma_node in Docker mode...");
             let node = setup_pragma_node_with_docker.await;
-            tracing::info!("✅ ... pragma-node ready!\n");
             (Some(Arc::new(node)), None)
         }
         PragmaNodeMode::Local => {
diff --git a/tests/e2e/common/utils/comparisons.rs b/tests/e2e/common/utils/comparisons.rs
index a53bfc91..43cd179a 100644
--- a/tests/e2e/common/utils/comparisons.rs
+++ b/tests/e2e/common/utils/comparisons.rs
@@ -1,4 +1,4 @@
-use bigdecimal::{num_bigint::BigInt, BigDecimal, Num};
+use bigdecimal::{BigDecimal, Num, num_bigint::BigInt};

 /// Calculates the percentage difference between two hex-formatted prices
 pub fn price_difference_percentage(price1: &str, price2: &str) -> BigDecimal {
diff --git a/tests/e2e/common/utils/populate.rs b/tests/e2e/common/utils/populate.rs
index dcaba0fc..e185317c 100644
--- a/tests/e2e/common/utils/populate.rs
+++ b/tests/e2e/common/utils/populate.rs
@@ -1,7 +1,7 @@
 use rand::Rng;
 use std::ops::Range;

-use crate::common::constants::{EMPTY_SIGNATURE, SOURCES, VARIATION_PERCENTAGE};
+use crate::common::constants::{EIGHTEEN_DECIMALS, EMPTY_SIGNATURE, SOURCES, VARIATION_PERCENTAGE};

 pub fn get_pair_price(pair: &str) -> u128 {
     let price = match pair {
@@ -13,7 +13,7 @@ pub fn get_pair_price(pair: &str) -> u128 {
         _ => panic!("Pair not found, add it to the const PAIRS"),
     };

-    (price * 10.0_f64.powi(8)) as u128
+    (price * 10.0_f64.powi(EIGHTEEN_DECIMALS)) as u128
 }

 pub fn generate_entries(pairs: Vec<&str>, num_entries: u32, timestamp: u64) -> Vec<String> {
@@ -55,7 +55,7 @@ pub fn generate_entry(pair: &str, source: &str, timestamp: u64) -> String {

 pub fn entry_from(pair: &str, timestamp: u64, price: u128, source: &str) -> String {
     format!(
-        r#"
+        r"
        INSERT INTO entries (
            pair_id,
            publisher,
@@ -71,6 +71,6 @@ pub fn entry_from(pair: &str, timestamp: u64, price: u128, source: &str) -> Stri
            '{source}',
            '{EMPTY_SIGNATURE}'
        );
-        "#
+        "
     )
 }
diff --git a/tests/e2e/common/utils/pragma_node.rs b/tests/e2e/common/utils/pragma_node.rs
index a7d809ec..f9bce8bc 100644
--- a/tests/e2e/common/utils/pragma_node.rs
+++ b/tests/e2e/common/utils/pragma_node.rs
@@ -1,37 +1,19 @@
 //! Contains utils from the pragma-node repository.
 //! Since we can't import them directly, we recreate them here.
-use pragma_common::types::Interval;
-
-pub const fn get_interval_specifier(interval: Interval, is_twap: bool) -> &'static str {
-    if is_twap {
-        match interval {
-            Interval::OneHour => "1_hour",
-            Interval::TwoHours => "2_hours",
-            _ => panic!("unsupported interval"),
-        }
-    } else {
-        match interval {
-            Interval::OneSecond => "1_s",
-            Interval::FiveSeconds => "5_s",
-            Interval::OneMinute => "1_min",
-            Interval::FifteenMinutes => "15_min",
-            Interval::OneHour => "1_h",
-            Interval::TwoHours => "2_h",
-            Interval::OneDay => "1_day",
-            Interval::OneWeek => "1_week",
-        }
-    }
-}
+use pragma_common::Interval;

 pub const fn get_window_size(interval: Interval) -> i64 {
     match interval {
-        Interval::OneSecond => 10,        // 10 seconds window
-        Interval::FiveSeconds => 30,      // 30 seconds window
-        Interval::OneMinute => 300,       // 5 minutes window
-        Interval::FifteenMinutes => 1800, // 30 minutes window
-        Interval::OneHour => 7200,        // 2 hours window
-        Interval::TwoHours => 14400,      // 4 hours window
-        Interval::OneDay => 86400,        // 24 hours window
-        Interval::OneWeek => 604800,      // 1 week window
+        Interval::OneHundredMillisecond => 1, // 1 second window
+        Interval::OneSecond => 10,            // 10 seconds window
+        Interval::FiveSeconds => 30,          // 30 seconds window
+        Interval::TenSeconds => 60,           // 60 seconds window
+        Interval::OneMinute => 300,           // 5 minutes window
+        Interval::FiveMinutes => 900,         // 15 minutes window
+        Interval::FifteenMinutes => 1800,     // 30 minutes window
+        Interval::OneHour => 7200,            // 2 hours window
+        Interval::TwoHours => 14400,          // 4 hours window
+        Interval::OneDay => 86400,            // 24 hours window
+        Interval::OneWeek => 604800,          // 1 week window
     }
 }
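+
+// Note: setup.rs uses this window as symmetric slack around the target timestamp,
+// refreshing the continuous aggregate from (timestamp - window) to (timestamp + window).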
diff --git a/tests/e2e/get_entry.rs b/tests/e2e/get_entry.rs
index a52cff8f..3b1ec062 100644
--- a/tests/e2e/get_entry.rs
+++ b/tests/e2e/get_entry.rs
@@ -1,14 +1,14 @@
 use bigdecimal::{BigDecimal, FromPrimitive};
 use rstest::rstest;

-use pragma_common::types::{AggregationMode, Interval};
+use pragma_common::{AggregationMode, Interval};
 use serde::{Deserialize, Serialize};

 use crate::{
     assert_hex_prices_within_threshold,
     common::{
-        constants::VARIATION_PERCENTAGE,
-        setup::{setup_containers, TestHelper},
+        constants::{EIGHTEEN_DECIMALS, VARIATION_PERCENTAGE},
+        setup::{TestHelper, setup_containers},
         utils::populate::get_pair_price,
     },
 };
@@ -93,14 +93,16 @@ pub struct GetEntryResponse {
 }

 #[rstest]
-#[case::one_second(Interval::OneSecond)]
-#[case::five_seconds(Interval::FiveSeconds)]
-#[case::one_minute(Interval::OneMinute)]
-#[case::fifteen_minutes(Interval::FifteenMinutes)]
-#[case::one_hour(Interval::OneHour)]
-#[case::two_hours(Interval::TwoHours)]
-#[case::one_day(Interval::OneDay)]
-#[case::one_week(Interval::OneWeek)]
+#[case(Interval::OneHundredMillisecond)]
+#[case(Interval::OneSecond)]
+#[case(Interval::FiveSeconds)]
+#[case(Interval::TenSeconds)]
+#[case(Interval::OneMinute)]
+#[case(Interval::FifteenMinutes)]
+#[case(Interval::OneHour)]
+#[case(Interval::TwoHours)]
+#[case(Interval::OneDay)]
+#[case(Interval::OneWeek)]
 #[serial_test::serial]
 #[tokio::test]
 async fn get_entry_median_ok_many(
@@ -113,7 +115,7 @@ async fn get_entry_median_ok_many(
     let pair_id = "ETH/USD";
     let current_timestamp: u64 = chrono::Utc::now().timestamp() as u64;
     let price: u128 = populate::get_pair_price(pair_id);
-    let sql_many = populate::generate_entries(vec!["ETH/USD"], 1000, current_timestamp);
+    let sql_many = populate::generate_entries(vec!["ETH/USD"], 100, current_timestamp);

     hlpr.execute_sql_many(&hlpr.offchain_pool, sql_many).await;

@@ -137,7 +139,6 @@ async fn get_entry_median_ok_many(
             .with_routing(false)
             .with_aggregation(queried_aggregation),
     );
-    tracing::info!("with endpoint: {endpoint}");

     let response = reqwest::get(hlpr.endpoint(&endpoint))
         .await
@@ -173,7 +174,7 @@ async fn get_entry_twap_many_ok(
     let pair_id = "ETH/USD";
     let current_timestamp: u64 = chrono::Utc::now().timestamp() as u64;
     let price: u128 = populate::get_pair_price(pair_id);
-    let sql_many = populate::generate_entries(vec!["ETH/USD"], 1000, current_timestamp);
+    let sql_many = populate::generate_entries(vec!["ETH/USD"], 100, current_timestamp);

     hlpr.execute_sql_many(&hlpr.offchain_pool, sql_many).await;

@@ -197,7 +198,6 @@ async fn get_entry_twap_many_ok(
             .with_routing(false)
             .with_aggregation(queried_aggregation),
     );
-    tracing::info!("with endpoint: {endpoint}");

     let response = reqwest::get(hlpr.endpoint(&endpoint))
         .await
@@ -229,13 +229,11 @@ async fn get_entry_twap_strk_eth_ok(
 ) {
     let mut hlpr = setup_containers.await;

-    hlpr.push_strk(&hlpr.offchain_pool).await;
-
     // 1. Insert one entry
     let pair_id = "STRK/USD";
     let current_timestamp: u64 = chrono::Utc::now().timestamp() as u64;
     let price: u128 = populate::get_pair_price(pair_id);
-    let sql_many = populate::generate_entries(vec!["ETH/USD", "STRK/USD"], 1000, current_timestamp);
+    let sql_many = populate::generate_entries(vec!["ETH/USD", "STRK/USD"], 100, current_timestamp);

     hlpr.execute_sql_many(&hlpr.offchain_pool, sql_many).await;

@@ -259,7 +257,6 @@ async fn get_entry_twap_strk_eth_ok(
             .with_routing(true)
             .with_aggregation(queried_aggregation),
     );
-    tracing::info!("with endpoint: {endpoint}");

     let response = reqwest::get(hlpr.endpoint(&endpoint))
         .await
@@ -274,7 +271,7 @@ async fn get_entry_twap_strk_eth_ok(

     // 4. Assert
     let strk_eth_price = price as f64 / get_pair_price("ETH/USD") as f64;
-    let strk_eth_price = strk_eth_price * 10.0_f64.powi(8);
+    let strk_eth_price = strk_eth_price * 10.0_f64.powi(EIGHTEEN_DECIMALS);
     let expected_price_hex = format!("0x{:x}", strk_eth_price as u128);

     let threshold = BigDecimal::from_f64(VARIATION_PERCENTAGE).unwrap();
diff --git a/tests/e2e/healthcheck.rs b/tests/e2e/healthcheck.rs
index 93daab26..9da91954 100644
--- a/tests/e2e/healthcheck.rs
+++ b/tests/e2e/healthcheck.rs
@@ -1,7 +1,7 @@
 use pretty_assertions::assert_eq;
 use rstest::rstest;

-use crate::common::setup::{setup_containers, TestHelper};
+use crate::common::setup::{TestHelper, setup_containers};

 #[rstest]
 #[serial_test::serial]